commit 749adf47ca: latest sources commit

.devcontainer/devcontainer.json (new file, 53 lines)
@@ -0,0 +1,53 @@
// If you want to run as a non-root user in the container, see .devcontainer/docker-compose.yml.
|
||||
{
|
||||
"name": "ac-dev-server",
|
||||
|
||||
// Update the 'dockerComposeFile' list if you have more compose files or use different names.
|
||||
// set an empty array to automatically resolve
|
||||
// the docker-compose files (including the .override.yml)
|
||||
// https://github.com/microsoft/vscode-remote-release/issues/1080#issuecomment-824213014
|
||||
// it requires vscode 1.57+
|
||||
"dockerComposeFile": [],
|
||||
// The 'service' property is the name of the service for the container that VS Code should
|
||||
// use. Update this value and .devcontainer/docker-compose.yml to the real service name.
|
||||
"service": "ac-dev-server",
|
||||
|
||||
// The optional 'workspaceFolder' property is the path VS Code should open by default when
|
||||
// connected. This is typically a file mount in .devcontainer/docker-compose.yml
|
||||
"workspaceFolder": "/azerothcore",
|
||||
|
||||
// Set *default* container specific settings.json values on container create.
|
||||
"settings": {
|
||||
"terminal.integrated.shell.linux": null
|
||||
},
|
||||
|
||||
// Add the IDs of extensions you want installed when the container is created.
|
||||
"extensions": [
|
||||
"notskm.clang-tidy",
|
||||
"xaver.clang-format",
|
||||
"bbenoist.doxygen",
|
||||
"ms-vscode.cpptools",
|
||||
"austin.code-gnu-global",
|
||||
"twxs.cmake",
|
||||
"mhutchie.git-graph",
|
||||
"github.vscode-pull-request-github",
|
||||
"eamodio.gitlens",
|
||||
"cschlosser.doxdocgen",
|
||||
"sanaajani.taskrunnercode"
|
||||
],
|
||||
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
// "forwardPorts": [],
|
||||
|
||||
// Uncomment the next line if you want start specific services in your Docker Compose config.
|
||||
"runServices": ["ac-dev-server", "ac-database"],
|
||||
|
||||
// Uncomment the next line if you want to keep your containers running after VS Code shuts down.
|
||||
// "shutdownAction": "none",
|
||||
|
||||
// Uncomment the next line to run commands after the container is created - for example installing curl.
|
||||
// "postCreateCommand": "apt-get update && apt-get install -y curl",
|
||||
|
||||
// Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
|
||||
"remoteUser": "root"
|
||||
}
|
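The devcontainer above intentionally leaves "dockerComposeFile" empty so that VS Code resolves the compose files (including any .override.yml) on its own, and it only starts the services listed under "runServices". As a rough equivalent outside VS Code, the sketch below starts the same two services with plain Docker Compose; it assumes the project's compose files sit in the directory it is run from (the service names are taken from "runServices" above):

    # start only the services the devcontainer requests
    docker compose up -d ac-dev-server ac-database
    # follow the dev server output while it boots
    docker compose logs -f ac-dev-server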

.devcontainer/docker-compose.yml (new file, 35 lines)
@@ -0,0 +1,35 @@
version: '3.9'
|
||||
services:
|
||||
# Update this to the name of the service you want to work with in your docker-compose.yml file
|
||||
ac-dev-server:
|
||||
# If you want add a non-root user to your Dockerfile, you can use the "remoteUser"
|
||||
# property in devcontainer.json to cause VS Code and its sub-processes (terminals, tasks,
|
||||
# debugging) to execute as the user. Uncomment the next line if you want the entire
|
||||
# container to run as this user instead. Note that, on Linux, you may need to
|
||||
# ensure the UID and GID of the container user you create matches your local user.
|
||||
# See https://aka.ms/vscode-remote/containers/non-root for details.
|
||||
#
|
||||
# user: vscode
|
||||
|
||||
# Uncomment if you want to override the service's Dockerfile to one in the .devcontainer
|
||||
# folder. Note that the path of the Dockerfile and context is relative to the *primary*
|
||||
# docker-compose.yml file (the first in the devcontainer.json "dockerComposeFile"
|
||||
# array). The sample below assumes your primary file is in the root of your project.
|
||||
#
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: .devcontainer/Dockerfile
|
||||
|
||||
#volumes:
|
||||
# Update this to wherever you want VS Code to mount the folder of your project
|
||||
#- .:/workspace:cached
|
||||
|
||||
# Uncomment the next line to use Docker from inside the container. See https://aka.ms/vscode-remote/samples/docker-from-docker-compose for details.
|
||||
# - /var/run/docker.sock:/var/run/docker.sock
|
||||
|
||||
# Uncomment the next four lines if you will use a ptrace-based debugger like C++, Go, and Rust.
|
||||
# cap_add:
|
||||
# - SYS_PTRACE
|
||||
# security_opt:
|
||||
# - seccomp:unconfined
|
||||
tty: true
|

.dockerignore (new file, 14 lines)
@@ -0,0 +1,14 @@
/cmake-build-debug/*
/build*/
/var/*
!/var/build/.gitkeep
!/var/ccache/.gitkeep
/env/dist/*
!/env/dist/.gitkeep
/env/user/*
/.env*
.idea
!.gitkeep

# do not ignore the ccache folder (used by the ci)
!/var/docker/ccache

.editorconfig (new file, 17 lines)
@@ -0,0 +1,17 @@
[*]
charset = utf-8
indent_style = space
indent_size = 4
tab_width = 4
insert_final_newline = true
trim_trailing_whitespace = true
max_line_length = 80

[*.{json,ts,js,yml}]
charset = utf-8
indent_style = space
indent_size = 2
tab_width = 2
insert_final_newline = true
trim_trailing_whitespace = true
max_line_length = 80

.git_commit_template.txt (new file, 53 lines)
@@ -0,0 +1,53 @@
### TITLE
|
||||
## Type(Scope/Subscope): Commit ultra short explanation
|
||||
## |---- Write below the examples with a maximum of 50 characters ----|
|
||||
## Example 1: fix(DB/SAI): Missing spell to NPC Hogger
|
||||
## Example 2: fix(CORE/Raid): Phase 2 of Ragnaros
|
||||
## Example 3: feat(CORE/Commands): New GM command to do something
|
||||
|
||||
|
||||
### DESCRIPTION
|
||||
## Explain why this change is being made, what does it fix etc...
|
||||
## |---- Write below the examples with a maximum of 72 characters per line ----|
|
||||
## Example: Hogger (id: 492) was not charging player when being engaged.
|
||||
|
||||
|
||||
## Provide links to any issue, commit, pull request or other resource
|
||||
## Example 1: Closes AzerothCore issue #23
|
||||
## Example 2: Ported from other project's commit (link)
|
||||
## Example 3: References taken from wowpedia / wowhead / wowwiki / https://wowgaming.altervista.org/aowow/
|
||||
|
||||
|
||||
### CO-AUTHOR(S)
|
||||
## If there are more authors they can be mentioned like this
|
||||
## Co-authored-by: name <name@example.com>
|
||||
|
||||
|
||||
## =======================================================
|
||||
## EXTRA INFOS
|
||||
## =======================================================
|
||||
## "Type" can be:
|
||||
## feat (new feature)
|
||||
## fix (bug fix)
|
||||
## refactor (refactoring production code)
|
||||
## style (formatting, missing semicolons, etc; no code change)
|
||||
## docs (changes to documentation)
|
||||
## test (adding or refactoring tests; no production code change)
|
||||
## chore (updating bash scripts, git files etc; no production code change)
|
||||
## --------------------
|
||||
## Remember to
|
||||
## Capitalize the subject line
|
||||
## Use the imperative mood in the subject line
|
||||
## Do not end the subject line with a period
|
||||
## Separate subject from body with a blank line
|
||||
## Use the body to explain what and why rather than how
|
||||
## Can use multiple lines with "-" for bullet points in body
|
||||
## --------------------
|
||||
## More info here https://www.conventionalcommits.org/en/v1.0.0-beta.2/
|
||||
## =======================================================
|
||||
## "Scope" can be:
|
||||
## CORE (core related, c++)
|
||||
## DB (database related, sql)
|
||||
## =======================================================
|
||||
## "Subscope" is optional and depends on the nature of the commit.
|
||||
## =======================================================
|
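To have Git pre-fill new commit messages with this template, it can be registered per clone with a standard Git setting; the path below is the file added in this commit:

    # use the template for commits made in this clone only
    git config commit.template .git_commit_template.txt
    # "git commit" (without -m) now opens the editor pre-filled with the template above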

.gitattributes (new file, vendored, 30 lines)
@@ -0,0 +1,30 @@
|
||||
# Auto detect text files and perform LF normalization
|
||||
* text eol=lf
|
||||
|
||||
# Whitespace rules
|
||||
# strict (no trailing, no tabs)
|
||||
*.cpp whitespace=trailing-space,space-before-tab,tab-in-indent,cr-at-eol
|
||||
*.h whitespace=trailing-space,space-before-tab,tab-in-indent,cr-at-eol
|
||||
|
||||
# normal (no trailing)
|
||||
*.sql whitespace=trailing-space,space-before-tab,cr-at-eol
|
||||
*.txt whitespace=trailing-space,space-before-tab,cr-at-eol
|
||||
|
||||
# special files which must ignore whitespace
|
||||
*.patch whitespace=-trailing-space eol=lf
|
||||
*.diff whitespace=-trailing-space eol=lf
|
||||
|
||||
# Standard to msysgit
|
||||
*.doc diff=astextplain
|
||||
*.DOC diff=astextplain
|
||||
*.docx diff=astextplain
|
||||
*.DOCX diff=astextplain
|
||||
*.dot diff=astextplain
|
||||
*.DOT diff=astextplain
|
||||
*.pdf diff=astextplain
|
||||
*.PDF diff=astextplain
|
||||
*.rtf diff=astextplain
|
||||
*.RTF diff=astextplain
|
||||
|
||||
# Ignore sql/* files
|
||||
data/sql/* linguist-documentation
|
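A quick way to verify that these attributes are picked up is git check-attr, and git diff --check reports the whitespace violations defined above before they reach a commit; the C++ path below is only a placeholder:

    # show which text/eol/whitespace rules apply to a file (placeholder path)
    git check-attr text eol whitespace -- src/SomeFile.cpp
    # flag trailing whitespace and tab-in-indent problems in staged changes
    git diff --check --cached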

.github/CODE_OF_CONDUCT.md (new file, vendored, 76 lines)
@@ -0,0 +1,76 @@
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
In the interest of fostering an open and welcoming environment, we as
|
||||
contributors and maintainers pledge to making participation in our project and
|
||||
our community a harassment-free experience for everyone, regardless of age, body
|
||||
size, disability, ethnicity, sex characteristics, gender identity and expression,
|
||||
level of experience, education, socio-economic status, nationality, personal
|
||||
appearance, race, religion, or sexual identity and orientation.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment
|
||||
include:
|
||||
|
||||
* Using welcoming and inclusive language
|
||||
* Being respectful of differing viewpoints and experiences
|
||||
* Gracefully accepting constructive criticism
|
||||
* Focusing on what is best for the community
|
||||
* Showing empathy towards other community members
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
|
||||
* The use of sexualized language or imagery and unwelcome sexual attention or
|
||||
advances
|
||||
* Trolling, insulting/derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or electronic
|
||||
address, without explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Our Responsibilities
|
||||
|
||||
Project maintainers are responsible for clarifying the standards of acceptable
|
||||
behavior and are expected to take appropriate and fair corrective action in
|
||||
response to any instances of unacceptable behavior.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or
|
||||
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||
permanently any contributor for other behaviors that they deem inappropriate,
|
||||
threatening, offensive, or harmful.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies both within project spaces and in public spaces
|
||||
when an individual is representing the project or its community. Examples of
|
||||
representing a project or community include using an official project e-mail
|
||||
address, posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event. Representation of a project may be
|
||||
further defined and clarified by project maintainers.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported by contacting the admin team at https://discord.gg/gkt4y2x. All
|
||||
complaints will be reviewed and investigated and will result in a response that
|
||||
is deemed necessary and appropriate to the circumstances. The project team is
|
||||
obligated to maintain confidentiality with regard to the reporter of an incident.
|
||||
Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
Project maintainers who do not follow or enforce the Code of Conduct in good
|
||||
faith may face temporary or permanent repercussions as determined by other
|
||||
members of the project's leadership.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
||||
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
|
||||
|
||||
[homepage]: https://www.contributor-covenant.org
|
||||
|
||||
For answers to common questions about this code of conduct, see
|
||||
https://www.contributor-covenant.org/faq
|

.github/CONTRIBUTING.md (new file, vendored, 48 lines)
@@ -0,0 +1,48 @@
## CONTRIBUTING
|
||||
|
||||
AzerothCore is a learning project, and there are lots of different ways to contribute to the project:
|
||||
|
||||
* By [testing our fixes](http://www.azerothcore.org/wiki/How-to-test-a-PR) (we can teach you how to correctly use Git to help us but that will also help you out tremendously)
|
||||
* By developing directly to the core or the modules
|
||||
* By reporting bugs within the project
|
||||
* By [creating new modules](http://www.azerothcore.org/wiki/Create-a-Module)
|
||||
* By improving our wiki
|
||||
* By providing direct support to our community (on discord, StackOverflow or specialized forums)
|
||||
* By making extra content (video tutorial for example)
|
||||
* By putting bounties on issues
|
||||
|
||||
If you want to contribute to the project, you will find a lot of resources that will guide you in our wiki.
|
||||
|
||||
Feel free to join our [Discord](https://discord.gg/gkt4y2x) server where we are happy to help new people to get started!
|
||||
|
||||
## AUTHORS & CONTRIBUTORS
|
||||
|
||||
This project exists thanks to:
|
||||
|
||||
- **The [AzerothCore developers and contributors](https://github.com/AzerothCore/azerothcore-wotlk/graphs/contributors)**
|
||||
- The [SunwellCore developers xinef and pussywizard](http://www.azerothcore.org/pages/sunwell.pl/)
|
||||
- All the [TrinityCore developers and contributors](https://github.com/TrinityCore/TrinityCore/blob/3.3.5/AUTHORS)
|
||||
- All the [MaNGOS, ScriptDev2 and UDB developers and contributors](https://github.com/cmangos/mangos-wotlk/blob/master/AUTHORS.md)
|
||||
|
||||
Check [AUTHORS](https://github.com/azerothcore/azerothcore-wotlk/blob/master/AUTHORS) for more details.
|
||||
|
||||
## IMPORTANT LINKS
|
||||
|
||||
- [Installation Guide](https://www.azerothcore.org/wiki/installation)
|
||||
- [Code of Conduct](https://github.com/azerothcore/azerothcore-wotlk/blob/master/.github/CODE_OF_CONDUCT.md)
|
||||
- [FAQ](https://www.azerothcore.org/wiki/faq)
|
||||
- [Common Errors](https://www.azerothcore.org/wiki/common-errors)
|
||||
|
||||
- [Website](http://www.azerothcore.org/)
|
||||
- [Our wiki](http://www.azerothcore.org/wiki "Easy to use and developed by AzerothCore founder")
|
||||
- [AzerothCore catalogue](http://www.azerothcore.org/catalogue/ "Modules, tools, and other things for AzerothCore") (modules, tools etc...)
|
||||
- [Module template / Module skeleton](https://github.com/azerothcore/skeleton-module/)
|
||||
- [Discord](https://discord.gg/gkt4y2x)
|
||||
- [Our Facebook page](https://www.facebook.com/AzerothCore/)
|
||||
- [Our LinkedIn page](https://www.linkedin.com/company/azerothcore/)
|
||||
|
||||
All contributions, big or small, are appreciated <3
|
||||
|
||||
The AzerothCore staff appreciate all the help and contributions that *you* put your time into.
|
||||
|
||||
Thank you!
|

.github/FUNDING.yml (new file, vendored, 1 line)
@@ -0,0 +1 @@
custom: https://www.paypal.com/donate/?hosted_button_id=L69ANPSR8BJDU

.github/ISSUE_TEMPLATE/bug_report.yml (new file, vendored, 94 lines)
@@ -0,0 +1,94 @@
name: Bug report
|
||||
description: Create a bug report to help us improve.
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for taking the time to fill out a bug report. Remember to fill out all fields including the title above.
|
||||
An issue that is not properly filled out will be closed.
|
||||
You can read more about the standards for a bug report [here](https://www.azerothcore.org/wiki/issue-tracker-standards).
|
||||
- type: textarea
|
||||
id: current
|
||||
attributes:
|
||||
label: Current Behaviour
|
||||
description: |
|
||||
Description of the problem or issue here.
|
||||
Include entries of affected creatures / items / quests / spells etc.
|
||||
If this is a crash, post the crashlog (upload to https://gist.github.com/) and include the link here.
|
||||
Never upload files! Use GIST for text and YouTube for videos!
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: expected
|
||||
attributes:
|
||||
label: Expected Blizzlike Behaviour
|
||||
description: |
|
||||
Tell us what should happen instead.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: source
|
||||
attributes:
|
||||
label: Source
|
||||
description: |
|
||||
If you have a source that proves how it is supposed to work, please add that to make it easier for devs to fix the issue.
|
||||
validations:
|
||||
required: false
|
||||
- type: textarea
|
||||
id: reproduce
|
||||
attributes:
|
||||
label: Steps to reproduce the problem
|
||||
description: |
|
||||
What does someone else need to do to encounter the same bug?
|
||||
placeholder: |
|
||||
1. Step 1
|
||||
2. Step 2
|
||||
3. Step 3
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: extra
|
||||
attributes:
|
||||
label: Extra Notes
|
||||
description: |
|
||||
Do you have any extra notes that can help solve the issue that does not fit any other field?
|
||||
placeholder: |
|
||||
None
|
||||
validations:
|
||||
required: false
|
||||
- type: textarea
|
||||
id: commit
|
||||
attributes:
|
||||
label: AC rev. hash/commit
|
||||
description: |
|
||||
Copy the result of the `.server debug` command (if you need to run it from the client get a prat addon)
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating system
|
||||
description: |
|
||||
The Operating System the Server is running on.
|
||||
i.e. Windows 11 x64, Debian 10 x64, macOS 12, Ubuntu 20.04
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: custom
|
||||
attributes:
|
||||
label: Custom changes or Modules
|
||||
description: |
|
||||
List which custom changes or modules you have applied, i.e. Eluna module, etc.
|
||||
placeholder: |
|
||||
None
|
||||
validations:
|
||||
required: false
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for your contribution.
|
||||
If you use AzerothCore regularly, we really NEED your help to:
|
||||
- Test our fixes: https://www.azerothcore.org/wiki/How-to-test-a-PR
|
||||
- Report issues or suggestions: https://github.com/azerothcore/azerothcore-wotlk/issues/new/choose
|
||||
- Improve the documentation/wiki: https://www.azerothcore.org/wiki/home
|
||||
With your help, the project can evolve much quicker!
|

.github/ISSUE_TEMPLATE/config.yml (new file, vendored, 20 lines)
@@ -0,0 +1,20 @@
|
||||
blank_issues_enabled: true
|
||||
contact_links:
|
||||
- name: Website
|
||||
url: https://www.azerothcore.org
|
||||
about: On the AC website you can find a lot of things, such as existing modules.
|
||||
- name: Wiki
|
||||
url: https://www.azerothcore.org/wiki
|
||||
about: You can find plenty of information on our Wiki.
|
||||
- name: How to ask for help
|
||||
url: https://www.azerothcore.org/wiki/How-to-ask-for-help
|
||||
about: Before submitting an issue we'd love if you take a minute to read this.
|
||||
- name: FAQ
|
||||
url: https://www.azerothcore.org/wiki/faq
|
||||
about: Frequently asked questions.
|
||||
- name: Common Errors
|
||||
url: https://www.azerothcore.org/wiki/common-errors
|
||||
about: You can find common errors and their solutions here.
|
||||
- name: Discord
|
||||
url: https://discord.gg/gkt4y2x
|
||||
about: Join the discussions over at our Discord Server.
|

.github/ISSUE_TEMPLATE/feature_request.yml (new file, vendored, 43 lines)
@@ -0,0 +1,43 @@
|
||||
name: Feature request
|
||||
description: Suggest an idea for this project
|
||||
title: "Feature: "
|
||||
labels: [Suggestion]
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for taking your time to fill out a feature request. Remember to fill out all fields including the title above.
|
||||
An issue that is not properly filled out will be closed.
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Describe your feature request or suggestion in detail
|
||||
description: |
|
||||
A clear and concise description of what you want to happen.
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: solution
|
||||
attributes:
|
||||
label: Describe a possible solution to your feature or suggestion in detail
|
||||
description: |
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
validations:
|
||||
required: false
|
||||
- type: textarea
|
||||
id: additional
|
||||
attributes:
|
||||
label: Additional context
|
||||
description: |
|
||||
Add any other context or screenshots about the feature request here.
|
||||
validations:
|
||||
required: false
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for your contribution.
|
||||
If you use AzerothCore regularly, we really NEED your help to:
|
||||
- Test our fixes: https://www.azerothcore.org/wiki/How-to-test-a-PR
|
||||
- Report issues or suggestions: https://github.com/azerothcore/azerothcore-wotlk/issues/new/choose
|
||||
- Improve the documentation/wiki: https://www.azerothcore.org/wiki/home
|
||||
With your help, the project can evolve much quicker!
|

.github/README.md (new file, vendored, 87 lines)
@@ -0,0 +1,87 @@
#  AzerothCore
|
||||
|
||||
[](https://www.codefactor.io/repository/github/azerothcore/azerothcore-wotlk)
|
||||
[](https://stackoverflow.com/questions/tagged/azerothcore?sort=newest "Ask / browse questions here")
|
||||
[](https://discord.gg/gkt4y2x "Our community hub on Discord")
|
||||
|
||||
## Build Status
|
||||
|
||||
[](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/core_matrix_build.yml)
|
||||
[](https://github.com/azerothcore/azerothcore-wotlk/actions?query=workflow%3Acore-modules-build+branch%3Amaster+event%3Apush)
|
||||
[](https://github.com/azerothcore/azerothcore-wotlk/actions?query=workflow%3Awindows-build+branch%3Amaster+event%3Apush)
|
||||
[](https://github.com/azerothcore/azerothcore-wotlk/actions?query=workflow%3Amacos-build+branch%3Amaster+event%3Apush)
|
||||
[](https://github.com/azerothcore/azerothcore-wotlk/actions?query=workflow%3Adocker-build+branch%3Amaster+event%3Apush)
|
||||
|
||||
## Introduction
|
||||
|
||||
AzerothCore is an open-source game server application and framework designed for hosting massively multiplayer online role-playing games (MMORPGs). It is based on the popular MMORPG World of Warcraft (WoW) and seeks to recreate the gameplay experience of the original game from patch 3.3.5a.
|
||||
|
||||
The original code is based on MaNGOS, TrinityCore, and SunwellCore and has since then had extensive development to improve stability, in-game mechanics, and modularity to the game. AC has also grown into a community-driven project with a significant number of contributors and developers. It is written in C++ and provides a solid foundation for creating private servers that mimic the mechanics and behavior of the official WoW servers.
|
||||
|
||||
## Philosophy
|
||||
|
||||
Our main goal is to create a playable game server, offering a fully working in-game experience.
|
||||
|
||||
Here are the main points we focus on:
|
||||
|
||||
* Stability
|
||||
* We make sure all changes pass the CIs before being merged into the master branch.
|
||||
|
||||
* Blizzlike content
|
||||
* We strive to make all in-game content to be blizzlike. Therefore we have a high standard for fixes being made.
|
||||
|
||||
* Customization
|
||||
* It is easy to customize your experience using [modules](#modules).
|
||||
|
||||
* Community driven
|
||||
* AzerothCore has an active community of developers, contributors, and users who collaborate, share knowledge, and provide support through forums, Discord channels, and other communication platforms.
|
||||
|
||||
### Modules
|
||||
|
||||
AzerothCore is designed to be highly modular, allowing developers to extend and customize the game to suit their preferences or create unique gameplay experiences. This flexibility enables the addition of custom features, content, and modifications.
|
||||
|
||||
We have a lot of modules already made by the community, many of which can be found in the [Module Catalogue](https://www.azerothcore.org/catalogue.html#/).
|
||||
|
||||
## Installation
|
||||
|
||||
Detailed installation instructions are available [here](http://www.azerothcore.org/wiki/installation).
|
||||
|
||||
## Contributing
|
||||
|
||||
AzerothCore can also serve as a learning resource for aspiring developers who want to understand how WoW servers work, how MMORPGs are structured, how game server emulators are created, or to improve their C++ and SQL knowledge.
|
||||
|
||||
If you want to contribute to the project, you will find a lot of resources that will guide you in our [wiki](https://www.azerothcore.org/wiki/contribute).
|
||||
|
||||
We also recommend you read our [Contributor Covenant Code of Conduct](https://github.com/azerothcore/azerothcore-wotlk/blob/master/.github/CODE_OF_CONDUCT.md).
|
||||
|
||||
Feel free to join our [Discord server](https://discord.gg/gkt4y2x).
|
||||
|
||||
Click on the "⭐ Star" button to help us gain more visibility on Github!
|
||||
|
||||
## Authors & Contributors
|
||||
|
||||
This project exists thanks to the [authors](https://github.com/azerothcore/azerothcore-wotlk/blob/master/AUTHORS).
|
||||
|
||||
## Important Links
|
||||
|
||||
- [Doxygen documentation](https://www.azerothcore.org/pages/doxygen/index.html)
|
||||
- [Website](http://www.azerothcore.org/)
|
||||
- [AzerothCore catalogue](http://www.azerothcore.org/catalogue.html "Modules, tools, and other stuff for AzerothCore") (modules, tools, etc...)
|
||||
- [Our Discord server](https://discord.gg/gkt4y2x)
|
||||
- [Our wiki](http://www.azerothcore.org/wiki "Easy to use and developed by AzerothCore founder")
|
||||
- [Our forum](https://github.com/azerothcore/azerothcore-wotlk/discussions/)
|
||||
- [Our Facebook page](https://www.facebook.com/AzerothCore/)
|
||||
- [Our LinkedIn page](https://www.linkedin.com/company/azerothcore/)
|
||||
|
||||
## License
|
||||
|
||||
- The new AzerothCore source components are released under the [GNU AGPL v3](https://github.com/azerothcore/azerothcore-wotlk/blob/master/LICENSE-AGPL3)
|
||||
- The old sources based on MaNGOS/TrinityCore are released under the [GNU GPL v2](https://github.com/azerothcore/azerothcore-wotlk/blob/master/LICENSE-GPL2)
|
||||
|
||||
It's important to note that AzerothCore is not an official Blizzard Entertainment product, and it is not affiliated with or endorsed by World of Warcraft or Blizzard Entertainment. AzerothCore does not in any case sponsor nor support illegal public servers. If you use this project to run an illegal public server and not for testing and learning it is your own personal choice.
|
||||
|
||||
## Special thanks
|
||||
|
||||
[JetBrains](https://www.jetbrains.com/?from=AzerothCore) is providing free [open-source licenses](https://www.jetbrains.com/community/opensource/) to the AzerothCore developers.
|
||||
|
||||
[](https://www.jetbrains.com/?from=AzerothCore)
|

.github/SECURITY.md (new file, vendored, 75 lines)
@@ -0,0 +1,75 @@
# Security Policy
|
||||
|
||||
## Supported Versions
|
||||
|
||||
We support the following versions of dependencies.
|
||||
|
||||
:white_check_mark: = supported
|
||||
|
||||
:red_circle: = NOT supported
|
||||
|
||||
unspecified = might work but no guarantee
|
||||
|
||||
Versions of AzerothCore:
|
||||
|
||||
| AzerothCore Branch | Supported |
|
||||
| ------------------ | ------------------ |
|
||||
| master | :white_check_mark: |
|
||||
|
||||
Versions of MySQL:
|
||||
|
||||
| MySQL Version | Supported |
|
||||
| ------------- | ------------------ |
|
||||
| 8.1 | :white_check_mark: |
|
||||
| 8.0 | :white_check_mark: |
|
||||
| 5.7 | :white_check_mark: |
|
||||
| 5.6 and lower | :red_circle: |
|
||||
|
||||
Versions of MariaDB:
|
||||
|
||||
| MariaDB Version | Supported |
|
||||
| --------------- | ------------------ |
|
||||
| 10.6 | :white_check_mark: |
|
||||
| 10.5 | :white_check_mark: |
|
||||
| 10.4 and lower | :red_circle: |
|
||||
|
||||
Versions of CLang:
|
||||
|
||||
| CLang Version | Supported |
|
||||
| ------------- | ------------------ |
|
||||
| 12 | :white_check_mark: |
|
||||
| 11 | :white_check_mark: |
|
||||
| 10 | :white_check_mark: |
|
||||
| 9 and lower | :red_circle: |
|
||||
|
||||
Versions of GCC:
|
||||
|
||||
| GCC Version | Supported |
|
||||
| ----------- | ------------------ |
|
||||
| 10 | :white_check_mark: |
|
||||
| 9 | :white_check_mark: |
|
||||
| 8 | :white_check_mark: |
|
||||
| 7 and lower | :red_circle: |
|
||||
|
||||
Versions of Ubuntu:
|
||||
|
||||
| Ubuntu version | Supported |
|
||||
| -------------- | ------------------ |
|
||||
| 20.04 | :white_check_mark: |
|
||||
| 18.04 and lower| :red_circle: |
|
||||
|
||||
Versions of macOS:
|
||||
|
||||
| macOS Version | Supported |
|
||||
| -------------- | ------------------ |
|
||||
| 12 | :white_check_mark: |
|
||||
| 11 | :white_check_mark: |
|
||||
| 10.15 and lower| :red_circle: |
|
||||
|
||||
**Note**: We do NOT support any repacks that may or may not have been made based on AzerothCore. Nor do we support any of the releases that are made under AzerothCore.
|
||||
|
||||
## Reporting a Vulnerability
|
||||
|
||||
We class a vulnerability as any hack or exploit that has an impact on server performance or that gives unfair advantages in the game (e.g. fly hacking or injection tools).
|
||||
|
||||
If a new vulnerability is found you should always create a new [bug report](https://github.com/azerothcore/azerothcore-wotlk/issues/new/choose).
|

.github/SUPPORT.md (new file, vendored, 97 lines)
@@ -0,0 +1,97 @@
# How to ask for help
|
||||
|
||||
### Need help?
|
||||
|
||||
Do you need support with AzerothCore? No worries, we're happy to help you!
|
||||
|
||||
Whether you have troubles installing AzerothCore, or you want to ask how-to or generic help questions, **we will help you**.
|
||||
|
||||
We just want you to ask for support in the **proper way**. Please read this document before asking for any help.
|
||||
|
||||
### Why is the "proper way" so important?
|
||||
|
||||
If you ask a question directly in the chat, it can get easily lost and you might never be helped unless there is someone online in that specific moment that can help you.
|
||||
|
||||
If you put your question on StackOverflow it will **stay** there and someone can help you at any moment. You can also link your question in the chat to give it more visibility. Doesn't make sense for you? Just keep reading.
|
||||
|
||||
Also, most of the questions asked by users are repetitive. So we need an efficient way to handle all the support requests. We use StackOverflow for support questions and GitHub for bug reports.
|
||||
|
||||
Sounds complicated? Not at all! **Just keep reading**.
|
||||
|
||||
### Bug reports
|
||||
|
||||
A game feature (e.g. spell/quest/talent/etc..) doesn't work as it is supposed to?
|
||||
|
||||
Congratulations! You've just found a bug, please search among the [existing issues](https://github.com/azerothcore/azerothcore-wotlk/issues). There is a good chance that someone else has already reported the same bug that you found, in such case we kindly ask you to "confirm" it by leaving a comment.
|
||||
|
||||
Example:
|
||||

|
||||
|
||||
If after searching for an existing issue report, you didn't find any, then you should [open a new issue](https://github.com/azerothcore/azerothcore-wotlk/issues/new).
|
||||
|
||||
### Other support requests
|
||||
|
||||
- Getting an error while installing AzerothCore?
|
||||
- Is there something in the documentation that is not clear for you?
|
||||
- Do you want to know how to do something specific with AzerothCore?
|
||||
- Are you trying to do something like implementing a new feature, fixing a bug, etc...?
|
||||
- Do you have questions about C++ or SQL code that is related to AzerothCore or other AC custom modules?
|
||||
|
||||
These kinds of questions are considered support questions and are handled via [StackOverflow](https://stackoverflow.com/questions/tagged/azerothcore). **Read below**
|
||||
|
||||
### Do I need to register in StackOverflow?
|
||||
|
||||
**Not necessarily!** If you have a Google or a Facebook account, you can already log in [StackOverflow](https://stackoverflow.com/questions/tagged/azerothcore) (click the **_Log in_** button on the top-right corner of the website).
|
||||
|
||||

|
||||
|
||||
|
||||
### Search among the existing questions
|
||||
|
||||
There is a good chance that your question has already been asked by someone else, so please **search** it on [StackOverflow](https://stackoverflow.com/questions/tagged/azerothcore).
|
||||
|
||||
For example, are you getting an error from MySQL? Search it under the **[azerothcore]** tag!
|
||||
|
||||

|
||||
|
||||
Oh, there is already a question! And it has an answer! Let's open it!
|
||||
|
||||

|
||||
|
||||
**Another example**, imagine you're wondering whether you can use LUA scripts on AzerothCore:
|
||||
|
||||

|
||||
|
||||
There is already a question and answer for that! Let's upvote the guys who asked and answered:
|
||||
|
||||

|
||||
|
||||
|
||||
## How to ask new questions
|
||||
|
||||
What if you can't find a question related to your specific problem? **Let's [open a new question](https://stackoverflow.com/questions/ask)**.
|
||||
|
||||
Remember to:
|
||||
|
||||
- Add a descriptive message.
|
||||
- **Bad** example `I got DB error plz help me`.
|
||||
- **Good** example: `After importing the sql updates, I get the error "XXX"`
|
||||
|
||||
- Don't forget the **[azerothcore]** tag!
|
||||
- 
|
||||
|
||||
- It's useful to add **4 more tags** according to the category of your question (e.g. `c++`, `docker`, `MySQL`, `sql`, etc... ).
|
||||
|
||||
- Link your question in the #support-general channel of our [Discord chat](https://discordapp.com/channels/217589275766685707/284406375495368704)
|
||||
|
||||
- Read: [stackoverflow.com/help/how-to-ask](https://stackoverflow.com/help/how-to-ask)
|
||||
|
||||
|
||||
### Share your knowledge!
|
||||
|
||||
Do you have anything to share with the community? Do you feel like some information could be useful to someone else (or to yourself in the future)? Have you solved a problem that took a while to figure out?
|
||||
|
||||
Ask a question on StackOverflow and then **answer it yourself**! Is it allowed? Yes, it is! Read this:
|
||||
|
||||
[https://stackoverflow.com/help/self-answer](https://stackoverflow.com/help/self-answer)
|
||||
|

.github/labeler.yml (new file, vendored, 37 lines)
@@ -0,0 +1,37 @@
file-cpp:
|
||||
- src/**/*.cpp
|
||||
- src/**/*.h
|
||||
- deps/**/*.cpp
|
||||
- deps/**/*.h
|
||||
|
||||
DB:
|
||||
- data/**/*.sql
|
||||
|
||||
CORE:
|
||||
- any: ['src/**/*', '!src/server/scripts/*', '!src/tests/*']
|
||||
|
||||
Script:
|
||||
- src/server/scripts/**/*.cpp
|
||||
- src/server/scripts/**/*.h
|
||||
|
||||
UnitTests:
|
||||
- src/test/**/*
|
||||
|
||||
Documentation:
|
||||
- ./*.md
|
||||
|
||||
Bash:
|
||||
- ./*.sh
|
||||
- apps/**/*.sh
|
||||
- conf/**/*.sh
|
||||
- deps/**/*.sh
|
||||
- modules/**/*.sh
|
||||
|
||||
CMake:
|
||||
- ./*.cmake
|
||||
|
||||
Workflow:
|
||||
- .github/workflows/*
|
||||
|
||||
Batch:
|
||||
- apps/**/*.bat
|

.github/workflows/add-to-project.yml (new file, vendored, 106 lines)
@@ -0,0 +1,106 @@
|
||||
name: Auto Assign to Project(s)
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [opened, labeled]
|
||||
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
jobs:
|
||||
assign_one_project:
|
||||
runs-on: ubuntu-latest
|
||||
name: Assign to One Project
|
||||
steps:
|
||||
|
||||
- name: Assign issues with `ChromieCraft Generic` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, 'ChromieCraft Generic')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/20'
|
||||
|
||||
- name: Assign issues with `1-19` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '1-19')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/14'
|
||||
|
||||
- name: Assign issues with `20-29` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '20-29')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/17'
|
||||
|
||||
- name: Assign issues with `30-39` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '30-39')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/23'
|
||||
|
||||
- name: Assign issues with `40-49` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '40-49')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/24'
|
||||
|
||||
- name: Assign issues with `50-59` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '50-59')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/25'
|
||||
|
||||
- name: Assign issues with `60` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '60')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/22'
|
||||
|
||||
- name: Assign issues with `61-64` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '61-64')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/32'
|
||||
|
||||
- name: Assign issues with `65-69` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '65-69')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/36'
|
||||
|
||||
- name: Assign issues with `70` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '70')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/26'
|
||||
|
||||
- name: Assign issues with `71-74` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '71-74')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/33'
|
||||
|
||||
- name: Assign issues with `75-79` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '75-79')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/37'
|
||||
|
||||
- name: Assign issues with `80` label to their project
|
||||
uses: srggrs/assign-one-project-github-action@1.2.1
|
||||
if: |
|
||||
contains(github.event.issue.labels.*.name, '80')
|
||||
with:
|
||||
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/38'
|
||||
|

.github/workflows/build_dbimport.yml (new file, vendored, 49 lines)
@@ -0,0 +1,49 @@
|
||||
name: build-db
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master' # only default branch
|
||||
pull_request:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.head_ref }} || concat(${{ github.ref }}, ${{ github.workflow }})
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# the result of the matrix will be the combination of all attributes, so we get os*compiler*modules builds
|
||||
os: [ubuntu-20.04]
|
||||
compiler: [clang]
|
||||
runs-on: ${{ matrix.os }}
|
||||
name: ${{ matrix.compiler }}
|
||||
env:
|
||||
COMPILER: ${{ matrix.compiler }}
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Cache
|
||||
uses: actions/cache@v3
|
||||
env:
|
||||
cache-name: cache-db
|
||||
with:
|
||||
path: var/ccache
|
||||
key: ${{ env.cache-name }}-${{ matrix.os }}-${{ matrix.compiler }}-${{ github.ref }}-${{ github.sha }}
|
||||
restore-keys: |
|
||||
${{ env.cache-name }}-${{ matrix.os }}-${{ matrix.compiler }}-${{ github.ref }}-
|
||||
${{ env.cache-name }}-${{ matrix.os }}-${{ matrix.compiler }}-
|
||||
${{ env.cache-name }}-${{ matrix.os }}-
|
||||
- name: Configure OS
|
||||
run: source ./acore.sh install-deps
|
||||
env:
|
||||
CONTINUOUS_INTEGRATION: true
|
||||
- name: Create conf/config.sh
|
||||
run: source ./apps/ci/ci-conf-db.sh
|
||||
- name: Build
|
||||
run: source ./apps/ci/ci-compile.sh
|
||||
- name: Process pending sql
|
||||
run: bash bin/acore-db-pendings
|
||||
- name: Dry run
|
||||
run: source ./apps/ci/ci-dry-run.sh dbimport
|

.github/workflows/check_pending_sql.yml (new file, vendored, 12 lines)
@@ -0,0 +1,12 @@
name: Check pending SQL
on:
  pull_request:

jobs:
  check-pending-sql:
    runs-on: ubuntu-latest
    if: github.repository == 'azerothcore/azerothcore-wotlk'
    steps:
      - uses: actions/checkout@v4
      - name: Check pending SQL
        run: source ./apps/ci/ci-pending.sh

.github/workflows/codestyle.yml (new file, vendored, 17 lines)
@@ -0,0 +1,17 @@
|
||||
name: check-codestyle
|
||||
on:
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
check-codestyle:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-20.04]
|
||||
runs-on: ${{ matrix.os }}
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk'
|
||||
name: check codestyle
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Check core codestyle
|
||||
run: source ./apps/ci/ci-codestyle.sh
|

.github/workflows/core_build.yml (new file, vendored, 52 lines)
@@ -0,0 +1,52 @@
|
||||
name: main
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.head_ref }} || concat(${{ github.ref }}, ${{ github.workflow }})
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# the result of the matrix will be the combination of all attributes, so we get os*compiler builds
|
||||
os: [ubuntu-20.04]
|
||||
compiler: [clang]
|
||||
runs-on: ${{ matrix.os }}
|
||||
name: ${{ matrix.os }}-${{ matrix.compiler }}
|
||||
env:
|
||||
COMPILER: ${{ matrix.compiler }}
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: var/ccache
|
||||
key: ccache:${{ matrix.os }}:${{ matrix.compiler }}:${{ github.ref }}:${{ github.sha }}
|
||||
restore-keys: |
|
||||
ccache:${{ matrix.os }}:${{ matrix.compiler }}:${{ github.ref }}
|
||||
ccache:${{ matrix.os }}:${{ matrix.compiler }}
|
||||
- name: Configure OS
|
||||
run: source ./acore.sh install-deps
|
||||
env:
|
||||
CONTINUOUS_INTEGRATION: true
|
||||
- name: Create conf/config.sh
|
||||
run: source ./apps/ci/ci-conf-core.sh
|
||||
- name: Process pending sql
|
||||
run: bash bin/acore-db-pendings
|
||||
- name: Build
|
||||
run: source ./apps/ci/ci-compile.sh
|
||||
- name: Dry run authserver
|
||||
run: source ./apps/ci/ci-dry-run.sh authserver
|
||||
- name: Dry run worldserver
|
||||
run: source ./apps/ci/ci-dry-run.sh worldserver
|
||||
- name: Check startup errors
|
||||
run: source ./apps/ci/ci-error-check.sh
|
||||
- name: Run unit tests
|
||||
run: source ./apps/ci/ci-run-unit-tests.sh
|
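The job above is a chain of repository scripts, so it can be approximated locally; this is only a sketch and assumes the scripts behave the same outside GitHub Actions (paths and order are taken from the steps above):

    # hedged local equivalent of the core_build CI steps
    export COMPILER=clang
    source ./acore.sh install-deps
    source ./apps/ci/ci-conf-core.sh
    bash bin/acore-db-pendings
    source ./apps/ci/ci-compile.sh
    source ./apps/ci/ci-dry-run.sh authserver
    source ./apps/ci/ci-dry-run.sh worldserver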

.github/workflows/core_matrix_build.yml (new file, vendored, 69 lines)
@@ -0,0 +1,69 @@
|
||||
name: core
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
types: ['labeled', 'opened', 'synchronize', 'reopened']
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.head_ref }} || concat(${{ github.ref }}, ${{ github.workflow }})
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# the result of the matrix will be the combination of all attributes, so we get os*compiler builds
|
||||
include:
|
||||
- os: ubuntu-20.04
|
||||
compiler: clang12
|
||||
- os: ubuntu-20.04
|
||||
compiler: clang11
|
||||
- os: ubuntu-20.04
|
||||
compiler: gcc # default in 20.04 is gcc 9
|
||||
- os: ubuntu-20.04
|
||||
compiler: gcc10
|
||||
runs-on: ${{ matrix.os }}
|
||||
name: ${{ matrix.os }}-${{ matrix.compiler }}
|
||||
env:
|
||||
COMPILER: ${{ matrix.compiler }}
|
||||
if: |
|
||||
github.repository == 'azerothcore/azerothcore-wotlk'
|
||||
&& !github.event.pull_request.draft
|
||||
&& (
|
||||
github.ref == 'refs/heads/master'
|
||||
|| contains(github.event.pull_request.labels.*.name, 'file-cpp'
|
||||
|| github.event.label.name == 'file-cpp'
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-build')
|
||||
|| github.event.label.name == 'run-build')
|
||||
)
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: var/ccache
|
||||
key: ccache:${{ matrix.os }}:${{ matrix.compiler }}:${{ github.ref }}:${{ github.sha }}
|
||||
restore-keys: |
|
||||
ccache:${{ matrix.os }}:${{ matrix.compiler }}:${{ github.ref }}
|
||||
ccache:${{ matrix.os }}:${{ matrix.compiler }}
|
||||
- name: Configure OS
|
||||
run: source ./acore.sh install-deps
|
||||
env:
|
||||
CONTINUOUS_INTEGRATION: true
|
||||
- name: Create conf/config.sh
|
||||
run: source ./apps/ci/ci-conf-core.sh
|
||||
- name: Process pending sql
|
||||
run: bash bin/acore-db-pendings
|
||||
- name: Build
|
||||
run: source ./apps/ci/ci-compile.sh
|
||||
- name: Dry run authserver
|
||||
run: source ./apps/ci/ci-dry-run.sh authserver
|
||||
- name: Dry run worldserver
|
||||
run: source ./apps/ci/ci-dry-run.sh worldserver
|
||||
- name: Check startup errors
|
||||
run: source ./apps/ci/ci-error-check.sh
|
||||
- name: Run unit tests
|
||||
run: source ./apps/ci/ci-run-unit-tests.sh
|

.github/workflows/core_modules_build.yml (new file, vendored, 64 lines)
@@ -0,0 +1,64 @@
|
||||
name: core-modules-build
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
types: ['labeled', 'opened', 'synchronize', 'reopened']
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.head_ref }} || concat(${{ github.ref }}, ${{ github.workflow }})
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build-modules:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# the result of the matrix will be the combination of all attributes, so we get os*compiler*modules builds
|
||||
os: [ubuntu-20.04]
|
||||
compiler: [clang]
|
||||
modules: [with]
|
||||
runs-on: ${{ matrix.os }}
|
||||
name: ${{ matrix.os }}-${{ matrix.compiler }}-${{ matrix.modules }}-modules
|
||||
env:
|
||||
COMPILER: ${{ matrix.compiler }}
|
||||
if: |
|
||||
github.repository == 'azerothcore/azerothcore-wotlk'
|
||||
&& !github.event.pull_request.draft
|
||||
&& (
|
||||
github.ref == 'refs/heads/master'
|
||||
|| contains(github.event.pull_request.labels.*.name, 'file-cpp'
|
||||
|| github.event.label.name == 'file-cpp'
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-build')
|
||||
|| github.event.label.name == 'run-build')
|
||||
)
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Checkout modules
|
||||
run: ./apps/ci/ci-install-modules.sh
|
||||
if: matrix.modules == 'with'
|
||||
- name: Cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: var/ccache
|
||||
key: ccache:${{ matrix.os }}:${{ matrix.compiler }}:${{ matrix.modules }}-modules:${{ github.ref }}:${{ github.sha }}
|
||||
restore-keys: |
|
||||
ccache:${{ matrix.os }}:${{ matrix.compiler }}:${{ matrix.modules }}-modules:${{ github.ref }}
|
||||
ccache:${{ matrix.os }}:${{ matrix.compiler }}:${{ matrix.modules }}-modules
|
||||
- name: Configure OS
|
||||
run: source ./acore.sh install-deps
|
||||
env:
|
||||
CONTINUOUS_INTEGRATION: true
|
||||
- name: Create conf/config.sh
|
||||
run: source ./apps/ci/ci-conf-core.sh
|
||||
- name: Process pending sql
|
||||
run: bash bin/acore-db-pendings
|
||||
- name: Build
|
||||
run: source ./apps/ci/ci-compile.sh
|
||||
- name: Dry run
|
||||
run: source ./apps/ci/ci-dry-run.sh worldserver
|
||||
- name: Check startup errors
|
||||
run: source ./apps/ci/ci-error-check.sh
|
||||
- name: Run unit tests
|
||||
run: source ./apps/ci/ci-run-unit-tests.sh
|

.github/workflows/cpp-check.yml (new file, vendored, 34 lines)
@@ -0,0 +1,34 @@
|
||||
name: cpp-check
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- "master"
|
||||
paths:
|
||||
- src/**
|
||||
- "!README.md"
|
||||
- "!docs/**"
|
||||
pull_request:
|
||||
paths:
|
||||
- src/**
|
||||
- "!README.md"
|
||||
- "!docs/**"
|
||||
|
||||
jobs:
|
||||
cpp-check:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
runs-on: ubuntu-22.04
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk'
|
||||
name: cpp check
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: cpp check
|
||||
run: |
|
||||
sudo apt update -y
|
||||
sudo apt install -y cppcheck
|
||||
cppcheck --force --inline-suppr --suppressions-list=./.suppress.cppcheck src/ --output-file=report.txt
|
||||
|
||||
if [ -s report.txt ]; then # if file is not empty
|
||||
cat report.txt
|
||||
exit 1 # let github action fails
|
||||
fi
|
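The same check can be reproduced locally before opening a pull request; the commands mirror the workflow step above (the suppression list ships with the repository):

    sudo apt update -y && sudo apt install -y cppcheck
    cppcheck --force --inline-suppr --suppressions-list=./.suppress.cppcheck src/ --output-file=report.txt
    # a non-empty report is what makes the CI job fail
    [ -s report.txt ] && cat report.txt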

.github/workflows/docker_build.yml (new file, vendored, 180 lines)
@@ -0,0 +1,180 @@
|
||||
name: docker-build
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
types: ['labeled', 'opened', 'synchronize', 'reopened']
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.head_ref }} || concat(${{ github.ref }}, ${{ github.workflow }})
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
docker-build-n-deploy-dev:
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
os: [ubuntu-20.04]
|
||||
runs-on: ${{ matrix.os }}
|
||||
if: |
|
||||
github.repository == 'azerothcore/azerothcore-wotlk'
|
||||
&& !github.event.pull_request.draft
|
||||
&& (github.ref == 'refs/heads/master' || contains(github.event.pull_request.labels.*.name, 'run-build') || github.event.label.name == 'run-build')
|
||||
env:
|
||||
COMPOSE_DOCKER_CLI_BUILD: 1
|
||||
DOCKER_BUILDKIT: 1
|
||||
|
||||
steps:
|
||||
- name: Extract branch name
|
||||
shell: bash
|
||||
run: echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})"
|
||||
id: extract_branch
|
||||
|
||||
- name: Configure
|
||||
run: |
|
||||
sudo rm -rf /usr/local/lib/android
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf /opt/ghc
|
||||
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
|
||||
docker --version
|
||||
docker compose version
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
# we need the entire history for the ac-dev-server
|
||||
# with:
|
||||
# fetch-depth: 2
|
||||
|
||||
# - name: Set up Docker Buildx
|
||||
# uses: docker/setup-buildx-action@v1
|
||||
|
||||
- name: Login to Docker Hub
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && steps.extract_branch.outputs.branch == 'master'
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Build Dev
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk'
|
||||
env:
|
||||
#DOCKER_IMAGE_TAG: ${{ steps.extract_branch.outputs.branch }}
|
||||
DOCKER_CLIENT_TIMEOUT: 400
|
||||
COMPOSE_HTTP_TIMEOUT: 400
|
||||
run: |
|
||||
export DOCKER_USER_ID=$(id -u)
|
||||
export DOCKER_GROUP_ID=$(id -u)
|
||||
# pull the images first to load the docker cache layers
|
||||
#./acore.sh docker pull
|
||||
./acore.sh docker build
|
||||
output=$(./acore.sh version | grep "AzerothCore Rev. ") && version=${output#"AzerothCore Rev. "}
|
||||
DOCKER_IMAGE_TAG=$version docker compose --profile dev --profile local build
|
||||
|
||||
- name: Deploy Dev
|
||||
#env:
|
||||
# DOCKER_IMAGE_TAG: ${{ steps.extract_branch.outputs.branch }}
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && steps.extract_branch.outputs.branch == 'master'
|
||||
run: |
|
||||
docker compose --profile dev --profile local push
|
||||
output=$(./acore.sh version | grep "AzerothCore Rev. ") && version=${output#"AzerothCore Rev. "}
|
||||
DOCKER_IMAGE_TAG=$version docker compose --profile dev --profile local push
|
||||
|
||||
docker-build-n-deploy-prod:
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
os: [ubuntu-20.04]
|
||||
runs-on: ${{ matrix.os }}
|
||||
if: |
|
||||
github.repository == 'azerothcore/azerothcore-wotlk'
|
||||
&& !github.event.pull_request.draft
|
||||
&& (github.ref == 'refs/heads/master' || contains(github.event.pull_request.labels.*.name, 'run-build') || github.event.label.name == 'run-build')
|
||||
env:
|
||||
COMPOSE_DOCKER_CLI_BUILD: 1
|
||||
DOCKER_BUILDKIT: 1
|
||||
|
||||
steps:
|
||||
- name: Extract branch name
|
||||
shell: bash
|
||||
run: echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})"
|
||||
id: extract_branch
|
||||
|
||||
- name: Configure
|
||||
run: |
|
||||
sudo rm -rf /usr/local/lib/android
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf /opt/ghc
|
||||
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
|
||||
docker --version
|
||||
docker compose version
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
# we need the entire history for the ac-dev-server
|
||||
# with:
|
||||
# fetch-depth: 2
|
||||
|
||||
# - name: Set up Docker Buildx
|
||||
# uses: docker/setup-buildx-action@v1
|
||||
|
||||
- name: Login to Docker Hub
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && steps.extract_branch.outputs.branch == 'master'
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: var/docker/ccache
|
||||
key: ccache:${{ matrix.os }}:clang:without-modules:${{ github.ref }}:${{ github.sha }}
|
||||
restore-keys: |
|
||||
ccache:${{ matrix.os }}:clang:without-modules:${{ github.ref }}
|
||||
ccache:${{ matrix.os }}:clang:without-modules
|
||||
|
||||
- name: Build Production images
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk'
|
||||
env:
|
||||
#DOCKER_IMAGE_TAG: ${{ steps.extract_branch.outputs.branch }}
|
||||
DOCKER_CLIENT_TIMEOUT: 220
|
||||
COMPOSE_HTTP_TIMEOUT: 220
|
||||
run: |
|
||||
export DOCKER_USER_ID=$(id -u)
export DOCKER_GROUP_ID=$(id -g)
# pull the images first to load the docker cache layers
|
||||
#./acore.sh docker prod:pull
|
||||
./acore.sh docker prod:build
|
||||
output=$(./acore.sh version | grep "AzerothCore Rev. ") && version=${output#"AzerothCore Rev. "}
|
||||
DOCKER_IMAGE_TAG=$version ./acore.sh docker prod:build
|
||||
# create the container to allow the copy right after
|
||||
docker compose create ac-build-prod
|
||||
docker compose cp ac-build-prod:/azerothcore/var/ccache var/docker/
|
||||
echo "ccache exported"
|
||||
|
||||
- name: Deploy Production images
|
||||
#env:
|
||||
# DOCKER_IMAGE_TAG: ${{ steps.extract_branch.outputs.branch }}
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && steps.extract_branch.outputs.branch == 'master'
|
||||
run: |
|
||||
docker compose --profile prod push
|
||||
output=$(./acore.sh version | grep "AzerothCore Rev. ") && version=${output#"AzerothCore Rev. "}
|
||||
DOCKER_IMAGE_TAG=$version docker compose --profile prod push
|
||||
|
||||
|
||||
dispatch-acore-docker:
|
||||
needs: [ docker-build-n-deploy-prod , docker-build-n-deploy-dev]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Extract branch name
|
||||
shell: bash
|
||||
run: echo "##[set-output name=branch;]$(echo ${GITHUB_REF#refs/heads/})"
|
||||
id: extract_branch
|
||||
|
||||
- name: Repository Dispatch
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && steps.extract_branch.outputs.branch == 'master'
|
||||
uses: peter-evans/repository-dispatch@v2
|
||||
with:
|
||||
token: ${{ secrets.ACORE_DOCKER_REPO_ACCESS_TOKEN }}
|
||||
repository: azerothcore/acore-docker
|
||||
event-type: azerothcore-new-images
|
||||
client-payload: '{"ref": "${{ github.ref }}", "sha": "${{ github.sha }}"}'
|
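Note on the Build/Deploy steps above: the image tag is derived from the revision string printed by ./acore.sh version. A minimal local sketch of that extraction (assuming ./acore.sh is available in the checkout and prints a line starting with "AzerothCore Rev. "):

# strip the "AzerothCore Rev. " prefix to obtain the tag used for the compose images
output=$(./acore.sh version | grep "AzerothCore Rev. ")
version=${output#"AzerothCore Rev. "}
echo "images would be tagged as: $version"
DOCKER_IMAGE_TAG=$version docker compose --profile dev --profile local config >/dev/null   # dry check of the compose files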
49
.github/workflows/import_pending.yml
vendored
Normal file
@ -0,0 +1,49 @@
|
||||
name: import-pending
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
import-pending:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
runs-on: ubuntu-20.04
|
||||
permissions: write-all
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
# If we're fetching all the history in a later step it makes sense to
|
||||
# pre-load it now
|
||||
fetch-depth: 0
|
||||
ref: ${{ github.ref_name }}
|
||||
|
||||
- uses: denoland/setup-deno@v1
|
||||
with:
|
||||
# Specifies latest 1.x
|
||||
deno-version: "~1.0"
|
||||
|
||||
- name: Import and commit pending sql
|
||||
run: |
|
||||
git config --local user.email "github-actions[bot]@users.noreply.github.com"
|
||||
git config --local user.name "github-actions[bot]"
|
||||
# Get the latest changes from git
|
||||
git pull --rebase origin "${{ github.ref_name }}"
|
||||
bash bin/acore-db-pendings
|
||||
deno run --allow-all --unstable apps/ci/ci-pending-changelogs.ts
|
||||
git add -A .
|
||||
git commit -am "chore(DB): import pending files" -m "Referenced commit(s): ${GITHUB_SHA}" || true
|
||||
env:
|
||||
# Noting that the branch name can only be master, as per the event
|
||||
# triggering this action
|
||||
BRANCH: ${{ github.ref_name }}
|
||||
|
||||
- name: Push changes
|
||||
uses: ad-m/github-push-action@master
|
||||
with:
|
||||
github_token: ${{ secrets.AC_GITHUB_TOKEN }}
|
||||
# Noting that the branch name can only be master, as per the event
|
||||
# triggering this action
|
||||
branch: ${{ github.ref_name }}
|
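The import job above is a thin wrapper around repository scripts; the same sequence can be dry-run locally before pushing (a sketch using the commands from the step, where bin/acore-db-pendings and the Deno changelog script are assumed to behave as they do in CI):

git config --local user.name "github-actions[bot]"
git config --local user.email "github-actions[bot]@users.noreply.github.com"
bash bin/acore-db-pendings                                   # process the pending SQL files
deno run --allow-all --unstable apps/ci/ci-pending-changelogs.ts
git add -A .
git commit -am "chore(DB): import pending files" || true     # "|| true" keeps the run green when nothing changed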
16
.github/workflows/issue-labeler.yml
vendored
Normal file
@ -0,0 +1,16 @@
name: "Issue Labeler"
on:
  issues:
    types: [opened]

jobs:
  issue_labeler:
    if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
    runs-on: ubuntu-latest
    name: Issue Labeler
    steps:
      - name: Issue Labeler
        id: issue-labeler
        uses: azerothcore/GitHub-Actions@issue-labeler-1.0.1
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
44
.github/workflows/macos_build.yml
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
name: macos-build
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
types: ['labeled', 'opened', 'synchronize', 'reopened']
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.head_ref }} || concat(${{ github.ref }}, ${{ github.workflow }})
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
macos-build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os:
|
||||
- macos-11
|
||||
- macos-12
|
||||
runs-on: ${{ matrix.os }}
|
||||
name: ${{ matrix.os }}
|
||||
if: |
|
||||
github.repository == 'azerothcore/azerothcore-wotlk'
|
||||
&& !github.event.pull_request.draft
|
||||
&& (github.ref == 'refs/heads/master' || contains(github.event.pull_request.labels.*.name, 'run-build') || github.event.label.name == 'run-build')
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Cache
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: ~/Library/Caches/ccache
|
||||
key: ccache:${{ matrix.os }}:${{ github.ref }}:${{ github.sha }}
|
||||
restore-keys: |
|
||||
ccache:${{ matrix.os }}:${{ github.ref }}
|
||||
ccache:${{ matrix.os }}
|
||||
- name: Install latest bash
|
||||
run: brew install bash
|
||||
- name: Configure OS
|
||||
run: source ./acore.sh install-deps
|
||||
- name: Build
|
||||
run: source ./apps/ci/mac/ci-compile.sh
|
||||
- name: Run unit tests
|
||||
run: source ./apps/ci/ci-run-unit-tests.sh
|
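The macOS job installs a newer bash first, presumably because the scripts under apps/ci need a more recent version than the bash 3.2 shipped with macOS; the whole job can be reproduced locally with the same commands (sketch, assuming Homebrew is installed):

brew install bash                        # "Install latest bash" step
source ./acore.sh install-deps           # "Configure OS" step
source ./apps/ci/mac/ci-compile.sh       # "Build" step
source ./apps/ci/ci-run-unit-tests.sh    # "Run unit tests" step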
18
.github/workflows/pr_labeler.yml
vendored
Normal file
@ -0,0 +1,18 @@
name: PR Labeler
on:
  - pull_request_target

jobs:
  triage:
    runs-on: ubuntu-20.04
    permissions: write-all
    if: github.repository == 'azerothcore/azerothcore-wotlk'
    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: true
      - uses: actions/labeler@v4
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          configuration-path: .github/labeler.yml
          sync-labels: true
46
.github/workflows/tools_build.yml
vendored
Normal file
@ -0,0 +1,46 @@
|
||||
name: tools
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.head_ref }} || concat(${{ github.ref }}, ${{ github.workflow }})
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# the result of the matrix will be the combination of all attributes, so we get os*compiler builds
|
||||
os: [ubuntu-20.04]
|
||||
compiler: [clang]
|
||||
runs-on: ${{ matrix.os }}
|
||||
name: ${{ matrix.os }}-${{ matrix.compiler }}
|
||||
env:
|
||||
COMPILER: ${{ matrix.compiler }}
|
||||
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Cache
|
||||
uses: actions/cache@v3
|
||||
env:
|
||||
cache-name: cache-tools
|
||||
with:
|
||||
path: var/ccache
|
||||
key: ${{ env.cache-name }}-${{ matrix.os }}-${{ matrix.compiler }}-${{ github.ref }}-${{ github.sha }}
|
||||
restore-keys: |
|
||||
${{ env.cache-name }}-${{ matrix.os }}-${{ matrix.compiler }}-${{ github.ref }}-
|
||||
${{ env.cache-name }}-${{ matrix.os }}-${{ matrix.compiler }}-
|
||||
${{ env.cache-name }}-${{ matrix.os }}-
|
||||
- name: Configure OS
|
||||
run: source ./acore.sh install-deps
|
||||
env:
|
||||
CONTINUOUS_INTEGRATION: true
|
||||
- name: Create conf/config.sh
|
||||
run: source ./apps/ci/ci-conf-tools.sh
|
||||
- name: Build
|
||||
run: source ./apps/ci/ci-compile.sh
|
||||
|
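The cache step above relies on actions/cache falling back through restore-keys from most to least specific; spelled out with the values used in this workflow (illustrative sketch only, <sha> is a placeholder):

# exact key is tried first, then progressively broader prefixes
key="cache-tools-ubuntu-20.04-clang-refs/heads/master-<sha>"
restore_keys=(
  "cache-tools-ubuntu-20.04-clang-refs/heads/master-"
  "cache-tools-ubuntu-20.04-clang-"
  "cache-tools-ubuntu-20.04-"
)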
62
.github/workflows/windows_build.yml
vendored
Normal file
@ -0,0 +1,62 @@
|
||||
name: windows-build
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'master'
|
||||
pull_request:
|
||||
types: ['labeled', 'opened', 'synchronize', 'reopened']
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.head_ref }} || concat(${{ github.ref }}, ${{ github.workflow }})
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
windows-build:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [windows-latest]
|
||||
runs-on: ${{ matrix.os }}
|
||||
name: ${{ matrix.os }}
|
||||
env:
|
||||
BOOST_ROOT: C:\local\boost_1_82_0
|
||||
if: |
|
||||
github.repository == 'azerothcore/azerothcore-wotlk'
|
||||
&& !github.event.pull_request.draft
|
||||
&& (github.ref == 'refs/heads/master' || contains(github.event.pull_request.labels.*.name, 'run-build') || github.event.label.name == 'run-build')
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: ccache
|
||||
uses: hendrikmuhs/ccache-action@v1.2.9
|
||||
- name: Configure OS
|
||||
shell: bash
|
||||
env:
|
||||
CONTINUOUS_INTEGRATION: true
|
||||
run: |
|
||||
./acore.sh install-deps
|
||||
- name: Process pending sql
|
||||
shell: bash
|
||||
run: bash bin/acore-db-pendings
|
||||
- name: Build
|
||||
shell: bash
|
||||
run: |
|
||||
export CTOOLS_BUILD=all
|
||||
./acore.sh compiler build
|
||||
- name: Copy dll files
|
||||
shell: bash
|
||||
run: |
|
||||
cp "/c/Program Files/OpenSSL/bin/legacy.dll" "env/dist"
|
||||
- name: Dry run authserver
|
||||
shell: bash
|
||||
run: |
|
||||
source ./apps/ci/ci-gen-server-conf-files.sh "authserver" "configs" "."
|
||||
cd env/dist
|
||||
./authserver -dry-run
|
||||
- name: Dry run worldserver
|
||||
shell: bash
|
||||
run: |
|
||||
source ./apps/ci/ci-gen-server-conf-files.sh "worldserver" "configs" "."
|
||||
cd env/dist
|
||||
./worldserver -dry-run
|
||||
- name: Stop MySQL
|
||||
run: net stop mysql
|
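The dry-run steps only check that the freshly built binaries start and parse their generated configuration; the same smoke test can be run from a Git Bash shell after a build (sketch assembled from the commands in the steps above):

export CTOOLS_BUILD=all
./acore.sh compiler build
cp "/c/Program Files/OpenSSL/bin/legacy.dll" "env/dist"                     # legacy OpenSSL provider DLL next to the binaries
source ./apps/ci/ci-gen-server-conf-files.sh "worldserver" "configs" "."
cd env/dist && ./worldserver -dry-run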
93
.gitignore
vendored
Normal file
@ -0,0 +1,93 @@
|
||||
#
|
||||
# AzerothCore
|
||||
#
|
||||
|
||||
/conf/*
|
||||
!/conf/dist
|
||||
/modules/*
|
||||
!/modules/*.md
|
||||
!/modules/*.sh
|
||||
!/modules/CMakeLists.txt
|
||||
!/modules/*.h
|
||||
!/modules/*.cmake
|
||||
/build*/
|
||||
/var/*
|
||||
!/var/build/.gitkeep
|
||||
!/var/ccache/.gitkeep
|
||||
/env/dist/*
|
||||
!/env/dist/.gitkeep
|
||||
/env/user/*
|
||||
/.env*
|
||||
/apps/joiner
|
||||
/deps/deno
|
||||
/data/sql/custom/*
|
||||
/src/server/scripts/Custom/*
|
||||
!/src/server/scripts/Custom/README.md
|
||||
|
||||
/docker-compose.override.yml
|
||||
|
||||
!.gitkeep
|
||||
|
||||
#
|
||||
#Generic
|
||||
#
|
||||
|
||||
.directory
|
||||
.mailmap
|
||||
*.orig
|
||||
*.rej
|
||||
*~
|
||||
.hg/
|
||||
*.kdev*
|
||||
.DS_Store
|
||||
CMakeLists.txt.user
|
||||
*.bak
|
||||
*.patch
|
||||
*.diff
|
||||
*.REMOTE.*
|
||||
*.BACKUP.*
|
||||
*.BASE.*
|
||||
*.LOCAL.*
|
||||
|
||||
#
|
||||
# IDE & other software
|
||||
#
|
||||
/.settings/
|
||||
/.externalToolBuilders/*
|
||||
# exclude in all levels
|
||||
nbproject/
|
||||
.sync.ffs_db
|
||||
*.kate-swp
|
||||
.browse.VC*
|
||||
.idea
|
||||
cmake-build-*/*
|
||||
coverage-report/
|
||||
|
||||
#
|
||||
# Eclipse
|
||||
#
|
||||
*.pydevproject
|
||||
.metadata
|
||||
.gradle
|
||||
tmp/
|
||||
*.tmp
|
||||
*.swp
|
||||
*~.nib
|
||||
local.properties
|
||||
.settings/
|
||||
.loadpath
|
||||
.project
|
||||
.cproject
|
||||
|
||||
|
||||
# ==================
|
||||
#
|
||||
# CUSTOM
|
||||
#
|
||||
# put your custom ignore rules below
# for example, if you want to include a
# module directly in the repository you can do:
#
# !modules/yourmodule
|
||||
#
|
||||
# ==================
|
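As the CUSTOM section above suggests, a module that should live directly in the repository can be un-ignored with a negated rule (the module name below is only an example):

# append to the CUSTOM section of .gitignore (hypothetical module name)
echo '!modules/mod-my-custom-module' >> .gitignore
git add .gitignore modules/mod-my-custom-module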
1
.suppress.cppcheck
Normal file
@ -0,0 +1 @@
cppcheckError
16
.vscode/extensions.json
vendored
Normal file
@ -0,0 +1,16 @@
{
    "recommendations": [
        "ms-vscode-remote.remote-containers",
        "notskm.clang-tidy",
        "xaver.clang-format",
        "bbenoist.doxygen",
        "ms-vscode.cpptools",
        "austin.code-gnu-global",
        "twxs.cmake",
        "mhutchie.git-graph",
        "github.vscode-pull-request-github",
        "eamodio.gitlens",
        "cschlosser.doxdocgen",
        "sanaajani.taskrunnercode"
    ]
}
68
.vscode/launch.json
vendored
Normal file
@ -0,0 +1,68 @@
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Linux/Docker debug",
|
||||
"type": "cppdbg",
|
||||
"request": "launch",
|
||||
"program": "/azerothcore/env/dist/bin/worldserver",
|
||||
"cwd": "/azerothcore",
|
||||
"args": [],
|
||||
"environment": [],
|
||||
"externalConsole": false,
|
||||
"sourceFileMap": {
|
||||
"/azerothcore": "${workspaceFolder}"
|
||||
},
|
||||
"linux": {
|
||||
"MIMode": "gdb",
|
||||
"miDebuggerPath": "/usr/bin/gdb",
|
||||
"setupCommands": [
|
||||
{
|
||||
"description": "Enable pretty-printing for gdb",
|
||||
"text": "-enable-pretty-printing",
|
||||
"ignoreFailures": false
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "(docker run) Pipe Launch",
|
||||
"type": "cppdbg",
|
||||
"request": "launch",
|
||||
"program": "/azerothcore/env/dist/bin/worldserver",
|
||||
"cwd": "/azerothcore",
|
||||
"args": [],
|
||||
"environment": [],
|
||||
"externalConsole": true,
|
||||
"pipeTransport": {
|
||||
"debuggerPath": "/usr/bin/gdb",
|
||||
"pipeProgram": "docker compose",
|
||||
"pipeArgs": [
|
||||
"exec", "-T", "ac-worldserver", "sh", "-c"
|
||||
],
|
||||
"pipeCwd": "${workspaceFolder}"
|
||||
},
|
||||
"sourceFileMap": {
|
||||
"/azerothcore": "${workspaceFolder}"
|
||||
},
|
||||
"linux": {
|
||||
"MIMode": "gdb",
|
||||
"miDebuggerPath": "/usr/bin/gdb",
|
||||
"setupCommands": [
|
||||
{
|
||||
"description": "Enable pretty-printing for gdb",
|
||||
"text": "-enable-pretty-printing",
|
||||
"ignoreFailures": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"osx": {
|
||||
"MIMode": "lldb"
|
||||
},
|
||||
"windows": {
|
||||
"MIMode": "gdb",
|
||||
"miDebuggerPath": "C:\\MinGw\\bin\\gdb.exe"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
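The "(docker run) Pipe Launch" configuration drives gdb through docker compose exec against the ac-worldserver service; a quick check that the debugger path it assumes actually exists in the running container (sketch):

docker compose exec -T ac-worldserver sh -c "/usr/bin/gdb --version"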
122
.vscode/settings.json
vendored
Normal file
@ -0,0 +1,122 @@
|
||||
{
|
||||
"files.associations": {
|
||||
"*.dist": "properties",
|
||||
"*.crash": "properties",
|
||||
"*.wtf": "properties",
|
||||
"*.cnf": "properties",
|
||||
"array": "cpp",
|
||||
"atomic": "cpp",
|
||||
"bit": "cpp",
|
||||
"*.tcc": "cpp",
|
||||
"bitset": "cpp",
|
||||
"cctype": "cpp",
|
||||
"chrono": "cpp",
|
||||
"cinttypes": "cpp",
|
||||
"clocale": "cpp",
|
||||
"cmath": "cpp",
|
||||
"complex": "cpp",
|
||||
"condition_variable": "cpp",
|
||||
"csignal": "cpp",
|
||||
"cstdarg": "cpp",
|
||||
"cstddef": "cpp",
|
||||
"cstdint": "cpp",
|
||||
"cstdio": "cpp",
|
||||
"cstdlib": "cpp",
|
||||
"cstring": "cpp",
|
||||
"ctime": "cpp",
|
||||
"cwchar": "cpp",
|
||||
"cwctype": "cpp",
|
||||
"deque": "cpp",
|
||||
"list": "cpp",
|
||||
"map": "cpp",
|
||||
"set": "cpp",
|
||||
"unordered_map": "cpp",
|
||||
"unordered_set": "cpp",
|
||||
"vector": "cpp",
|
||||
"exception": "cpp",
|
||||
"algorithm": "cpp",
|
||||
"functional": "cpp",
|
||||
"iterator": "cpp",
|
||||
"memory": "cpp",
|
||||
"memory_resource": "cpp",
|
||||
"numeric": "cpp",
|
||||
"optional": "cpp",
|
||||
"random": "cpp",
|
||||
"ratio": "cpp",
|
||||
"string": "cpp",
|
||||
"string_view": "cpp",
|
||||
"system_error": "cpp",
|
||||
"tuple": "cpp",
|
||||
"type_traits": "cpp",
|
||||
"utility": "cpp",
|
||||
"fstream": "cpp",
|
||||
"initializer_list": "cpp",
|
||||
"iomanip": "cpp",
|
||||
"iosfwd": "cpp",
|
||||
"iostream": "cpp",
|
||||
"istream": "cpp",
|
||||
"limits": "cpp",
|
||||
"mutex": "cpp",
|
||||
"new": "cpp",
|
||||
"ostream": "cpp",
|
||||
"shared_mutex": "cpp",
|
||||
"sstream": "cpp",
|
||||
"stdexcept": "cpp",
|
||||
"streambuf": "cpp",
|
||||
"thread": "cpp",
|
||||
"cfenv": "cpp",
|
||||
"typeinfo": "cpp",
|
||||
"codecvt": "cpp",
|
||||
"xstring": "cpp",
|
||||
"variant": "cpp",
|
||||
"any": "cpp",
|
||||
"barrier": "cpp",
|
||||
"charconv": "cpp",
|
||||
"compare": "cpp",
|
||||
"concepts": "cpp",
|
||||
"coroutine": "cpp",
|
||||
"csetjmp": "cpp",
|
||||
"execution": "cpp",
|
||||
"filesystem": "cpp",
|
||||
"format": "cpp",
|
||||
"forward_list": "cpp",
|
||||
"future": "cpp",
|
||||
"ios": "cpp",
|
||||
"latch": "cpp",
|
||||
"locale": "cpp",
|
||||
"numbers": "cpp",
|
||||
"queue": "cpp",
|
||||
"ranges": "cpp",
|
||||
"regex": "cpp",
|
||||
"scoped_allocator": "cpp",
|
||||
"semaphore": "cpp",
|
||||
"source_location": "cpp",
|
||||
"span": "cpp",
|
||||
"stack": "cpp",
|
||||
"stop_token": "cpp",
|
||||
"strstream": "cpp",
|
||||
"syncstream": "cpp",
|
||||
"typeindex": "cpp",
|
||||
"valarray": "cpp",
|
||||
"xfacet": "cpp",
|
||||
"xhash": "cpp",
|
||||
"xiosbase": "cpp",
|
||||
"xlocale": "cpp",
|
||||
"xlocbuf": "cpp",
|
||||
"xlocinfo": "cpp",
|
||||
"xlocmes": "cpp",
|
||||
"xlocmon": "cpp",
|
||||
"xlocnum": "cpp",
|
||||
"xloctime": "cpp",
|
||||
"xmemory": "cpp",
|
||||
"xstddef": "cpp",
|
||||
"xtr1common": "cpp",
|
||||
"xtree": "cpp",
|
||||
"xutility": "cpp",
|
||||
"*.ipp": "cpp",
|
||||
"resumable": "cpp"
|
||||
},
|
||||
"deno.enable": true,
|
||||
"deno.path": "deps/deno/bin/deno",
|
||||
"deno.lint": true
|
||||
}
|
85
.vscode/tasks.json
vendored
Normal file
@ -0,0 +1,85 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "AzerothCore: Dashboard",
|
||||
"type": "shell",
|
||||
"command": "./acore.sh",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "AzerothCore: Import/update database",
|
||||
"type": "shell",
|
||||
"command": "./acore.sh db-assembler import-all",
|
||||
"group": "build",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "AzerothCore: download client-data",
|
||||
"type": "shell",
|
||||
"command": "./acore.sh client-data",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "AzerothCore: Clean build",
|
||||
"type": "shell",
|
||||
"command": "./acore.sh compiler clean",
|
||||
"group": "build",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "AzerothCore: Build",
|
||||
"type": "shell",
|
||||
"command": "./acore.sh compiler build",
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "AzerothCore: Run authserver (restarter)",
|
||||
"type": "shell",
|
||||
"command": "./acore.sh run-authserver",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "AzerothCore: Run worldserver (restarter)",
|
||||
"type": "shell",
|
||||
"command": "./acore.sh run-worldserver",
|
||||
"group": "none",
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
}
|
||||
]
|
||||
}
|
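Each task is a thin wrapper around the acore.sh dashboard, so the same actions can also be run from any terminal:

./acore.sh compiler build             # "AzerothCore: Build" task
./acore.sh db-assembler import-all    # "AzerothCore: Import/update database" task
./acore.sh client-data                # "AzerothCore: download client-data" task
./acore.sh run-worldserver            # "AzerothCore: Run worldserver (restarter)" task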
36
AUTHORS
Normal file
@ -0,0 +1,36 @@
|
||||
# List of AUTHORS who contributed over time to the AzerothCore project
|
||||
|
||||
## Warning
|
||||
The code of AzerothCore is shipped as is, without any form of warranty,
and - except for third party libraries - is licensed under the AGPL 3,
which you can read in the file "LICENSE-AGPL3".
|
||||
|
||||
## Point of current development
|
||||
The project is currently hosted at https://www.azerothcore.org/ and developed under https://github.com/azerothcore
|
||||
|
||||
## History of development
|
||||
Development of this project dates back to 2004 and has taken place under various umbrellas over time:
|
||||
* WoW Daemon Team, 2004
|
||||
* MaNGOS project, 2005-2008, located at http://www.mangosproject.org
|
||||
* MaNGOS project, 2008-2011, located at http://getmangos.com
|
||||
* SD2 project, 2008-2009, located at http://www.scriptdev2.com/
|
||||
* TrinityCore, 2008-2012, located at https://www.trinitycore.org/
|
||||
* SunwellCore 2012-2016, privately developed, more info at https://www.azerothcore.org/pages/sunwell.pl/
|
||||
* AzerothCore, 2016-2023, located at https://www.azerothcore.org/
|
||||
|
||||
## Authorship of the code
|
||||
Authorship is assigned for each commit within the git history, which is stored in these git repositories:
|
||||
* github.com/cmangos/mangos-svn (History from MaNGOS project from 2005-2008, originally hosted at http://mangos.svn.sourceforge.net)
|
||||
* github.com/TrinityCore/TrinityCore
|
||||
* github.com/azerothcore/azerothcore-wotlk
|
||||
|
||||
Unfortunately, we have no detailed information about the history of the WoWD project;
if somebody can provide information, please contact us so that we can make this history available.
|
||||
|
||||
SunwellCore was developed privately and has no git history.
|
||||
|
||||
## Exceptions with third party libraries
|
||||
The third party libraries have their own way of addressing authorship, and the authorship of commits importing/updating
a third party library reflects who did the importing rather than who wrote the code within the commit.
|
||||
|
||||
The authors of third party libraries are not explicitly mentioned here; they can usually be obtained from the files belonging to the third party libraries.
|
173
CMakeLists.txt
Normal file
@ -0,0 +1,173 @@
|
||||
#
|
||||
# This file is part of the AzerothCore Project. See AUTHORS file for Copyright information
|
||||
#
|
||||
# This file is free software; as a special exception the author gives
|
||||
# unlimited permission to copy and/or distribute it, with or without
|
||||
# modifications, as long as this notice is preserved.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY, to the extent permitted by law; without even the
|
||||
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
|
||||
#
|
||||
|
||||
# Required CMake version
|
||||
cmake_minimum_required(VERSION 3.16...3.22)
|
||||
|
||||
message(STATUS "CMake version: ${CMAKE_VERSION}")
|
||||
|
||||
# CMake policies (can not be handled elsewhere)
|
||||
cmake_policy(SET CMP0005 NEW)
|
||||
|
||||
# Set project name (must be done AFTER setting configuration types)
|
||||
project(AzerothCore VERSION 3.0.0 LANGUAGES CXX C)
|
||||
|
||||
# add these options before the PROJECT keyword
|
||||
set(CMAKE_DISABLE_SOURCE_CHANGES ON)
|
||||
set(CMAKE_DISABLE_IN_SOURCE_BUILD ON)
|
||||
|
||||
# Set RPATH handling (CMake parameters)
|
||||
set(CMAKE_SKIP_BUILD_RPATH 0)
|
||||
set(CMAKE_BUILD_WITH_INSTALL_RPATH 0)
|
||||
set(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/lib")
|
||||
set(CMAKE_INSTALL_RPATH_USE_LINK_PATH 1)
|
||||
|
||||
set(AC_PATH_ROOT "${CMAKE_SOURCE_DIR}")
|
||||
|
||||
# set macro-directory
|
||||
list(APPEND CMAKE_MODULE_PATH
|
||||
"${CMAKE_SOURCE_DIR}/src/cmake/macros")
|
||||
|
||||
include(CheckCXXSourceRuns)
|
||||
include(CheckIncludeFiles)
|
||||
include(ConfigureScripts)
|
||||
include(ConfigureModules)
|
||||
include(ConfigureApplications)
|
||||
include(ConfigureTools)
|
||||
|
||||
# some utils for cmake
|
||||
include(deps/acore/cmake-utils/utils.cmake)
|
||||
|
||||
include(src/cmake/ac_macros.cmake)
|
||||
|
||||
# set default build options and print them
|
||||
include(conf/dist/config.cmake)
|
||||
|
||||
# load custom cmake configuration if it exists
|
||||
if(EXISTS "${CMAKE_SOURCE_DIR}/conf/config.cmake")
|
||||
include(conf/config.cmake)
|
||||
endif()
|
||||
|
||||
#
|
||||
# Loading dyn modules
|
||||
#
|
||||
|
||||
# add modules and dependencies
|
||||
CU_SUBDIRLIST(sub_DIRS "${CMAKE_SOURCE_DIR}/modules" FALSE FALSE)
|
||||
FOREACH(subdir ${sub_DIRS})
|
||||
|
||||
get_filename_component(MODULENAME ${subdir} NAME)
|
||||
|
||||
if (";${DISABLED_AC_MODULES};" MATCHES ";${MODULENAME};")
|
||||
continue()
|
||||
endif()
|
||||
|
||||
STRING(REPLACE "${CMAKE_SOURCE_DIR}/" "" subdir_rel ${subdir})
|
||||
if(EXISTS "${subdir}/CMakeLists.txt")
|
||||
add_subdirectory("${subdir_rel}")
|
||||
endif()
|
||||
ENDFOREACH()
|
||||
|
||||
CU_RUN_HOOK("AFTER_LOAD_CONF")
|
||||
|
||||
# default to RelWithDebInfo if no build type is explicitly set
|
||||
if(NOT CMAKE_BUILD_TYPE)
|
||||
set(CMAKE_BUILD_TYPE "RelWithDebInfo")
|
||||
endif()
|
||||
|
||||
# turn off PCH entirely if NOPCH is set (hidden setting, mainly for devs)
|
||||
if( NOPCH )
|
||||
set(USE_COREPCH 0)
|
||||
set(USE_SCRIPTPCH 0)
|
||||
endif()
|
||||
|
||||
include(ConfigureBaseTargets)
|
||||
include(CheckPlatform)
|
||||
include(GroupSources)
|
||||
include(AutoCollect)
|
||||
include(ConfigInstall)
|
||||
|
||||
CU_RUN_HOOK("AFTER_LOAD_CMAKE_MODULES")
|
||||
|
||||
find_package(PCHSupport)
|
||||
find_package(MySQL REQUIRED)
|
||||
|
||||
if(UNIX AND WITH_PERFTOOLS)
|
||||
find_package(Gperftools)
|
||||
endif()
|
||||
|
||||
if(NOT WITHOUT_GIT)
|
||||
find_package(Git)
|
||||
endif()
|
||||
|
||||
# Find revision ID and hash of the sourcetree
|
||||
include(src/cmake/genrev.cmake)
|
||||
|
||||
# print out the results before continuing
|
||||
include(src/cmake/showoptions.cmake)
|
||||
|
||||
#
|
||||
# Loading framework
|
||||
#
|
||||
|
||||
add_subdirectory(deps)
|
||||
add_subdirectory(src/common)
|
||||
|
||||
#
|
||||
# Loading application sources
|
||||
#
|
||||
|
||||
CU_RUN_HOOK("BEFORE_SRC_LOAD")
|
||||
|
||||
# add core sources
|
||||
add_subdirectory(src)
|
||||
|
||||
if (BUILD_APPLICATION_WORLDSERVER)
|
||||
# add modules sources
|
||||
add_subdirectory(modules)
|
||||
endif()
|
||||
|
||||
CU_RUN_HOOK("AFTER_SRC_LOAD")
|
||||
|
||||
if (BUILD_TESTING AND BUILD_APPLICATION_WORLDSERVER)
|
||||
# we use these flags to get code coverage
|
||||
set(UNIT_TEST_CXX_FLAGS "-fprofile-arcs -ftest-coverage -fno-inline")
|
||||
|
||||
# enable additional flags for GCC.
|
||||
if ( CMAKE_CXX_COMPILER_ID MATCHES GNU )
|
||||
set(UNIT_TEST_CXX_FLAGS "${UNIT_TEST_CXX_FLAGS} -fno-inline-small-functions -fno-default-inline")
|
||||
endif()
|
||||
|
||||
message("Unit tests code coverage: enabling ${UNIT_TEST_CXX_FLAGS}")
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${UNIT_TEST_CXX_FLAGS}")
|
||||
|
||||
include(src/cmake/googletest.cmake)
|
||||
fetch_googletest(
|
||||
${PROJECT_SOURCE_DIR}/src/cmake
|
||||
${PROJECT_BINARY_DIR}/googletest
|
||||
)
|
||||
|
||||
enable_testing()
|
||||
add_subdirectory(src/test)
|
||||
|
||||
add_custom_target(coverage DEPENDS coverage_command)
|
||||
|
||||
add_custom_command(OUTPUT coverage_command
|
||||
# Run unit tests.
|
||||
COMMAND ctest
|
||||
# Run the graphical front-end for code coverage.
|
||||
COMMAND lcov --directory src --capture --output-file coverage.info
|
||||
COMMAND lcov --remove coverage.info '/usr/*' '${CMAKE_BINARY_DIR}/googletest/*' '${CMAKE_CURRENT_SOURCE_DIR}/src/test/*' --output-file coverage.info
|
||||
COMMAND genhtml -o ${CMAKE_CURRENT_SOURCE_DIR}/coverage-report coverage.info
|
||||
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
|
||||
)
|
||||
endif()
|
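A typical out-of-source configure/build against this CMakeLists, including the optional coverage target defined above (a sketch; the build directory name is arbitrary, and the coverage target only exists when BUILD_TESTING and BUILD_APPLICATION_WORLDSERVER are enabled):

cmake -S . -B build -DBUILD_TESTING=ON      # CMAKE_BUILD_TYPE defaults to RelWithDebInfo
cmake --build build -j"$(nproc)"
cmake --build build --target coverage       # runs ctest, lcov and genhtml as wired above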
661
LICENSE
Normal file
@ -0,0 +1,661 @@
|
||||
GNU AFFERO GENERAL PUBLIC LICENSE
|
||||
Version 3, 19 November 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU Affero General Public License is a free, copyleft license for
|
||||
software and other kinds of works, specifically designed to ensure
|
||||
cooperation with the community in the case of network server software.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
our General Public Licenses are intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
Developers that use our General Public Licenses protect your rights
|
||||
with two steps: (1) assert copyright on the software, and (2) offer
|
||||
you this License which gives you legal permission to copy, distribute
|
||||
and/or modify the software.
|
||||
|
||||
A secondary benefit of defending all users' freedom is that
|
||||
improvements made in alternate versions of the program, if they
|
||||
receive widespread use, become available for other developers to
|
||||
incorporate. Many developers of free software are heartened and
|
||||
encouraged by the resulting cooperation. However, in the case of
|
||||
software used on network servers, this result may fail to come about.
|
||||
The GNU General Public License permits making a modified version and
|
||||
letting the public access it on a server without ever releasing its
|
||||
source code to the public.
|
||||
|
||||
The GNU Affero General Public License is designed specifically to
|
||||
ensure that, in such cases, the modified source code becomes available
|
||||
to the community. It requires the operator of a network server to
|
||||
provide the source code of the modified version running there to the
|
||||
users of that server. Therefore, public use of a modified version, on
|
||||
a publicly accessible server, gives the public access to the source
|
||||
code of the modified version.
|
||||
|
||||
An older license, called the Affero General Public License and
|
||||
published by Affero, was designed to accomplish similar goals. This is
|
||||
a different license, not a version of the Affero GPL, but Affero has
|
||||
released a new version of the Affero GPL which permits relicensing under
|
||||
this license.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU Affero General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Remote Network Interaction; Use with the GNU General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, if you modify the
|
||||
Program, your modified version must prominently offer all users
|
||||
interacting with it remotely through a computer network (if your version
|
||||
supports such interaction) an opportunity to receive the Corresponding
|
||||
Source of your version by providing access to the Corresponding Source
|
||||
from a network server at no charge, through some standard or customary
|
||||
means of facilitating copying of software. This Corresponding Source
|
||||
shall include the Corresponding Source for any work covered by version 3
|
||||
of the GNU General Public License that is incorporated pursuant to the
|
||||
following paragraph.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the work with which it is combined will remain governed by version
|
||||
3 of the GNU General Public License.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU Affero General Public License from time to time. Such new versions
|
||||
will be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU Affero General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU Affero General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU Affero General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU Affero General Public License as published
|
||||
by the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU Affero General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Affero General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If your software can interact with users remotely through a computer
|
||||
network, you should also make sure that it provides a way for users to
|
||||
get its source. For example, if your program is a web application, its
|
||||
interface could display a "Source" link that leads users to an archive
|
||||
of the code. There are many ways you could offer source, and different
|
||||
solutions will be better for different programs; see section 13 for the
|
||||
specific requirements.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU AGPL, see
|
||||
<http://www.gnu.org/licenses/>.
|
23
PreLoad.cmake
Normal file
23
PreLoad.cmake
Normal file
@ -0,0 +1,23 @@
# Copyright (C)
#
# This file is free software; as a special exception the author gives
# unlimited permission to copy and/or distribute it, with or without
# modifications, as long as this notice is preserved.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY, to the extent permitted by law; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.

# This file is run right before CMake starts configuring the sourcetree

# Example: Force CMAKE_INSTALL_PREFIX to be preloaded with something before
# doing the actual first "configure"-part - allows for hardforcing
# destinations elsewhere in the CMake buildsystem (commented out on purpose)

# Override CMAKE_INSTALL_PREFIX on Windows platforms
#if( WIN32 )
#  if( NOT CYGWIN )
#    set(CMAKE_INSTALL_PREFIX
#        "" CACHE PATH "Default install path")
#  endif()
#endif()
5
acore.json
Normal file
5
acore.json
Normal file
@ -0,0 +1,5 @@
{
  "name": "azerothcore-wotlk",
  "version": "7.0.0-dev.1",
  "license": "AGPL3"
}
8
acore.sh
Normal file
8
acore.sh
Normal file
@ -0,0 +1,8 @@
#!/usr/bin/env bash

[ -z "$WITH_ERRORS" ] && set -e

CUR_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"


source "$CUR_PATH/apps/installer/main.sh"
163
apps/EnumUtils/enumutils_describe.py
Normal file
163
apps/EnumUtils/enumutils_describe.py
Normal file
@ -0,0 +1,163 @@
|
||||
from re import compile, MULTILINE
|
||||
from os import walk, getcwd
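
# Scans every .h file under the current working directory (run from src/) for
# enums annotated with "// EnumUtils: DESCRIBE THIS" and generates a matching
# enuminfo_<header>.cpp containing the EnumUtils<T>::ToString / Count /
# FromIndex / ToIndex specializations for each parsed enum.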
|
||||
|
||||
notice = ('''/*
|
||||
* This file is part of the AzerothCore Project. See AUTHORS file for Copyright information
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU Affero General Public License as published by the
|
||||
* Free Software Foundation; either version 3 of the License, or (at your
|
||||
* option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License along
|
||||
* with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
''')
|
||||
|
||||
if not getcwd().endswith('src'):
|
||||
print('Run this from the src directory!')
|
||||
print('(Invoke as \'python ../apps/EnumUtils/enumutils_describe.py\')')
|
||||
exit(1)
|
||||
|
||||
EnumPattern = compile(r'//\s*EnumUtils: DESCRIBE THIS(?:\s*\(in ([^\)]+)\))?\s+enum\s+([0-9A-Za-z]+)[^\n]*\s*{([^}]+)};')
|
||||
EnumValuesPattern = compile(r'\s+\S.+?(,|$)[^\n]*')
|
||||
EnumValueNamePattern = compile(r'^\s*([a-zA-Z0-9_]+)', flags=MULTILINE)
|
||||
EnumValueSkipLinePattern = compile(r'^\s*//')
|
||||
EnumValueCommentPattern = compile(r'//,?[ \t]*([^\n]+)$')
|
||||
CommentMatchFormat = compile(r'^(((TITLE +(.+?))|(DESCRIPTION +(.+?))) *){1,2}$')
|
||||
CommentSkipFormat = compile(r'^SKIP *$')
|
||||
|
||||
def strescape(str):
|
||||
res = ''
|
||||
for char in str:
|
||||
if char in ('\\', '"') or not (32 <= ord(char) < 127):
|
||||
res += ('\\%03o' % ord(char))
|
||||
else:
|
||||
res += char
|
||||
return '"' + res + '"'
|
||||
|
||||
def processFile(path, filename):
|
||||
input = open('%s/%s.h' % (path, filename),'r')
|
||||
if input is None:
|
||||
print('Failed to open %s.h' % filename)
|
||||
return
|
||||
|
||||
file = input.read()
|
||||
|
||||
enums = []
|
||||
for enum in EnumPattern.finditer(file):
|
||||
prefix = enum.group(1) or ''
|
||||
name = enum.group(2)
|
||||
values = []
|
||||
for value in EnumValuesPattern.finditer(enum.group(3)):
|
||||
valueData = value.group(0)
|
||||
|
||||
valueNameMatch = EnumValueNamePattern.search(valueData)
|
||||
if valueNameMatch is None:
|
||||
if EnumValueSkipLinePattern.search(valueData) is None:
|
||||
print('Name of value not found: %s' % repr(valueData))
|
||||
continue
|
||||
valueName = valueNameMatch.group(1)
|
||||
|
||||
valueCommentMatch = EnumValueCommentPattern.search(valueData)
|
||||
valueComment = None
|
||||
if valueCommentMatch:
|
||||
valueComment = valueCommentMatch.group(1)
|
||||
|
||||
valueTitle = None
|
||||
valueDescription = None
|
||||
|
||||
if valueComment is not None:
|
||||
if CommentSkipFormat.match(valueComment) is not None:
|
||||
continue
|
||||
commentMatch = CommentMatchFormat.match(valueComment)
|
||||
if commentMatch is not None:
|
||||
valueTitle = commentMatch.group(4)
|
||||
valueDescription = commentMatch.group(6)
|
||||
else:
|
||||
valueDescription = valueComment
|
||||
|
||||
if valueTitle is None:
|
||||
valueTitle = valueName
|
||||
if valueDescription is None:
|
||||
valueDescription = ''
|
||||
|
||||
values.append((valueName, valueTitle, valueDescription))
|
||||
|
||||
enums.append((prefix + name, prefix, values))
|
||||
print('%s.h: Enum %s parsed with %d values' % (filename, name, len(values)))
|
||||
|
||||
if not enums:
|
||||
return
|
||||
|
||||
print('Done parsing %s.h (in %s)\n' % (filename, path))
|
||||
output = open('%s/enuminfo_%s.cpp' % (path, filename), 'w')
|
||||
if output is None:
|
||||
print('Failed to create enuminfo_%s.cpp' % filename)
|
||||
return
|
||||
|
||||
# write output file
|
||||
output.write(notice)
|
||||
output.write('#include "%s.h"\n' % filename)
|
||||
output.write('#include "Define.h"\n')
|
||||
output.write('#include "SmartEnum.h"\n')
|
||||
output.write('#include <stdexcept>\n')
|
||||
output.write('\n')
|
||||
output.write('namespace Acore::Impl::EnumUtilsImpl\n')
|
||||
output.write('{\n')
|
||||
for name, prefix, values in enums:
|
||||
tag = ('data for enum \'%s\' in \'%s.h\' auto-generated' % (name, filename))
|
||||
output.write('\n')
|
||||
output.write('/*' + ('*'*(len(tag)+2)) + '*\\\n')
|
||||
output.write('|* ' + tag + ' *|\n')
|
||||
output.write('\\*' + ('*'*(len(tag)+2)) + '*/\n')
|
||||
output.write('template <>\n')
|
||||
output.write('AC_API_EXPORT EnumText EnumUtils<%s>::ToString(%s value)\n' % (name, name))
|
||||
output.write('{\n')
|
||||
output.write(' switch (value)\n')
|
||||
output.write(' {\n')
|
||||
for label, title, description in values:
|
||||
output.write(' case %s: return { %s, %s, %s };\n' % (prefix + label, strescape(label), strescape(title), strescape(description)))
|
||||
output.write(' default: throw std::out_of_range("value");\n')
|
||||
output.write(' }\n')
|
||||
output.write('}\n')
|
||||
output.write('\n')
|
||||
output.write('template <>\n')
|
||||
output.write('AC_API_EXPORT size_t EnumUtils<%s>::Count() { return %d; }\n' % (name, len(values)))
|
||||
output.write('\n')
|
||||
output.write('template <>\n')
|
||||
output.write('AC_API_EXPORT %s EnumUtils<%s>::FromIndex(size_t index)\n' % (name, name))
|
||||
output.write('{\n')
|
||||
output.write(' switch (index)\n')
|
||||
output.write(' {\n')
|
||||
for (i, (label, title, description)) in enumerate(values):
|
||||
output.write(' case %d: return %s;\n' % (i, prefix + label))
|
||||
output.write(' default: throw std::out_of_range("index");\n')
|
||||
output.write(' }\n')
|
||||
output.write('}\n')
|
||||
output.write('\n')
|
||||
output.write('template <>\n')
|
||||
output.write('AC_API_EXPORT size_t EnumUtils<%s>::ToIndex(%s value)\n' % (name, name))
|
||||
output.write('{\n')
|
||||
output.write(' switch (value)\n')
|
||||
output.write(' {\n')
|
||||
for (i, (label, title, description)) in enumerate(values):
|
||||
output.write(' case %s: return %d;\n' % (prefix + label, i))
|
||||
output.write(' default: throw std::out_of_range("value");\n')
|
||||
output.write(' }\n')
|
||||
output.write('}\n')
|
||||
|
||||
output.write('}\n')
|
||||
|
||||
FilenamePattern = compile(r'^(.+)\.h$')
|
||||
for root, dirs, files in walk('.'):
|
||||
for n in files:
|
||||
nameMatch = FilenamePattern.match(n)
|
||||
if nameMatch is not None:
|
||||
processFile(root, nameMatch.group(1))
|
238
apps/Fmt/FormatReplace.py
Normal file
238
apps/Fmt/FormatReplace.py
Normal file
@ -0,0 +1,238 @@
|
||||
import pathlib
|
||||
from os import getcwd
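
# Rewrites printf-style format specifiers (%s, %u, %.2f, ...) into fmt-style
# placeholders ({}, {:02}, {0:.2f}, ...) on lines containing LOG_*, ASSERT,
# ABORT_MSG or PQuery/PExecute/PAppend calls, renames ABORT_MSG and those P*
# database calls to their ABORT / non-P counterparts, and drops now-redundant
# .c_str() calls. Every .cpp/.h file under the current directory is rewritten
# in place, and only when its content actually changed.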
|
||||
|
||||
if not getcwd().endswith('src') and not getcwd().endswith('modules'):
|
||||
print('Run this from the src or modules directory!')
|
||||
print('(Invoke as \'python ../apps/Fmt/FormatReplace.py\')')
|
||||
exit(1)
|
||||
|
||||
def isASSERT(line):
|
||||
substring = 'ASSERT'
|
||||
if substring in line:
|
||||
return True
|
||||
else :
|
||||
return False
|
||||
|
||||
def isABORTMSG(line):
|
||||
substring = 'ABORT_MSG'
|
||||
if substring in line:
|
||||
return True
|
||||
else :
|
||||
return False
|
||||
|
||||
def islog(line):
|
||||
substring = 'LOG_'
|
||||
if substring in line:
|
||||
return True
|
||||
else :
|
||||
return False
|
||||
|
||||
# def isSendSysMessage(line):
|
||||
# substring = 'SendSysMessage'
|
||||
# if substring in line:
|
||||
# return True
|
||||
# else :
|
||||
# return False
|
||||
|
||||
# def isPSendSysMessage(line):
|
||||
# substring = 'PSendSysMessage'
|
||||
# if substring in line:
|
||||
# return True
|
||||
# else :
|
||||
# return False
|
||||
|
||||
def isPQuery(line):
|
||||
substring = 'PQuery'
|
||||
if substring in line:
|
||||
return True
|
||||
else :
|
||||
return False
|
||||
|
||||
def isPExecute(line):
|
||||
substring = 'PExecute'
|
||||
if substring in line:
|
||||
return True
|
||||
else :
|
||||
return False
|
||||
|
||||
def isPAppend(line):
|
||||
substring = 'PAppend'
|
||||
if substring in line:
|
||||
return True
|
||||
else :
|
||||
return False
|
||||
|
||||
# def isStringFormat(line):
|
||||
# substring = 'StringFormat'
|
||||
# if substring in line:
|
||||
# return True
|
||||
# else :
|
||||
# return False
|
||||
|
||||
def haveDelimeter(line):
|
||||
if ';' in line:
|
||||
return True
|
||||
else :
|
||||
return False
|
||||
|
||||
def checkSoloLine(line):
|
||||
if isABORTMSG(line):
|
||||
line = line.replace("ABORT_MSG", "ABORT");
|
||||
return handleCleanup(line), False
|
||||
elif isASSERT(line):
|
||||
return handleCleanup(line), False
|
||||
elif islog(line):
|
||||
return handleCleanup(line), False
|
||||
elif isPExecute(line):
|
||||
line = line.replace("PExecute", "Execute");
|
||||
return handleCleanup(line), False
|
||||
elif isPQuery(line):
|
||||
line = line.replace("PQuery", "Query");
|
||||
return handleCleanup(line), False
|
||||
elif isPAppend(line):
|
||||
line = line.replace("PAppend", "Append");
|
||||
return handleCleanup(line), False
|
||||
# elif isSendSysMessage(line):
|
||||
# return handleCleanup(line), False
|
||||
# elif isPSendSysMessage(line):
|
||||
# return handleCleanup(line), False
|
||||
# elif isStringFormat(line):
|
||||
# return handleCleanup(line), False
|
||||
else:
|
||||
return line, False
|
||||
|
||||
def startMultiLine(line):
|
||||
if isABORTMSG(line):
|
||||
line = line.replace("ABORT_MSG", "ABORT");
|
||||
return handleCleanup(line), True
|
||||
elif isASSERT(line):
|
||||
return handleCleanup(line), True
|
||||
elif islog(line):
|
||||
return handleCleanup(line), True
|
||||
# elif isSendSysMessage(line):
|
||||
# return handleCleanup(line), True
|
||||
# elif isPSendSysMessage(line):
|
||||
# return handleCleanup(line), True
|
||||
elif isPQuery(line):
|
||||
line = line.replace("PQuery", "Query");
|
||||
return handleCleanup(line), True
|
||||
elif isPExecute(line):
|
||||
line = line.replace("PExecute", "Execute");
|
||||
return handleCleanup(line), True
|
||||
elif isPAppend(line):
|
||||
line = line.replace("PAppend", "Append");
|
||||
return handleCleanup(line), True
|
||||
# elif isStringFormat(line):
|
||||
# return handleCleanup(line), True
|
||||
else :
|
||||
return line, False
|
||||
|
||||
def continueMultiLine(line, existPrevLine):
|
||||
if haveDelimeter(line):
|
||||
existPrevLine = False;
|
||||
return handleCleanup(line), existPrevLine
|
||||
|
||||
def checkTextLine(line, existPrevLine):
|
||||
if existPrevLine:
|
||||
return continueMultiLine(line, existPrevLine)
|
||||
else :
|
||||
if haveDelimeter(line):
|
||||
return checkSoloLine(line)
|
||||
else :
|
||||
return startMultiLine(line)
|
||||
|
||||
def handleCleanup(line):
|
||||
line = line.replace("%s", "{}");
|
||||
line = line.replace("%u", "{}");
|
||||
line = line.replace("%hu", "{}");
|
||||
line = line.replace("%lu", "{}");
|
||||
line = line.replace("%llu", "{}");
|
||||
line = line.replace("%zu", "{}");
|
||||
line = line.replace("%02u", "{:02}");
|
||||
line = line.replace("%03u", "{:03}");
|
||||
line = line.replace("%04u", "{:04}");
|
||||
line = line.replace("%05u", "{:05}");
|
||||
line = line.replace("%02i", "{:02}");
|
||||
line = line.replace("%03i", "{:03}");
|
||||
line = line.replace("%04i", "{:04}");
|
||||
line = line.replace("%05i", "{:05}");
|
||||
line = line.replace("%02d", "{:02}");
|
||||
line = line.replace("%03d", "{:03}");
|
||||
line = line.replace("%04d", "{:04}");
|
||||
line = line.replace("%05d", "{:05}");
|
||||
line = line.replace("%d", "{}");
|
||||
line = line.replace("%i", "{}");
|
||||
line = line.replace("%x", "{:x}");
|
||||
line = line.replace("%X", "{:X}");
|
||||
line = line.replace("%lx", "{:x}");
|
||||
line = line.replace("%lX", "{:X}");
|
||||
line = line.replace("%02X", "{:02X}");
|
||||
line = line.replace("%08X", "{:08X}");
|
||||
line = line.replace("%f", "{}");
|
||||
line = line.replace("%.1f", "{0:.1f}");
|
||||
line = line.replace("%.2f", "{0:.2f}");
|
||||
line = line.replace("%.3f", "{0:.3f}");
|
||||
line = line.replace("%.4f", "{0:.4f}");
|
||||
line = line.replace("%.5f", "{0:.5f}");
|
||||
line = line.replace("%3.1f", "{:3.1f}");
|
||||
line = line.replace("%%", "%");
|
||||
line = line.replace(".c_str()", "");
|
||||
line = line.replace("\" SZFMTD \"", "{}");
|
||||
line = line.replace("\" UI64FMTD \"", "{}");
|
||||
# line = line.replace("\" STRING_VIEW_FMT \"", "{}");
|
||||
# line = line.replace("STRING_VIEW_FMT_ARG", "");
|
||||
return line
|
||||
|
||||
def getDefaultfile(name):
|
||||
file1 = open(name, "r+", encoding="utf8", errors='replace')
|
||||
|
||||
result = ''
|
||||
|
||||
while True:
|
||||
line = file1.readline()
|
||||
|
||||
if not line:
|
||||
break
|
||||
|
||||
result += line
|
||||
|
||||
file1.close()
|
||||
return result
|
||||
|
||||
def getModifiedfile(name):
|
||||
file1 = open(name, "r+", encoding="utf8", errors='replace')
|
||||
|
||||
prevLines = False
|
||||
result = ''
|
||||
|
||||
while True:
|
||||
line = file1.readline()
|
||||
|
||||
if not line:
|
||||
break
|
||||
|
||||
line, prevLines = checkTextLine(line, prevLines)
|
||||
result += line
|
||||
|
||||
file1.close()
|
||||
return result
|
||||
|
||||
def updModifiedfile(name, text):
|
||||
file = open(name, "w", encoding="utf8", errors='replace')
|
||||
file.write(text)
|
||||
file.close()
|
||||
|
||||
def handlefile(name):
|
||||
oldtext = getDefaultfile(name)
|
||||
newtext = getModifiedfile(name)
|
||||
|
||||
if oldtext != newtext:
|
||||
updModifiedfile(name, newtext)
|
||||
|
||||
p = pathlib.Path('.')
|
||||
for i in p.glob('**/*'):
|
||||
fname = i.absolute()
|
||||
if '.cpp' in i.name:
|
||||
handlefile(fname)
|
||||
if '.h' in i.name:
|
||||
handlefile(fname)
|
7
apps/account-create/.formatter.exs
Normal file
7
apps/account-create/.formatter.exs
Normal file
@ -0,0 +1,7 @@
# Used by "mix format"
[
  inputs: [
    ".formatter.exs",
    "account.exs"
  ]
]
10
apps/account-create/Dockerfile
Normal file
10
apps/account-create/Dockerfile
Normal file
@ -0,0 +1,10 @@
FROM elixir:1.14-slim

RUN mix local.hex --force && \
    mix local.rebar --force

COPY account.exs /account.exs
COPY srp.exs /srp.exs
RUN chmod +x /account.exs

CMD /account.exs
102
apps/account-create/README.md
Normal file
102
apps/account-create/README.md
Normal file
@ -0,0 +1,102 @@
# Account.exs

Simple script to create an account for AzerothCore

This script allows a server admin to create a user automatically after the `dbimport` tool runs, without needing to open the `worldserver` console.

## How To Use

### Pre-requisites

- MySQL is running
- The authserver database (`acore_auth`, typically) has the `account` and `account_access` tables

### Running

```bash
$ elixir account.exs
```

### Configuration

This script reads from environment variables in order to control which account it creates and the MySQL server it's communicating with.

- `ACORE_USERNAME` Username for account, default "admin"
- `ACORE_PASSWORD` Password for account, default "admin"
- `ACORE_GM_LEVEL` GM Level for account, default 3
- `MYSQL_DATABASE` Database name, default "acore_auth"
- `MYSQL_USERNAME` MySQL username, default "root"
- `MYSQL_PASSWORD` MySQL password, default "password"
- `MYSQL_PORT` MySQL Port, default 3306
- `MYSQL_HOST` MySQL Host, default "localhost"

To use these environment variables, execute the script like so:

```bash
$ MYSQL_HOST=mysql \
  MYSQL_PASSWORD="fourthehoard" \
  ACORE_USERNAME=drekthar \
  ACORE_PASSWORD=securepass22 \
  elixir account.exs
```

This can also be used in a loop. Consider this CSV file:

```csv
user,pass,gm_level
admin,adminpass,2
soapuser,soappass,3
mainuser,userpass,0
```

You can then loop over this CSV file and manage users like so:

```bash
$ while IFS=, read -r user pass gm; do
    ACORE_USERNAME=$user \
    ACORE_PASSWORD=$pass \
    ACORE_GM_LEVEL=$gm \
    elixir account.exs
  done <<< $(tail -n '+2' users.csv)
```

### Docker

Running and building with Docker is simple:

```bash
$ docker build -t acore/account-create .
$ docker run \
  -e MYSQL_HOST=mysql \
  -v mix_cache:/root/.cache/mix/installs \
  acore/account-create
```

Note that `MYSQL_HOST` is required to be set when using the Docker container, as the default setting targets `localhost`.

### docker-compose

A simple way to integrate this into a docker-compose file is shown below.

This is why I wrote this script: an automatic way to have an admin account idempotently created on startup of the server.

```yaml
services:
  account-create:
    image: acore/account-create:${DOCKER_IMAGE_TAG:-master}
    build:
      context: apps/account-create/
      dockerfile: apps/account-create/Dockerfile
    environment:
      MYSQL_HOST: ac-database
      MYSQL_PASSWORD: ${DOCKER_DB_ROOT_PASSWORD:-password}
      ACORE_USERNAME: ${ACORE_ROOT_ADMIN_ACCOUNT:-admin}
      ACORE_PASSWORD: ${ACORE_ROOT_ADMIN_PASSWORD:-password}
    volumes:
      - mix_cache:/root/.cache/mix/installs
    profiles: [local, app, db-import-local]
    depends_on:
      ac-db-import:
        condition: service_completed_successfully
```
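
### Verifying the result

A quick way to confirm what the script wrote is to query MySQL directly. This is only a minimal sketch, not part of the script itself: it assumes the `mysql` client is available, the default `acore_auth` schema, and the `drekthar` account from the example above; adjust the credentials to your setup.

```bash
# Hypothetical check: show the stored account row and its GM level
mysql -h "${MYSQL_HOST:-localhost}" -P "${MYSQL_PORT:-3306}" \
      -u "${MYSQL_USERNAME:-root}" -p"${MYSQL_PASSWORD:-password}" \
      "${MYSQL_DATABASE:-acore_auth}" \
      -e "SELECT a.id, a.username, aa.gmlevel
          FROM account a
          LEFT JOIN account_access aa ON aa.id = a.id
          WHERE a.username = UPPER('drekthar');"
```

The query mirrors what `account.exs` writes: the `account` row stores the username, salt, and verifier, while `account_access` stores the GM level keyed by the account id.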
134
apps/account-create/account.exs
Normal file
134
apps/account-create/account.exs
Normal file
@ -0,0 +1,134 @@
|
||||
#!/usr/bin/env elixir
|
||||
# Execute this Elixir script with the below command
|
||||
#
|
||||
# $ ACORE_USERNAME=foo ACORE_PASSWORD=barbaz123 elixir account.exs
|
||||
#
|
||||
# Set environment variables for basic configuration
|
||||
#
|
||||
# ACORE_USERNAME - Username for account, default "admin"
|
||||
# ACORE_PASSWORD - Password for account, default "admin"
|
||||
# ACORE_GM_LEVEL - GM level for account
|
||||
# MYSQL_DATABASE - Database name, default "acore_auth"
|
||||
# MYSQL_USERNAME - MySQL username, default "root"
|
||||
# MYSQL_PASSWORD - MySQL password, default "password"
|
||||
# MYSQL_PORT - MySQL Port, default 3306
|
||||
# MYSQL_HOST - MySQL Host, default "localhost"
|
||||
|
||||
# Install remote dependencies
|
||||
[
|
||||
{:myxql, "~> 0.6.0"}
|
||||
]
|
||||
|> Mix.install()
|
||||
|
||||
# Start the logger
|
||||
Application.start(:logger)
|
||||
require Logger
|
||||
|
||||
# Constants
|
||||
default_credential = "admin"
|
||||
default_gm_level = "3"
|
||||
account_access_comment = "Managed via account-create script"
|
||||
|
||||
# Import srp functions
|
||||
Code.require_file("srp.exs", Path.absname(__DIR__))
|
||||
|
||||
# Assume operator provided a "human-readable" name.
|
||||
# The database stores usernames in all caps
|
||||
username_lower =
|
||||
System.get_env("ACORE_USERNAME", default_credential)
|
||||
|> tap(&Logger.info("Account to create: #{&1}"))
|
||||
|
||||
username = String.upcase(username_lower)
|
||||
|
||||
password = System.get_env("ACORE_PASSWORD", default_credential)
|
||||
|
||||
gm_level = System.get_env("ACORE_GM_LEVEL", default_gm_level) |> String.to_integer()
|
||||
|
||||
if Range.new(0, 3) |> Enum.member?(gm_level) |> Kernel.not do
|
||||
Logger.info("Valid ACORE_GM_LEVEL values are 0, 1, 2, and 3. The given value was: #{gm_level}.")
|
||||
end
|
||||
|
||||
{:ok, pid} =
|
||||
MyXQL.start_link(
|
||||
protocol: :tcp,
|
||||
database: System.get_env("MYSQL_DATABASE", "acore_auth"),
|
||||
username: System.get_env("MYSQL_USERNAME", "root"),
|
||||
password: System.get_env("MYSQL_PASSWORD", "password"),
|
||||
port: System.get_env("MYSQL_PORT", "3306") |> String.to_integer(),
|
||||
hostname: System.get_env("MYSQL_HOST", "localhost")
|
||||
)
|
||||
|
||||
Logger.info("MySQL connection created")
|
||||
|
||||
Logger.info("Checking database for user #{username_lower}")
|
||||
|
||||
# Check if user already exists in database
|
||||
{:ok, result} = MyXQL.query(pid, "SELECT salt FROM account WHERE username=?", [username])
|
||||
|
||||
%{salt: salt, verifier: verifier} =
|
||||
case result do
|
||||
%{rows: [[salt | _] | _]} ->
|
||||
Logger.info("Salt for #{username_lower} found in database")
|
||||
# re-use the salt if the user exists in database
|
||||
Srp.generate_stored_values(username, password, salt)
|
||||
_ ->
|
||||
Logger.info("Salt not found in database for #{username_lower}. Generating a new one")
|
||||
Srp.generate_stored_values(username, password)
|
||||
end
|
||||
|
||||
Logger.info("New salt and verifier generated")
|
||||
|
||||
# Insert values into DB, replacing the verifier if the user already exists
|
||||
result =
|
||||
MyXQL.query(
|
||||
pid,
|
||||
"""
|
||||
INSERT INTO account
|
||||
(`username`, `salt`, `verifier`)
|
||||
VALUES
|
||||
(?, ?, ?)
|
||||
ON DUPLICATE KEY UPDATE verifier=?
|
||||
""",
|
||||
[username, salt, verifier, verifier]
|
||||
)
|
||||
|
||||
case result do
|
||||
{:error, %{message: message}} ->
|
||||
File.write("fail.log", message)
|
||||
|
||||
Logger.info(
|
||||
"Account #{username_lower} failed to create. You can check the error message at fail.log."
|
||||
)
|
||||
|
||||
exit({:shutdown, 1})
|
||||
|
||||
# if num_rows changed and last_insert_id == 0, it means the verifier matched. No change necessary
|
||||
{:ok, %{num_rows: 1, last_insert_id: 0}} ->
|
||||
Logger.info(
|
||||
"Account #{username_lower} doesn't need to have its' password changed. You should be able to log in with that account"
|
||||
)
|
||||
|
||||
{:ok, %{num_rows: 1}} ->
|
||||
Logger.info(
|
||||
"Account #{username_lower} has been created. You should now be able to login with that account"
|
||||
)
|
||||
|
||||
{:ok, %{num_rows: 2}} ->
|
||||
Logger.info(
|
||||
"Account #{username_lower} has had its' password reset. You should now be able to login with that account"
|
||||
)
|
||||
end
|
||||
|
||||
# Set GM level to configured value
|
||||
{:ok, _} =
|
||||
MyXQL.query(
|
||||
pid,
|
||||
"""
|
||||
INSERT INTO account_access
|
||||
(`id`, `gmlevel`, `comment`)
|
||||
VALUES
|
||||
((SELECT id FROM account WHERE username=?), ?, ?)
|
||||
ON DUPLICATE KEY UPDATE gmlevel=?, comment=?
|
||||
""", [username, gm_level, account_access_comment, gm_level, account_access_comment])
|
||||
|
||||
Logger.info("GM Level for #{username_lower} set to #{gm_level}")
|
59
apps/account-create/srp.exs
Normal file
59
apps/account-create/srp.exs
Normal file
@ -0,0 +1,59 @@
|
||||
defmodule Srp do
|
||||
# Constants required in WoW's SRP6 implementation
|
||||
@n <<137, 75, 100, 94, 137, 225, 83, 91, 189, 173, 91, 139, 41, 6, 80, 83, 8, 1, 177, 142, 191,
|
||||
191, 94, 143, 171, 60, 130, 135, 42, 62, 155, 183>>
|
||||
@g <<7>>
|
||||
@field_length 32
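
# Stored-value math, matching calculate_x/3 and calculate_v/1 below:
#   x        = SHA1(salt <> SHA1(UPPER(username) <> ":" <> UPPER(password))), byte-reversed
#   verifier = g^x mod N, byte-reversed and zero-padded to @field_length bytes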
|
||||
|
||||
# Wrapper function
|
||||
def generate_stored_values(username, password, salt \\ "") do
|
||||
default_state()
|
||||
|> generate_salt(salt)
|
||||
|> calculate_x(username, password)
|
||||
|> calculate_v()
|
||||
end
|
||||
|
||||
def default_state() do
|
||||
%{n: @n, g: @g}
|
||||
end
|
||||
|
||||
# Generate salt if it's not defined
|
||||
def generate_salt(state, "") do
|
||||
salt = :crypto.strong_rand_bytes(32)
|
||||
Map.merge(state, %{salt: salt})
|
||||
end
|
||||
|
||||
# Don't generate salt if it's already defined
|
||||
def generate_salt(state, salt) do
|
||||
padded_salt = pad_binary(salt)
|
||||
Map.merge(state, %{salt: padded_salt})
|
||||
end
|
||||
|
||||
# Get h1
|
||||
def calculate_x(state, username, password) do
|
||||
hash = :crypto.hash(:sha, String.upcase(username) <> ":" <> String.upcase(password))
|
||||
x = reverse(:crypto.hash(:sha, state.salt <> hash))
|
||||
Map.merge(state, %{x: x, username: username})
|
||||
end
|
||||
|
||||
# Get h2
|
||||
def calculate_v(state) do
|
||||
verifier =
|
||||
:crypto.mod_pow(state.g, state.x, state.n)
|
||||
|> reverse()
|
||||
|> pad_binary()
|
||||
|
||||
Map.merge(state, %{verifier: verifier})
|
||||
end
|
||||
|
||||
defp pad_binary(blob) do
|
||||
pad = @field_length - byte_size(blob)
|
||||
<<blob::binary, 0::pad*8>>
|
||||
end
|
||||
|
||||
defp reverse(binary) do
|
||||
binary
|
||||
|> :binary.decode_unsigned(:big)
|
||||
|> :binary.encode_unsigned(:little)
|
||||
end
|
||||
end
|
26
apps/bash_shared/common.sh
Normal file
26
apps/bash_shared/common.sh
Normal file
@ -0,0 +1,26 @@
function registerHooks() { acore_event_registerHooks "$@"; }
function runHooks() { acore_event_runHooks "$@"; }

source "$AC_PATH_CONF/dist/config.sh" # include dist to avoid missing conf variables

# first check if it's defined in env, otherwise use the default
USER_CONF_PATH=${USER_CONF_PATH:-"$AC_PATH_CONF/config.sh"}

if [ -f "$USER_CONF_PATH" ]; then
    source "$USER_CONF_PATH" # should overwrite previous
else
    echo "NOTICE: file <$USER_CONF_PATH> not found, using the default configuration only."
fi

#
# Load modules
#

for entry in "$AC_PATH_MODULES/"*/include.sh
do
    if [ -e "$entry" ]; then
        source "$entry"
    fi
done

ACORE_VERSION=$("$AC_PATH_DEPS/jsonpath/JSONPath.sh" -f "$AC_PATH_ROOT/acore.json" -b '$.version')
28
apps/bash_shared/defines.sh
Normal file
28
apps/bash_shared/defines.sh
Normal file
@ -0,0 +1,28 @@
unamestr=$(uname)
if [[ "$unamestr" == 'Darwin' ]]; then
    if ! command -v brew &>/dev/null ; then
        ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
    fi
    if ! [ "${BASH_VERSINFO}" -ge 4 ]; then
        brew install bash
    fi
    if ! command -v greadlink &>/dev/null ; then
        brew install coreutils
    fi
    AC_PATH_ROOT=$(greadlink -f "$AC_PATH_APPS/../")
else
    AC_PATH_ROOT=$(readlink -f "$AC_PATH_APPS/../")
fi

case $AC_PATH_ROOT in
    /*) AC_PATH_ROOT=$AC_PATH_ROOT;;
    *) AC_PATH_ROOT=$PWD/$AC_PATH_ROOT;;
esac

AC_PATH_CONF="$AC_PATH_ROOT/conf"

AC_PATH_MODULES="$AC_PATH_ROOT/modules"

AC_PATH_DEPS="$AC_PATH_ROOT/deps"

AC_PATH_VAR="$AC_PATH_ROOT/var"
16
apps/bash_shared/includes.sh
Normal file
16
apps/bash_shared/includes.sh
Normal file
@ -0,0 +1,16 @@
[[ ${GUARDYVAR:-} -eq 1 ]] && return || readonly GUARDYVAR=1 # include it once

# force default language for applications
LC_ALL=C

AC_PATH_APPS="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../" && pwd )"

AC_PATH_SHARED="$AC_PATH_APPS/bash_shared"

source "$AC_PATH_SHARED/defines.sh"

source "$AC_PATH_DEPS/acore/bash-lib/src/event/hooks.sh"

source "$AC_PATH_SHARED/common.sh"

[[ "$OSTYPE" = "msys" ]] && AC_BINPATH_FULL="$BINPATH" || AC_BINPATH_FULL="$BINPATH/bin"
40
apps/ci/ci-codestyle.sh
Normal file
40
apps/ci/ci-codestyle.sh
Normal file
@ -0,0 +1,40 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
echo "Codestyle check script:"
|
||||
echo
|
||||
|
||||
declare -A singleLineRegexChecks=(
|
||||
["LOG_.+GetCounter"]="Use ObjectGuid::ToString().c_str() method instead of ObjectGuid::GetCounter() when logging. Check the lines above"
|
||||
["[[:blank:]]$"]="Remove whitespace at the end of the lines above"
|
||||
["\t"]="Replace tabs with 4 spaces in the lines above"
|
||||
)
|
||||
|
||||
for check in ${!singleLineRegexChecks[@]}; do
|
||||
echo " Checking RegEx: '${check}'"
|
||||
|
||||
if grep -P -r -I -n ${check} src; then
|
||||
echo
|
||||
echo "${singleLineRegexChecks[$check]}"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
declare -A multiLineRegexChecks=(
|
||||
["LOG_[^;]+GetCounter"]="Use ObjectGuid::ToString().c_str() method instead of ObjectGuid::GetCounter() when logging. Check the lines above"
|
||||
["\n\n\n"]="Multiple blank lines detected, keep only one. Check the files above"
|
||||
)
|
||||
|
||||
for check in ${!multiLineRegexChecks[@]}; do
|
||||
echo " Checking RegEx: '${check}'"
|
||||
|
||||
if grep -Pzo -r -I ${check} src; then
|
||||
echo
|
||||
echo
|
||||
echo "${multiLineRegexChecks[$check]}"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
echo
|
||||
echo "Everything looks good"
|
8
apps/ci/ci-compile.sh
Normal file
8
apps/ci/ci-compile.sh
Normal file
@ -0,0 +1,8 @@
#!/bin/bash

set -e

echo "compile core"
export AC_CCACHE=true
./acore.sh "compiler" "all"
69
apps/ci/ci-conf-core.sh
Normal file
69
apps/ci/ci-conf-core.sh
Normal file
@ -0,0 +1,69 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
cat >>conf/config.sh <<CONFIG_SH
|
||||
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
|
||||
CWARNINGS=ON
|
||||
CDEBUG=OFF
|
||||
CTYPE=Release
|
||||
CTOOLS_BUILD=none
|
||||
CSCRIPTS=static
|
||||
CMODULES=static
|
||||
CBUILD_TESTING=ON
|
||||
CSCRIPTPCH=OFF
|
||||
CCOREPCH=OFF
|
||||
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
|
||||
CONFIG_SH
|
||||
|
||||
case $COMPILER in
|
||||
|
||||
# this is in order to use the "default" gcc version of the OS, without forcing a specific version
|
||||
"gcc" )
|
||||
time sudo apt-get install -y gcc g++
|
||||
echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"gcc8" )
|
||||
time sudo apt-get install -y gcc-8 g++-8
|
||||
echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"gcc10" )
|
||||
time sudo apt-get install -y gcc-10 g++-10
|
||||
echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
# this is in order to use the "default" clang version of the OS, without forcing a specific version
|
||||
"clang" )
|
||||
time sudo apt-get install -y clang
|
||||
echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang10" )
|
||||
time sudo apt-get install -y clang-10
|
||||
echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang11" )
|
||||
time sudo apt-get install -y clang-11
|
||||
echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang12" )
|
||||
time sudo apt-get install -y clang-12
|
||||
echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
* )
|
||||
echo "Unknown compiler $COMPILER"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
36
apps/ci/ci-conf-db.sh
Normal file
36
apps/ci/ci-conf-db.sh
Normal file
@ -0,0 +1,36 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
cat >>conf/config.sh <<CONFIG_SH
|
||||
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
|
||||
CWARNINGS=ON
|
||||
CDEBUG=OFF
|
||||
CTYPE=Release
|
||||
CAPPS_BUILD=none
|
||||
CTOOLS_BUILD=db-only
|
||||
CSCRIPTPCH=OFF
|
||||
CCOREPCH=OFF
|
||||
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
|
||||
CONFIG_SH
|
||||
|
||||
case $COMPILER in
|
||||
|
||||
# this is in order to use the "default" clang version of the OS, without forcing a specific version
|
||||
"clang" )
|
||||
time sudo apt-get install -y clang
|
||||
echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang12" )
|
||||
time sudo apt-get install -y clang-12
|
||||
echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
* )
|
||||
echo "Unknown compiler $COMPILER"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
67
apps/ci/ci-conf-tools.sh
Normal file
67
apps/ci/ci-conf-tools.sh
Normal file
@ -0,0 +1,67 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
cat >>conf/config.sh <<CONFIG_SH
|
||||
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
|
||||
CWARNINGS=ON
|
||||
CDEBUG=OFF
|
||||
CTYPE=Release
|
||||
CAPPS_BUILD=none
|
||||
CTOOLS_BUILD=maps-only
|
||||
CSCRIPTPCH=OFF
|
||||
CCOREPCH=OFF
|
||||
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
|
||||
CONFIG_SH
|
||||
|
||||
case $COMPILER in
|
||||
|
||||
# this is in order to use the "default" gcc version of the OS, without forcing a specific version
|
||||
"gcc" )
|
||||
time sudo apt-get install -y gcc g++
|
||||
echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"gcc8" )
|
||||
time sudo apt-get install -y gcc-8 g++-8
|
||||
echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"gcc10" )
|
||||
time sudo apt-get install -y gcc-10 g++-10
|
||||
echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
# this is in order to use the "default" clang version of the OS, without forcing a specific version
|
||||
"clang" )
|
||||
time sudo apt-get install -y clang
|
||||
echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang10" )
|
||||
time sudo apt-get install -y clang-10
|
||||
echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang11" )
|
||||
time sudo apt-get install -y clang-11
|
||||
echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang12" )
|
||||
time sudo apt-get install -y clang-12
|
||||
echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
* )
|
||||
echo "Unknown compiler $COMPILER"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
15
apps/ci/ci-dry-run.sh
Normal file
15
apps/ci/ci-dry-run.sh
Normal file
@ -0,0 +1,15 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
# Start mysql
|
||||
sudo systemctl start mysql
|
||||
|
||||
source "$CURRENT_PATH/ci-gen-server-conf-files.sh" $1 "etc" "bin" "root"
|
||||
|
||||
(cd ./env/dist/bin/ && timeout 5m ./$APP_NAME -dry-run)
|
||||
|
||||
# Stop mysql
|
||||
sudo systemctl stop mysql
|
18
apps/ci/ci-error-check.sh
Normal file
18
apps/ci/ci-error-check.sh
Normal file
@ -0,0 +1,18 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
ERRORS_FILE="./env/dist/bin/Errors.log";
|
||||
|
||||
echo "Checking Startup Errors"
|
||||
echo
|
||||
|
||||
if [[ -s ${ERRORS_FILE} ]]; then
|
||||
printf "The Errors.log file contains startup errors:\n\n";
|
||||
cat ${ERRORS_FILE};
|
||||
printf "\nPlease solve the startup errors listed above!\n";
|
||||
exit 1;
|
||||
else
|
||||
echo "> No startup errors found in Errors.log";
|
||||
fi
|
||||
|
||||
echo
|
||||
echo "Done"
|
15
apps/ci/ci-gen-server-conf-files.sh
Normal file
15
apps/ci/ci-gen-server-conf-files.sh
Normal file
@ -0,0 +1,15 @@
|
||||
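# Usage (this file is sourced, e.g. from ci-dry-run.sh):
#   source ci-gen-server-conf-files.sh <app name, e.g. worldserver> [config folder] [bin folder] [mysql root password]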
APP_NAME=$1
|
||||
CONFIG_FOLDER=${2:-"etc"}
|
||||
BIN_FOLDER=${3:-"bin"}
|
||||
MYSQL_ROOT_PASSWORD=${4:-""}
|
||||
|
||||
# copy dist files to conf files
|
||||
cp ./env/dist/$CONFIG_FOLDER/$APP_NAME.conf.dist ./env/dist/$CONFIG_FOLDER/$APP_NAME.conf
|
||||
|
||||
# replace login info
|
||||
sed -i "s/127.0.0.1;3306;acore;acore/localhost;3306;root;$MYSQL_ROOT_PASSWORD/" ./env/dist/$CONFIG_FOLDER/$APP_NAME.conf
|
||||
|
||||
if [[ $APP_NAME == "worldserver" ]]; then
|
||||
sed -i 's/DataDir = \".\"/DataDir = \".\/data"/' ./env/dist/$CONFIG_FOLDER/$APP_NAME.conf
|
||||
git clone --depth=1 --branch=master --single-branch https://github.com/ac-data/ac-data.git ./env/dist/$BIN_FOLDER/data
|
||||
fi
|
29
apps/ci/ci-install-modules.sh
Normal file
29
apps/ci/ci-install-modules.sh
Normal file
@ -0,0 +1,29 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
echo "install modules"
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-eluna.git modules/mod-eluna
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-autobalance.git modules/mod-autobalance
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-ah-bot.git modules/mod-ah-bot
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-anticheat.git modules/mod-anticheat
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-bg-item-reward.git modules/mod-bg-item-reward
|
||||
# NOTE: disabled because it causes DB error
|
||||
# git clone --depth=1 --branch=master https://github.com/azerothcore/mod-azerothshard.git modules/mod-azerothshard
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-cfbg.git modules/mod-cfbg
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-chat-transmitter modules/mod-chat-transmitter
|
||||
# NOTE: disabled because it causes DB error
|
||||
#git clone --depth=1 --branch=master https://github.com/azerothcore/mod-chromie-xp.git modules/mod-chromie-xp
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-cta-switch.git modules/mod-cta-switch
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-desertion-warnings.git modules/mod-desertion-warnings
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-duel-reset.git modules/mod-duel-reset
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-ip-tracker.git modules/mod-ip-tracker
|
||||
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-low-level-arena.git modules/mod-low-level-arena
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-low-level-rbg.git modules/mod-low-level-rbg
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-multi-client-check.git modules/mod-multi-client-check
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvp-titles.git modules/mod-pvp-titles
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvpstats-announcer.git modules/mod-pvpstats-announcer
|
||||
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-queue-list-cache.git modules/mod-queue-list-cache
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-server-auto-shutdown.git modules/mod-server-auto-shutdown
|
||||
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-transmog.git modules/mod-transmog
|
||||
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-progression-system.git modules/mod-progression-system
|
74
apps/ci/ci-install.sh
Normal file
74
apps/ci/ci-install.sh
Normal file
@ -0,0 +1,74 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
cat >>conf/config.sh <<CONFIG_SH
|
||||
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
|
||||
CWARNINGS=ON
|
||||
CDEBUG=OFF
|
||||
CTYPE=Release
|
||||
CSCRIPTS=static
|
||||
CBUILD_TESTING=ON
|
||||
CSERVERS=ON
|
||||
CTOOLS=ON
|
||||
CSCRIPTPCH=OFF
|
||||
CCOREPCH=OFF
|
||||
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
|
||||
CONFIG_SH
|
||||
|
||||
time sudo apt-get update -y
|
||||
# time sudo apt-get upgrade -y
|
||||
time sudo apt-get install -y git lsb-release sudo
|
||||
time ./acore.sh install-deps
|
||||
|
||||
case $COMPILER in
|
||||
|
||||
# this is in order to use the "default" gcc version of the OS, without forcing a specific version
|
||||
"gcc" )
|
||||
time sudo apt-get install -y gcc g++
|
||||
echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"gcc8" )
|
||||
time sudo apt-get install -y gcc-8 g++-8
|
||||
echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"gcc10" )
|
||||
time sudo apt-get install -y gcc-10 g++-10
|
||||
echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
# this is in order to use the "default" clang version of the OS, without forcing a specific version
|
||||
"clang" )
|
||||
time sudo apt-get install -y clang
|
||||
echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang10" )
|
||||
time sudo apt-get install -y clang-10
|
||||
echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang11" )
|
||||
time sudo apt-get install -y clang-11
|
||||
echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
"clang12" )
|
||||
time sudo apt-get install -y clang-12
|
||||
echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
|
||||
echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
|
||||
;;
|
||||
|
||||
* )
|
||||
echo "Unknown compiler $COMPILER"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
54
apps/ci/ci-pending-changelogs.ts
Normal file
54
apps/ci/ci-pending-changelogs.ts
Normal file
@ -0,0 +1,54 @@
|
||||
import * as semver from "https://deno.land/x/semver/mod.ts";
|
||||
|
||||
// specify the needed paths here
|
||||
const CHANGELOG_PATH = "doc/changelog";
|
||||
const CHANGELOG_PENDING_PATH = `${CHANGELOG_PATH}/pendings`;
|
||||
const CHANGELOG_MASTER_FILE = `${CHANGELOG_PATH}/master.md`;
|
||||
const ACORE_JSON = "./acore.json";
|
||||
|
||||
// read the acore.json file to work with the versioning
|
||||
const decoder = new TextDecoder("utf-8");
|
||||
const data = await Deno.readFile(ACORE_JSON);
|
||||
const acoreInfo = JSON.parse(decoder.decode(data));
|
||||
|
||||
let changelogText = await Deno.readTextFile(CHANGELOG_MASTER_FILE);
|
||||
|
||||
const currentVersion = acoreInfo.version;
|
||||
|
||||
const res=Deno.run({ cmd: [ "git", "rev-parse",
|
||||
"HEAD"],
|
||||
stdout: 'piped',
|
||||
stderr: 'piped',
|
||||
stdin: 'null' });
|
||||
await res.status();
|
||||
const gitVersion = new TextDecoder().decode(await res.output()).trim();
|
||||
|
||||
|
||||
for await (const dirEntry of Deno.readDir(CHANGELOG_PENDING_PATH)) {
|
||||
if (!dirEntry.isFile || !dirEntry.name.endsWith(".md")) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Upgrade the prerelease version number (e.g. 1.0.0-dev.1 -> 1.0.0-dev.2)
|
||||
acoreInfo.version = semver.inc(acoreInfo.version, "prerelease", {
|
||||
includePrerelease: true,
|
||||
});
|
||||
|
||||
// read the pending file found and add it at the beginning of the changelog text
|
||||
const data = await Deno.readTextFile(
|
||||
`${CHANGELOG_PENDING_PATH}/${dirEntry.name}`,
|
||||
);
|
||||
changelogText = `## ${acoreInfo.version} | Commit: [${gitVersion}](https://github.com/azerothcore/azerothcore-wotlk/commit/${gitVersion})\n\n${data}\n${changelogText}`;
|
||||
|
||||
// remove the pending file
|
||||
await Deno.remove(`${CHANGELOG_PENDING_PATH}/${dirEntry.name}`);
|
||||
}
|
||||
|
||||
// write to acore.json and master.md only if new version is available
|
||||
if (currentVersion != acoreInfo.version) {
|
||||
console.log(`Changelog version upgraded from ${currentVersion} to ${acoreInfo.version}`)
|
||||
Deno.writeTextFile(CHANGELOG_MASTER_FILE, changelogText);
|
||||
Deno.writeTextFile(ACORE_JSON, JSON.stringify(acoreInfo, null, 2)+"\n");
|
||||
} else {
|
||||
console.log("No changelogs to add")
|
||||
}
|
84
apps/ci/ci-pending-sql.sh
Normal file
84
apps/ci/ci-pending-sql.sh
Normal file
@ -0,0 +1,84 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
source "$CURRENT_PATH/../bash_shared/includes.sh"
|
||||
|
||||
UPDATES_PATH="$AC_PATH_ROOT/data/sql/updates/"
|
||||
|
||||
COMMIT_HASH=
|
||||
|
||||
function import() {
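    # Illustrative example (hypothetical file names): with db "world", a pending
    # file data/sql/updates/pending_db_world/rev_1234567890.sql processed on
    # 2021_05_09, when the newest applied update is 2021_05_09_02.sql, becomes
    # data/sql/updates/db_world/2021_05_09_03.sql with a
    # "-- DB update 2021_05_09_02 -> 2021_05_09_03" header prepended.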
|
||||
db=$1
|
||||
folder="db_"$db
|
||||
pendingPath="$AC_PATH_ROOT/data/sql/updates/pending_$folder"
|
||||
updPath="$UPDATES_PATH/$folder"
|
||||
archivedPath="$AC_PATH_ROOT/data/sql/archive/$folder/6.x"
|
||||
|
||||
latestUpd=$(ls -1 $updPath/ | tail -n 1)
|
||||
|
||||
if [ -z $latestUpd ]; then
|
||||
latestUpd=$(ls -1 $archivedPath/ | tail -n 1)
|
||||
echo "> Last update file for db $db is missing! Using archived file" $latestUpd
|
||||
fi
|
||||
|
||||
dateToday=$(date +%Y_%m_%d)
|
||||
counter=0
|
||||
|
||||
dateLast=$latestUpd
|
||||
tmp=${dateLast#*_*_*_}
|
||||
oldCnt=${tmp%.sql}
|
||||
oldDate=${dateLast%_$tmp}
|
||||
|
||||
if [ "$oldDate" = "$dateToday" ]; then
|
||||
((counter=10#$oldCnt+1)) # the 10# prefix is needed to explicitly treat the value as base 10
|
||||
fi;
|
||||
|
||||
for entry in "$pendingPath"/*.sql
|
||||
do
|
||||
if [[ -e $entry ]]; then
|
||||
oldVer=$oldDate"_"$oldCnt
|
||||
|
||||
cnt=$(printf -v counter "%02d" $counter ; echo $counter)
|
||||
|
||||
newVer=$dateToday"_"$cnt
|
||||
|
||||
newFile="$updPath/"$dateToday"_"$cnt".sql"
|
||||
|
||||
oldFile=$(basename "$entry")
|
||||
prefix=${oldFile%_*.sql}
|
||||
suffix=${oldFile#rev_}
|
||||
rev=${suffix%.sql}
|
||||
|
||||
isRev=0
|
||||
if [[ $prefix = "rev" && $rev =~ ^-?[0-9]+$ ]]; then
|
||||
isRev=1
|
||||
fi
|
||||
|
||||
echo "-- DB update $oldVer -> $newVer" > "$newFile";
|
||||
|
||||
cat $entry >> "$newFile";
|
||||
|
||||
currentHash="$(git log --diff-filter=A "$entry" | grep "^commit " | sed -e 's/commit //')"
|
||||
|
||||
if [[ "$COMMIT_HASH" != *"$currentHash"* ]]
|
||||
then
|
||||
COMMIT_HASH="$COMMIT_HASH $currentHash"
|
||||
fi
|
||||
|
||||
rm $entry;
|
||||
|
||||
oldDate=$dateToday
|
||||
oldCnt=$cnt
|
||||
|
||||
((counter+=1))
|
||||
fi
|
||||
done
|
||||
|
||||
}
|
||||
|
||||
import "world"
|
||||
import "characters"
|
||||
import "auth"
|
||||
|
||||
echo "Done."
|
36
apps/ci/ci-pending.sh
Normal file
@ -0,0 +1,36 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
echo "Pending SQL check script:"
|
||||
echo
|
||||
|
||||
# We want to ensure the end of file has a semicolon and doesn't have extra
|
||||
# newlines
|
||||
find data/sql/updates/pending* -name "*.sql" -type f | while read -r file; do
|
||||
# The first sed script collapses all strings into an empty string. The
|
||||
# contents of strings aren't necessary for this check and it's still valid
|
||||
# sql.
|
||||
#
|
||||
# The second rule removes sql comments.
|
||||
ERR_AT_EOF="$(sed -e "s/'.*'/''/g" -e 's/ --([^-])*$//' "$file" | tr -d '\n ' | tail -c 1)"
|
||||
if [[ "$ERR_AT_EOF" != ";" ]]; then
|
||||
echo "Missing Semicolon (;) or multiple newlines at the end of the file."
|
||||
exit 1
|
||||
else
|
||||
echo "> Semicolon check - OK"
|
||||
fi
|
||||
done
|
||||
|
||||
find data/sql/updates/pending* -name "*.sql" -type f | while read -r file; do
|
||||
if sed "s/'.*'\(.*\)/\1/g" "$file" | grep -q -i -E "broadcast_text"; then
|
||||
echo "> broadcast_text check - Failed"
|
||||
echo " - DON'T EDIT broadcast_text TABLE UNLESS YOU KNOW WHAT YOU ARE DOING!"
|
||||
echo " - This error can safely be ignored if the changes are approved to be sniffed."
|
||||
exit 1
|
||||
else
|
||||
echo "> broadcast_text check - OK"
|
||||
fi
|
||||
done
|
||||
|
||||
echo
|
||||
echo "Everything looks good"
|
3
apps/ci/ci-run-unit-tests.sh
Normal file
@ -0,0 +1,3 @@
|
||||
#!/bin/bash
|
||||
|
||||
time var/build/obj/src/test/unit_tests
|
36
apps/ci/mac/ci-compile.sh
Normal file
@ -0,0 +1,36 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
export OPENSSL_ROOT_DIR=$(brew --prefix openssl@3)
|
||||
|
||||
export CCACHE_CPP2=true
|
||||
export CCACHE_MAXSIZE='500M'
|
||||
export CCACHE_COMPRESS=1
|
||||
export CCACHE_COMPRESSLEVEL=9
|
||||
ccache -s
|
||||
|
||||
cd var/build/obj
|
||||
|
||||
time cmake ../../../ \
|
||||
-DTOOLS=1 \
|
||||
-DBUILD_TESTING=1 \
|
||||
-DSCRIPTS=static \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DMYSQL_ADD_INCLUDE_PATH=/usr/local/include \
|
||||
-DMYSQL_LIBRARY=/usr/local/lib/libmysqlclient.dylib \
|
||||
-DREADLINE_INCLUDE_DIR=/usr/local/opt/readline/include \
|
||||
-DREADLINE_LIBRARY=/usr/local/opt/readline/lib/libreadline.dylib \
|
||||
-DOPENSSL_INCLUDE_DIR="$OPENSSL_ROOT_DIR/include" \
|
||||
-DOPENSSL_SSL_LIBRARIES="$OPENSSL_ROOT_DIR/lib/libssl.dylib" \
|
||||
-DOPENSSL_CRYPTO_LIBRARIES="$OPENSSL_ROOT_DIR/lib/libcrypto.dylib" \
|
||||
-DWITH_WARNINGS=1 \
|
||||
-DCMAKE_C_FLAGS="-Werror" \
|
||||
-DCMAKE_CXX_FLAGS="-Werror" \
|
||||
-DCMAKE_C_COMPILER_LAUNCHER=ccache \
|
||||
-DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
|
||||
-DUSE_SCRIPTPCH=0 \
|
||||
-DUSE_COREPCH=0 \
|
||||
;
|
||||
|
||||
time make -j $(($(sysctl -n hw.ncpu ) + 2))
|
||||
|
||||
ccache -s
|
2
apps/compiler/.gitignore
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
config.sh
|
||||
|
32
apps/compiler/README.md
Normal file
@ -0,0 +1,32 @@
## How to compile:

First of all, if you need a custom configuration, copy
/conf/dist/config.sh to /conf/config.sh and configure it.

* for a "clean" compilation you must run all scripts in their order:

./1-clean.sh
./2-configure.sh
./3-build.sh

* if you added/renamed/deleted some sources and need to recompile them, you have to run:

./2-configure.sh
./3-build.sh

* if you have modified code only, you just need to run:

./3-build.sh


## compiler.sh

The compiler.sh script contains an interactive menu to clean/compile/build. You can also run an action directly from the command line by specifying the option.
Ex:
./compiler.sh 3

It will start the build process (it is equivalent to ./3-build.sh)

## Note:

For an optimal development process and a **much faster** compilation time, it is suggested to use clang instead of gcc (see the sketch below for pointing the build at clang).
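A minimal sketch of such an override, assuming the `CCOMPILERC`/`CCOMPILERCXX` and `AC_CCACHE` variables consumed by `apps/compiler/includes/functions.sh` in this commit, and assuming clang lives under /usr/bin (adjust the paths to your toolchain):

```bash
# conf/config.sh - local overrides (copied from /conf/dist/config.sh)
# The clang paths are assumptions; point them at your installed toolchain.
CCOMPILERC="/usr/bin/clang"
CCOMPILERCXX="/usr/bin/clang++"
AC_CCACHE=true   # optional: let comp_ccacheEnable() wire up ccache as well
```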
76
apps/compiler/compiler.sh
Normal file
@ -0,0 +1,76 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
source "$CURRENT_PATH/includes/includes.sh"
|
||||
|
||||
function run_option() {
|
||||
re='^[0-9]+$'
|
||||
if [[ $1 =~ $re ]] && test "${comp_functions[$1-1]+'test'}"; then
|
||||
${comp_functions[$1-1]}
|
||||
elif [ -n "$(type -t comp_$1)" ] && [ "$(type -t comp_$1)" = function ]; then
|
||||
fun="comp_$1"
|
||||
$fun
|
||||
else
|
||||
echo "invalid option, use --help option for the commands list"
|
||||
fi
|
||||
}
|
||||
|
||||
function comp_quit() {
|
||||
exit 0
|
||||
}
|
||||
|
||||
comp_options=(
|
||||
"build: Configure and compile"
|
||||
"clean: Clean build files"
|
||||
"configure: Run CMake"
|
||||
"compile: Compile only"
|
||||
"all: clean, configure and compile"
|
||||
"ccacheClean: Clean ccache files, normally not needed"
|
||||
"ccacheShowStats: show ccache statistics"
|
||||
"quit: Close this menu")
|
||||
comp_functions=(
|
||||
"comp_build"
|
||||
"comp_clean"
|
||||
"comp_configure"
|
||||
"comp_compile"
|
||||
"comp_all"
|
||||
"comp_ccacheClean"
|
||||
"comp_ccacheShowStats"
|
||||
"comp_quit")
|
||||
|
||||
PS3='[ Please enter your choice ]: '
|
||||
|
||||
runHooks "ON_AFTER_OPTIONS" #you can create your custom options
|
||||
|
||||
function _switch() {
|
||||
_reply="$1"
|
||||
_opt="$2"
|
||||
|
||||
case $_reply in
|
||||
""|"--help")
|
||||
echo "Available commands:"
|
||||
printf '%s\n' "${comp_options[@]}"
|
||||
;;
|
||||
*)
|
||||
run_option $_reply $_opt
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
|
||||
while true
|
||||
do
|
||||
# run option directly if specified in argument
|
||||
[ ! -z $1 ] && _switch $@
|
||||
[ ! -z $1 ] && exit 0
|
||||
|
||||
select opt in "${comp_options[@]}"
|
||||
do
|
||||
echo "==== ACORE COMPILER ===="
|
||||
_switch $REPLY
|
||||
break;
|
||||
done
|
||||
done
|
7
apps/compiler/includes/defines.sh
Normal file
@ -0,0 +1,7 @@
|
||||
# you can choose build type from cmd argument
|
||||
if [ ! -z $1 ]
|
||||
then
|
||||
CCTYPE=$1
|
||||
CCTYPE=${CCTYPE^} # capitalize first letter if it's not yet
|
||||
fi
|
||||
|
170
apps/compiler/includes/functions.sh
Normal file
@ -0,0 +1,170 @@
|
||||
|
||||
function comp_clean() {
|
||||
DIRTOCLEAN=${BUILDPATH:-var/build/obj}
|
||||
PATTERN="$DIRTOCLEAN/*"
|
||||
|
||||
echo "Cleaning build files in $DIRTOCLEAN"
|
||||
|
||||
[ -d "$DIRTOCLEAN" ] && rm -rf $PATTERN
|
||||
}
|
||||
|
||||
function comp_ccacheEnable() {
|
||||
[ "$AC_CCACHE" != true ] && return
|
||||
|
||||
export CCACHE_MAXSIZE=${CCACHE_MAXSIZE:-'1000MB'}
|
||||
#export CCACHE_DEPEND=true
|
||||
export CCACHE_SLOPPINESS=${CCACHE_SLOPPINESS:-pch_defines,time_macros,include_file_mtime}
|
||||
export CCACHE_CPP2=${CCACHE_CPP2:-true} # optimization for clang
|
||||
export CCACHE_COMPRESS=${CCACHE_COMPRESS:-1}
|
||||
export CCACHE_COMPRESSLEVEL=${CCACHE_COMPRESSLEVEL:-9}
|
||||
export CCACHE_COMPILERCHECK=${CCACHE_COMPILERCHECK:-content}
|
||||
export CCACHE_LOGFILE=${CCACHE_LOGFILE:-"$CCACHE_DIR/cache.debug"}
|
||||
#export CCACHE_NODIRECT=true
|
||||
|
||||
export CCUSTOMOPTIONS="$CCUSTOMOPTIONS -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
|
||||
}
|
||||
|
||||
function comp_ccacheClean() {
|
||||
[ "$AC_CCACHE" != true ] && echo "ccache is disabled" && return
|
||||
|
||||
echo "Cleaning ccache"
|
||||
ccache -C
|
||||
ccache -s
|
||||
}
|
||||
|
||||
function comp_ccacheResetStats() {
|
||||
[ "$AC_CCACHE" != true ] && return
|
||||
|
||||
ccache -zc
|
||||
}
|
||||
|
||||
function comp_ccacheShowStats() {
|
||||
[ "$AC_CCACHE" != true ] && return
|
||||
|
||||
ccache -s
|
||||
}
|
||||
|
||||
function comp_configure() {
|
||||
CWD=$(pwd)
|
||||
|
||||
cd $BUILDPATH
|
||||
|
||||
echo "Build path: $BUILDPATH"
|
||||
echo "DEBUG info: $CDEBUG"
|
||||
echo "Compilation type: $CTYPE"
|
||||
echo "CCache: $AC_CCACHE"
|
||||
# -DCMAKE_BUILD_TYPE=$CCTYPE disable optimization "slow and huge amount of ram"
|
||||
# -DWITH_COREDEBUG=$CDEBUG compiled with debug information
|
||||
|
||||
#-DSCRIPTS_COMMANDS=$CSCRIPTS -DSCRIPTS_CUSTOM=$CSCRIPTS -DSCRIPTS_EASTERNKINGDOMS=$CSCRIPTS -DSCRIPTS_EVENTS=$CSCRIPTS -DSCRIPTS_KALIMDOR=$CSCRIPTS \
|
||||
#-DSCRIPTS_NORTHREND=$CSCRIPTS -DSCRIPTS_OUTDOORPVP=$CSCRIPTS -DSCRIPTS_OUTLAND=$CSCRIPTS -DSCRIPTS_PET=$CSCRIPTS -DSCRIPTS_SPELLS=$CSCRIPTS -DSCRIPTS_WORLD=$CSCRIPTS \
|
||||
#-DAC_WITH_UNIT_TEST=$CAC_UNIT_TEST -DAC_WITH_PLUGINS=$CAC_PLG \
|
||||
|
||||
local DCONF=""
|
||||
if [ ! -z "$CONFDIR" ]; then
|
||||
DCONF="-DCONF_DIR=$CONFDIR"
|
||||
fi
|
||||
|
||||
comp_ccacheEnable
|
||||
|
||||
OSOPTIONS=""
|
||||
|
||||
|
||||
echo "Platform: $OSTYPE"
|
||||
case "$OSTYPE" in
|
||||
darwin*)
|
||||
OSOPTIONS=" -DMYSQL_ADD_INCLUDE_PATH=/usr/local/include -DMYSQL_LIBRARY=/usr/local/lib/libmysqlclient.dylib -DREADLINE_INCLUDE_DIR=/usr/local/opt/readline/include -DREADLINE_LIBRARY=/usr/local/opt/readline/lib/libreadline.dylib -DOPENSSL_INCLUDE_DIR=/usr/local/opt/openssl@3/include -DOPENSSL_SSL_LIBRARIES=/usr/local/opt/openssl@3/lib/libssl.dylib -DOPENSSL_CRYPTO_LIBRARIES=/usr/local/opt/openssl@3/lib/libcrypto.dylib "
|
||||
;;
|
||||
msys*)
|
||||
OSOPTIONS=" -DMYSQL_INCLUDE_DIR=C:\tools\mysql\current\include -DMYSQL_LIBRARY=C:\tools\mysql\current\lib\mysqlclient.lib "
|
||||
;;
|
||||
esac
|
||||
|
||||
cmake $SRCPATH -DCMAKE_INSTALL_PREFIX=$BINPATH $DCONF \
|
||||
-DAPPS_BUILD=$CAPPS_BUILD \
|
||||
-DTOOLS_BUILD=$CTOOLS_BUILD \
|
||||
-DSCRIPTS=$CSCRIPTS \
|
||||
-DMODULES=$CMODULES \
|
||||
-DBUILD_TESTING=$CBUILD_TESTING \
|
||||
-DUSE_SCRIPTPCH=$CSCRIPTPCH \
|
||||
-DUSE_COREPCH=$CCOREPCH \
|
||||
-DCMAKE_BUILD_TYPE=$CTYPE \
|
||||
-DWITH_WARNINGS=$CWARNINGS \
|
||||
-DCMAKE_C_COMPILER=$CCOMPILERC \
|
||||
-DCMAKE_CXX_COMPILER=$CCOMPILERCXX \
|
||||
$CBUILD_APPS_LIST $CBUILD_TOOLS_LIST $OSOPTIONS $CCUSTOMOPTIONS
|
||||
|
||||
cd $CWD
|
||||
|
||||
runHooks "ON_AFTER_CONFIG"
|
||||
}
|
||||
|
||||
function comp_compile() {
|
||||
[ $MTHREADS == 0 ] && MTHREADS=$(grep -c ^processor /proc/cpuinfo) && MTHREADS=$(($MTHREADS + 2))
|
||||
|
||||
echo "Using $MTHREADS threads"
|
||||
|
||||
pushd "$BUILDPATH" >> /dev/null || exit 1
|
||||
|
||||
comp_ccacheEnable
|
||||
|
||||
comp_ccacheResetStats
|
||||
|
||||
time cmake --build . --config $CTYPE -j $MTHREADS
|
||||
|
||||
comp_ccacheShowStats
|
||||
|
||||
echo "Platform: $OSTYPE"
|
||||
case "$OSTYPE" in
|
||||
msys*)
|
||||
cmake --install . --config $CTYPE
|
||||
|
||||
popd >> /dev/null || exit 1
|
||||
|
||||
echo "Done"
|
||||
;;
|
||||
linux*|darwin*)
|
||||
local confDir=${CONFDIR:-"$AC_BINPATH_FULL/../etc"}
|
||||
|
||||
# create the folders before installing to
|
||||
# set the current user and permissions
|
||||
echo "Creating $AC_BINPATH_FULL..."
|
||||
mkdir -p "$AC_BINPATH_FULL"
|
||||
echo "Creating $confDir..."
|
||||
mkdir -p "$confDir"
|
||||
|
||||
echo "Cmake install..."
|
||||
sudo cmake --install . --config $CTYPE
|
||||
|
||||
popd >> /dev/null || exit 1
|
||||
|
||||
# set the SUID bit on all applications
|
||||
echo "Setting permissions on binary files"
|
||||
find "$AC_BINPATH_FULL" -mindepth 1 -maxdepth 1 -type f -exec sudo chown root:root -- {} +
|
||||
find "$AC_BINPATH_FULL" -mindepth 1 -maxdepth 1 -type f -exec sudo chmod u+s -- {} +
|
||||
|
||||
if [[ -n "$DOCKER" ]]; then
|
||||
[[ -f "$confDir/worldserver.conf.dist" ]] && \
|
||||
cp -nv "$confDir/worldserver.conf.dist" "$confDir/worldserver.conf"
|
||||
[[ -f "$confDir/authserver.conf.dist" ]] && \
|
||||
cp -nv "$confDir/authserver.conf.dist" "$confDir/authserver.conf"
|
||||
[[ -f "$confDir/dbimport.conf.dist" ]] && \
|
||||
cp -nv "$confDir/dbimport.conf.dist" "$confDir/dbimport.conf"
|
||||
fi
|
||||
|
||||
echo "Done"
|
||||
;;
|
||||
esac
|
||||
|
||||
runHooks "ON_AFTER_BUILD"
|
||||
}
|
||||
|
||||
function comp_build() {
|
||||
comp_configure
|
||||
comp_compile
|
||||
}
|
||||
|
||||
function comp_all() {
|
||||
comp_clean
|
||||
comp_build
|
||||
}
|
23
apps/compiler/includes/includes.sh
Normal file
@ -0,0 +1,23 @@
|
||||
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
source "$CURRENT_PATH/../../bash_shared/includes.sh"
|
||||
|
||||
AC_PATH_COMPILER="$AC_PATH_APPS/compiler"
|
||||
|
||||
if [ -f "$AC_PATH_COMPILER/config.sh" ]; then
|
||||
source "$AC_PATH_COMPILER/config.sh" # should overwrite previous
|
||||
fi
|
||||
|
||||
function ac_on_after_build() {
|
||||
# move the run engine
|
||||
cp -rvf "$AC_PATH_APPS/startup-scripts/"* "$BINPATH"
|
||||
}
|
||||
|
||||
registerHooks "ON_AFTER_BUILD" ac_on_after_build
|
||||
|
||||
source "$AC_PATH_COMPILER/includes/defines.sh"
|
||||
|
||||
source "$AC_PATH_COMPILER/includes/functions.sh"
|
||||
|
||||
mkdir -p $BUILDPATH
|
||||
mkdir -p $BINPATH
|
22
apps/config-merger/README.md
Normal file
@ -0,0 +1,22 @@
# ==== PHP merger (index.php + merge.php) ====

This is a PHP script for merging a new .dist file with your existing .conf file (worldserver.conf.dist and authserver.conf.dist).

It uses sessions, so it is multi-user safe. Any options that were removed are added to the bottom of the file, commented out, just in case it removes something it shouldn't.
If you add your custom patch configs below "# Custom" they will be copied exactly as they are.

Your new config will be found under $basedir/session_id/newconfig.conf.merge

If you do not run a PHP server on your machine, you can read the guide ["How to execute PHP code using command line?"](https://www.geeksforgeeks.org/how-to-execute-php-code-using-command-line/) on geeksforgeeks.org.

```
php -S localhost:port -t E:\Azerothcore-wotlk\apps\config-merger\
```

Change port to an available port, e.g. 8000.

Then go to your browser and type:

```
localhost:8000/index.php
```
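If you prefer the command line to a browser, the same form fields handled by merge.php (`File1` for the .dist file, `File2` for your current .conf, `eol` for the line-ending style) can be posted directly. A minimal sketch, assuming the built-in PHP server from the snippet above is listening on port 8000 and both files sit in the current directory:

```bash
# Posts both config files to merge.php; the HTML response links to the merged
# file, which is written under merge/<session_id>/ next to the PHP scripts.
curl -F "File1=@worldserver.conf.dist" \
     -F "File2=@worldserver.conf" \
     -F "eol=1" \
     http://localhost:8000/merge.php
```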
44
apps/config-merger/index.php
Normal file
@ -0,0 +1,44 @@
|
||||
<?php
|
||||
/*
|
||||
* Project Name: Config File Merge For Mangos/Trinity/AzerothCore Server
|
||||
* Date: 01.01.2010 initial version (0.0.1a)
|
||||
* Author: Paradox
|
||||
* Copyright: Paradox
|
||||
* Email: iamparadox@netscape.net (paypal email)
|
||||
* License: GNU General Public License v2(GPL)
|
||||
*/
|
||||
?>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=windows-1251">
|
||||
<FORM enctype="multipart/form-data" ACTION="merge.php" METHOD="POST">
|
||||
Dist File (.conf.dist)
|
||||
<br />
|
||||
<INPUT name="File1" TYPE="file">
|
||||
<br />
|
||||
<br />
|
||||
Current Conf File (.conf)
|
||||
<br />
|
||||
<INPUT name="File2" TYPE="file">
|
||||
<br />
|
||||
<br />
|
||||
<INPUT TYPE=RADIO NAME="eol" VALUE="0" CHECKED >Windows -
|
||||
<INPUT TYPE=RADIO NAME="eol" VALUE="1" >UNIX/Linux
|
||||
<br />
|
||||
<br />
|
||||
<INPUT TYPE="submit" VALUE="Submit">
|
||||
<br />
|
||||
<br />
|
||||
If you have any custom settings, such as from patches,
|
||||
<br />
|
||||
make sure they are at the bottom of the file following
|
||||
<br />
|
||||
this block (add it if it's not there)
|
||||
<br />
|
||||
###############################################################################
|
||||
<br />
|
||||
# Custom
|
||||
<br />
|
||||
###############################################################################
|
||||
<br />
|
||||
<br />
|
||||
|
||||
</FORM>
|
179
apps/config-merger/merge.php
Normal file
@ -0,0 +1,179 @@
|
||||
<?php
|
||||
/*
|
||||
* Project Name: Config File Merge For Mangos/Trinity Server
|
||||
* Date: 01.01.2010 initial version (0.0.1a)
|
||||
* Author: Paradox
|
||||
* Copyright: Paradox
|
||||
* Email: iamparadox@netscape.net (paypal email)
|
||||
* License: GNU General Public License v2(GPL)
|
||||
*/
|
||||
|
||||
error_reporting(0);
|
||||
|
||||
if (!empty($_FILES['File1']) && !empty($_FILES['File2']))
|
||||
{
|
||||
session_id();
|
||||
session_start();
|
||||
$basedir = "merge";
|
||||
$eol = "\r\n";
|
||||
if ($_POST['eol'])
|
||||
$eol = "\n";
|
||||
else
|
||||
$eol = "\r\n";
|
||||
if (!file_exists($basedir))
|
||||
mkdir($basedir);
|
||||
if (!file_exists($basedir."/".session_id()))
|
||||
mkdir($basedir."/".session_id());
|
||||
$upload1 = $basedir."/".session_id()."/".basename($_FILES['File1']['name']);
|
||||
$upload2 = $basedir."/".session_id()."/".basename($_FILES['File2']['name']);
|
||||
|
||||
if (strpos($upload1, "worldserver") !== false)
|
||||
$newconfig = $basedir."/".session_id()."/worldserver.conf.merge";
|
||||
else if (strpos($upload1, "authserver") !== false)
|
||||
$newconfig = $basedir."/".session_id()."/authserver.conf.merge";
|
||||
else
|
||||
$newconfig = $basedir."/".session_id()."/UnkownConfigFile.conf.merge";
|
||||
|
||||
$out_file = fopen($newconfig, "w");
|
||||
$success = false;
|
||||
if (move_uploaded_file($_FILES['File1']['tmp_name'], $upload1))
|
||||
{
|
||||
$success = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
$success = false;
|
||||
}
|
||||
if (move_uploaded_file($_FILES['File2']['tmp_name'], $upload2))
|
||||
{
|
||||
$success = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
$success = false;
|
||||
}
|
||||
|
||||
if ($success)
|
||||
{
|
||||
$custom_found = false;
|
||||
$in_file1 = fopen($upload1,"r");
|
||||
$in_file2 = fopen($upload2,"r");
|
||||
$array1 = array();
|
||||
$array2 = array();
|
||||
$line = trim(fgets($in_file1));
|
||||
while (!feof($in_file1))
|
||||
{
|
||||
if ((substr($line,0,1) != '#' && substr($line,0,1) != ''))
|
||||
{
|
||||
list($key, $val) = explode("=",$line);
|
||||
$key = trim($key);
|
||||
$val = trim($val);
|
||||
$array1[$key] = $val;
|
||||
}
|
||||
$line = trim(fgets($in_file1));
|
||||
}
|
||||
$line = trim(fgets($in_file2));
|
||||
while (!feof($in_file2) && !$custom_found)
|
||||
{
|
||||
if (substr($line,0,1) != '#' && substr($line,0,1) != '')
|
||||
{
|
||||
list($key, $val) = explode("=",$line);
|
||||
$key = trim($key);
|
||||
$val = trim($val);
|
||||
$array2[$key] = $val;
|
||||
}
|
||||
if (strtolower($line) == "# custom")
|
||||
$custom_found = true;
|
||||
else
|
||||
$line = trim(fgets($in_file2));
|
||||
}
|
||||
fclose($in_file1);
|
||||
foreach($array2 as $k => $v)
|
||||
{
|
||||
if (array_key_exists($k, $array1))
|
||||
{
|
||||
$array1[$k] = $v;
|
||||
unset($array2[$k]);
|
||||
}
|
||||
}
|
||||
$in_file1 = fopen($upload1,"r");
|
||||
$line = trim(fgets($in_file1));
|
||||
while (!feof($in_file1))
|
||||
{
|
||||
if (substr($line,0,1) != '#' && substr($line,0,1) != '')
|
||||
{
|
||||
$array = array();
|
||||
while (substr($line,0,1) != '#' && substr($line,0,1) != '')
|
||||
{
|
||||
list($key, $val) = explode("=",$line);
|
||||
$key = trim($key);
|
||||
$val = trim($val);
|
||||
$array[$key] = $val;
|
||||
$line = trim(fgets($in_file1));
|
||||
}
|
||||
foreach($array as $k => $v)
|
||||
{
|
||||
if (array_key_exists($k, $array1))
|
||||
fwrite($out_file, $k."=".$array1[$k].$eol);
|
||||
else
|
||||
continue;
|
||||
}
|
||||
unset($array);
|
||||
if (!feof($in_file1))
|
||||
fwrite($out_file, $line.$eol);
|
||||
}
|
||||
else
|
||||
fwrite($out_file, $line.$eol);
|
||||
$line = trim(fgets($in_file1));
|
||||
}
|
||||
if ($custom_found)
|
||||
{
|
||||
fwrite($out_file, $eol);
|
||||
fwrite($out_file, "###############################################################################".$eol);
|
||||
fwrite($out_file, "# Custom".$eol);
|
||||
$line = trim(fgets($in_file2));
|
||||
while (!feof($in_file2))
|
||||
{
|
||||
fwrite($out_file, $line.$eol);
|
||||
$line = trim(fgets($in_file2));
|
||||
}
|
||||
}
|
||||
$first = true;
|
||||
foreach($array2 as $k => $v)
|
||||
{
|
||||
if ($first)
|
||||
{
|
||||
fwrite($out_file, $eol);
|
||||
fwrite($out_file, "###############################################################################".$eol);
|
||||
fwrite($out_file, "# The Following values were removed from the config.".$eol);
|
||||
$first = false;
|
||||
}
|
||||
fwrite($out_file, "# ".$k."=".$v.$eol);
|
||||
}
|
||||
|
||||
if (strpos($upload1, "worldserver") !== false)
|
||||
{
|
||||
file_put_contents($newconfig, str_replace("]=","]",file_get_contents($newconfig)));
|
||||
}
|
||||
else if (strpos($upload1, "authserver") !== false)
|
||||
{
|
||||
file_put_contents($newconfig, str_replace("]=","]",file_get_contents($newconfig)));
|
||||
}
|
||||
|
||||
unset($array1);
|
||||
unset($array2);
|
||||
fclose($in_file1);
|
||||
fclose($in_file2);
|
||||
fclose($out_file);
|
||||
unlink($upload1);
|
||||
unlink($upload2);
|
||||
|
||||
echo "Process done";
|
||||
echo "<br /><a href=".$newconfig.">Click here to retrieve your merged conf</a>";
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
echo "An error has occurred";
|
||||
}
|
||||
?>
|
1
apps/db_exporter/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
||||
config.sh
|
12
apps/db_exporter/README.md
Normal file
@ -0,0 +1,12 @@
This script is used by devs to export the databases to the base directories.

You should use it on clean databases.

## USAGE

NOTE: this script currently only works under unix

1) You must create a config.sh file, changing the DB connection configuration
taken from /conf/config.sh.dist (see the sketch below)

2) Run the db_export.sh script and wait
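The variable names below are the ones read by db_export.sh (`DATABASES`, `DB_<name>_PATHS`, `DB_<name>_CONF`, `DB_<name>_NAME`); every value shown is an illustrative assumption about one possible local setup rather than a shipped default, so adapt it to your environment:

```bash
# apps/db_exporter/config.sh - all values below are assumptions, adjust to your setup
DATABASES=("WORLD" "CHARACTERS" "AUTH")                 # databases to export

DB_WORLD_NAME="acore_world"                             # MySQL schema to dump
DB_WORLD_PATHS="$AC_PATH_ROOT/data/sql/base/db_world"   # where the base .sql files are written
DB_WORLD_CONF="MYSQL_USER='root'; MYSQL_PASS='password'; MYSQL_HOST='localhost';"  # connection settings appended to the dump configuration
```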
52
apps/db_exporter/db_export.sh
Normal file
@ -0,0 +1,52 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
ROOTPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../../" && pwd )"
|
||||
|
||||
source "$ROOTPATH/apps/bash_shared/includes.sh"
|
||||
|
||||
if [ -f "./config.sh" ]; then
|
||||
source "./config.sh" # should overwrite previous
|
||||
fi
|
||||
|
||||
echo "This is a dev-only procedure to export the DB into the SQL base files. All base files will be overwritten."
|
||||
read -p "Are you sure you want to continue (y/N)? " choice
|
||||
case "$choice" in
|
||||
y|Y ) echo "Exporting the DB into the SQL base files...";;
|
||||
* ) exit;;
|
||||
esac
|
||||
|
||||
echo "===== STARTING PROCESS ====="
|
||||
|
||||
|
||||
function export() {
|
||||
echo "Working on: "$1
|
||||
database=$1
|
||||
|
||||
var_base_path="DB_"$database"_PATHS"
|
||||
base_path=${!var_base_path%/}
|
||||
|
||||
base_conf="TPATH="$base_path";\
|
||||
CLEANFOLDER=1; \
|
||||
CHMODE=0; \
|
||||
TEXTDUMPS=0; \
|
||||
PARSEDUMP=1; \
|
||||
FULL=0; \
|
||||
DUMPOPTS='--skip-comments --skip-set-charset --routines --extended-insert --order-by-primary --single-transaction --quick'; \
|
||||
"
|
||||
|
||||
var_base_conf="DB_"$database"_CONF"
|
||||
base_conf=$base_conf${!var_base_conf}
|
||||
|
||||
var_base_name="DB_"$database"_NAME"
|
||||
base_name=${!var_base_name}
|
||||
|
||||
|
||||
bash "$AC_PATH_DEPS/acore/mysql-tools/mysql-tools" "dump" "" "$base_name" "" "$base_conf"
|
||||
}
|
||||
|
||||
for db in ${DATABASES[@]}
|
||||
do
|
||||
export "$db"
|
||||
done
|
||||
|
||||
echo "===== DONE ====="
|
344
apps/docker/Dockerfile
Normal file
@ -0,0 +1,344 @@
|
||||
#syntax=docker/dockerfile:1.2
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# DEV: Stage used for the development environment
|
||||
# and the locally built services
|
||||
#
|
||||
#=================================================================
|
||||
|
||||
FROM ubuntu:20.04 as base
|
||||
ARG USER_ID=1000
|
||||
ARG GROUP_ID=1000
|
||||
ARG DOCKER_USER=acore
|
||||
|
||||
LABEL description="AC base image for dev containers"
|
||||
|
||||
# List of timezones: http://en.wikipedia.org/wiki/List_of_tz_database_time_zones
|
||||
|
||||
ENV DOCKER=1
|
||||
|
||||
# Ensure ac-dev-server can properly pull versions
|
||||
ENV GIT_DISCOVERY_ACROSS_FILESYSTEM=1
|
||||
|
||||
# set timezone environment variable
|
||||
ENV TZ=Etc/UTC
|
||||
|
||||
# set noninteractive mode so tzdata doesn't ask to set timezone on install
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Do not use acore dashboard to install
|
||||
# since it's not cacheable by docker
|
||||
RUN apt-get update && apt-get install -y gdb gdbserver git dos2unix lsb-core sudo curl unzip \
|
||||
make cmake clang libmysqlclient-dev \
|
||||
libboost-system1.7*-dev libboost-filesystem1.7*-dev libboost-program-options1.7*-dev libboost-iostreams1.7*-dev \
|
||||
build-essential libtool cmake-data openssl libgoogle-perftools-dev google-perftools \
|
||||
libssl-dev libmysql++-dev libreadline6-dev zlib1g-dev libbz2-dev mysql-client \
|
||||
libncurses5-dev ccache \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Ensure git will work with the AzerothCore source directory
|
||||
RUN git config --global --add safe.directory /azerothcore
|
||||
|
||||
# change timezone in container
|
||||
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone && dpkg-reconfigure --frontend noninteractive tzdata
|
||||
|
||||
# Create a non-root user
|
||||
RUN addgroup --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
adduser --disabled-password --gecos '' --uid "$USER_ID" --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
passwd -d "$DOCKER_USER" && \
|
||||
echo "$DOCKER_USER ALL=(ALL:ALL) NOPASSWD: ALL" >> /etc/sudoers
|
||||
|
||||
# must be created to set the correct permissions on them
|
||||
RUN mkdir -p /azerothcore/env/dist/bin
|
||||
RUN mkdir -p /azerothcore/env/dist/data/Cameras
|
||||
RUN mkdir -p /azerothcore/env/dist/data/dbc
|
||||
RUN mkdir -p /azerothcore/env/dist/data/maps
|
||||
RUN mkdir -p /azerothcore/env/dist/data/mmaps
|
||||
RUN mkdir -p /azerothcore/env/dist/data/vmaps
|
||||
RUN mkdir -p /azerothcore/env/dist/logs
|
||||
RUN mkdir -p /azerothcore/env/dist/temp
|
||||
RUN mkdir -p /azerothcore/env/dist/etc
|
||||
RUN mkdir -p /azerothcore/var/build/obj
|
||||
|
||||
# Correct permissions for non-root operations
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /home/acore
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /run
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /opt
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /azerothcore
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
# copy only necessary files for the acore dashboard
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER apps /azerothcore/apps
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER bin /azerothcore/bin
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER conf /azerothcore/conf
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER data /azerothcore/data
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER deps /azerothcore/deps
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER acore.json /azerothcore/acore.json
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER acore.sh /azerothcore/acore.sh
|
||||
|
||||
# Download deno and make sure the dashboard works
|
||||
RUN bash /azerothcore/acore.sh quit
|
||||
|
||||
WORKDIR /azerothcore
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# Dev: create dev server image
|
||||
#
|
||||
#=================================================================
|
||||
|
||||
FROM base as dev
|
||||
|
||||
LABEL description="AC dev image for dev containers"
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
# copy everything so we can work directly within the container
|
||||
# using tools such as vscode dev-container
|
||||
# NOTE: this folder is different from /azerothcore (which is bind-mounted instead)
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER . /azerothcore
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# SERVICE BASE: prepare the OS for the production-ready services
|
||||
#
|
||||
#=================================================================
|
||||
|
||||
FROM ubuntu:20.04 as servicebase
|
||||
|
||||
ARG USER_ID=1000
|
||||
ARG GROUP_ID=1000
|
||||
ARG DOCKER_USER=acore
|
||||
|
||||
LABEL description="AC service image for server applications"
|
||||
|
||||
# List of timezones: http://en.wikipedia.org/wiki/List_of_tz_database_time_zones
|
||||
|
||||
# set timezone environment variable
|
||||
ENV TZ=Etc/UTC
|
||||
|
||||
# set noninteractive mode so tzdata doesn't ask to set timezone on install
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Create a non-root user
|
||||
RUN addgroup --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
adduser --disabled-password --gecos '' --uid "$USER_ID" --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
passwd -d "$DOCKER_USER" && \
|
||||
echo "$DOCKER_USER ALL=(ALL:ALL) NOPASSWD: ALL" >> /etc/sudoers
|
||||
|
||||
# install the required dependencies to run the server
|
||||
RUN apt-get update && apt-get install -y dos2unix gdb gdbserver google-perftools libgoogle-perftools-dev net-tools \
|
||||
libboost-system1.7*-dev libboost-filesystem1.7*-dev libboost-program-options1.7*-dev libboost-iostreams1.7*-dev \
|
||||
tzdata libmysqlclient-dev mysql-client curl unzip && rm -rf /var/lib/apt/lists/* ;
|
||||
|
||||
# change timezone in container
|
||||
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone && dpkg-reconfigure --frontend noninteractive tzdata
|
||||
|
||||
# Correct permissions for non-root operations
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /home/acore
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /run
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /opt
|
||||
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=base /azerothcore /azerothcore
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
# must be created to avoid permissions errors
|
||||
RUN mkdir -p /azerothcore/env/dist/data/Cameras
|
||||
RUN mkdir -p /azerothcore/env/dist/data/dbc
|
||||
RUN mkdir -p /azerothcore/env/dist/data/maps
|
||||
RUN mkdir -p /azerothcore/env/dist/data/mmaps
|
||||
RUN mkdir -p /azerothcore/env/dist/data/vmaps
|
||||
RUN mkdir -p /azerothcore/env/dist/logs
|
||||
RUN mkdir -p /azerothcore/env/dist/etc
|
||||
RUN mkdir -p /azerothcore/env/dist/bin
|
||||
|
||||
# Download deno and make sure the dashboard works
|
||||
RUN bash /azerothcore/acore.sh quit
|
||||
|
||||
WORKDIR /azerothcore/
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# AUTH & WORLD local: images used for local services
|
||||
# These images don't include binaries by default
|
||||
#
|
||||
#=================================================================
|
||||
|
||||
FROM servicebase as authserver-local
|
||||
|
||||
LABEL description="AC authserver image for local environment"
|
||||
|
||||
CMD ./acore.sh run-authserver
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
FROM servicebase as worldserver-local
|
||||
|
||||
LABEL description="AC worldserver image for local environment"
|
||||
|
||||
CMD ./acore.sh run-worldserver
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# BUILD: compile sources
|
||||
#
|
||||
#=================================================================
|
||||
FROM base as build
|
||||
|
||||
ARG DOCKER_USER=acore
|
||||
USER $DOCKER_USER
|
||||
|
||||
LABEL description="AC Image used by the build stage to generate production images"
|
||||
|
||||
RUN mkdir -p /azerothcore/env/etc/
|
||||
|
||||
# .git is needed by the compiler
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER ./.git /azerothcore/.git
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER ./CMakeLists.txt /azerothcore/CMakeLists.txt
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER ./deps /azerothcore/deps
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER ./src /azerothcore/src
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER ./modules /azerothcore/modules
|
||||
# check if we have ccache files available outside
|
||||
RUN rm -rf /azerothcore/var/ccache/*
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER var/docker/ccache /azerothcore/var/ccache
|
||||
|
||||
# install eluna
|
||||
RUN git clone --depth=1 --branch=master https://github.com/azerothcore/mod-eluna.git /azerothcore/modules/mod-eluna
|
||||
|
||||
ENV USER_CONF_PATH=/azerothcore/apps/docker/config-docker.sh
|
||||
ENV CTYPE=RelWithDebInfo
|
||||
ENV AC_CCACHE=true
|
||||
ENV CCACHE_CPP2=true
|
||||
ENV CSCRIPTPCH=OFF
|
||||
ENV CCOREPCH=OFF
|
||||
ENV CTOOLS_BUILD=all
|
||||
# ENV CTOOLS_BUILD=maps-only
|
||||
ENV CSCRIPTS=static
|
||||
RUN bash apps/docker/docker-build-prod.sh
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# AUTH SERVICE: create a ready-to-use authserver image
|
||||
# with binaries included
|
||||
#
|
||||
#=================================================================
|
||||
FROM authserver-local as authserver
|
||||
|
||||
LABEL description="AC Production: authserver"
|
||||
|
||||
ARG DOCKER_USER=acore
|
||||
USER $DOCKER_USER
|
||||
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/etc /azerothcore/env/dist/etc
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/bin/authserver /azerothcore/env/dist/bin/authserver
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# WORLD SERVICE: create a ready-to-use worldserver image
|
||||
# with binaries and data included
|
||||
#
|
||||
#=================================================================
|
||||
FROM worldserver-local as worldserver
|
||||
|
||||
LABEL description="AC Production: worldserver"
|
||||
|
||||
ARG DOCKER_USER=acore
|
||||
USER $DOCKER_USER
|
||||
|
||||
RUN mkdir -p /azerothcore/env/dist/bin/lua_scripts
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/etc /azerothcore/env/dist/etc
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/bin/worldserver /azerothcore/env/dist/bin/worldserver
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/bin/lua_scripts /azerothcore/env/dist/bin/lua_scripts
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/bin/dbimport /azerothcore/env/dist/bin/dbimport
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# CLIENT DATA
|
||||
#
|
||||
#=================================================================
|
||||
|
||||
FROM ubuntu:20.04 as client-data
|
||||
ARG USER_ID=1000
|
||||
ARG GROUP_ID=1000
|
||||
ARG DOCKER_USER=acore
|
||||
|
||||
LABEL description="AC Production: client-data"
|
||||
|
||||
RUN apt-get update && apt-get install -y tzdata curl unzip && rm -rf /var/lib/apt/lists/* ;
|
||||
|
||||
# set timezone environment variable
|
||||
ENV TZ=Etc/UTC
|
||||
|
||||
# set noninteractive mode so tzdata doesn't ask to set timezone on install
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN addgroup --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
adduser --disabled-password --gecos '' --uid "$USER_ID" --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
passwd -d "$DOCKER_USER" && \
|
||||
echo "$DOCKER_USER ALL=(ALL:ALL) NOPASSWD: ALL" >> /etc/sudoers
|
||||
|
||||
# ENV DATAPATH=/azerothcore/env/dist/data-temp
|
||||
ENV DATAPATH=/azerothcore/env/dist/data
|
||||
ENV DATAPATH_ZIP=/tmp/data.zip
|
||||
|
||||
RUN mkdir -p "$DATAPATH"
|
||||
ARG CACHEBUST=1
|
||||
# RUN --mount=type=bind,target=/azerothcore-temp,readwrite --mount=type=cache,target=/azerothcore/env/dist/data-temp /azerothcore-temp/acore.sh client-data && cp -rT /azerothcore/env/dist/data-temp/ /azerothcore/env/dist/data && chown -R $DOCKER_USER:$DOCKER_USER /azerothcore
|
||||
RUN --mount=type=bind,target=/azerothcore-temp,readwrite /azerothcore-temp/acore.sh client-data && chown -R $DOCKER_USER:$DOCKER_USER /azerothcore
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# TOOLS
|
||||
#
|
||||
#=================================================================
|
||||
|
||||
FROM ubuntu:20.04 as tools
|
||||
ARG USER_ID=1000
|
||||
ARG GROUP_ID=1000
|
||||
ARG DOCKER_USER=acore
|
||||
|
||||
LABEL description="AC Production: tools"
|
||||
|
||||
# List of timezones: http://en.wikipedia.org/wiki/List_of_tz_database_time_zones
|
||||
|
||||
# set timezone environment variable
|
||||
ENV TZ=Etc/UTC
|
||||
|
||||
# set noninteractive mode so tzdata doesn't ask to set timezone on install
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN apt-get update && apt-get install -y libmysqlclient-dev libssl-dev libbz2-dev \
|
||||
libboost-system1.7*-dev libboost-filesystem1.7*-dev libboost-program-options1.7*-dev libboost-iostreams1.7*-dev \
|
||||
sudo && rm -rf /var/lib/apt/lists/* ;
|
||||
|
||||
# Create a non-root user
|
||||
RUN addgroup --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
adduser --disabled-password --gecos '' --uid "$USER_ID" --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
passwd -d "$DOCKER_USER" && \
|
||||
echo "$DOCKER_USER ALL=(ALL:ALL) NOPASSWD: ALL" >> /etc/sudoers
|
||||
|
||||
RUN mkdir -p /azerothcore/env/client/
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /azerothcore
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
WORKDIR /azerothcore/env/client/
|
||||
|
||||
RUN mkdir -p /azerothcore/env/client/Cameras
|
||||
RUN mkdir -p /azerothcore/env/client/dbc
|
||||
RUN mkdir -p /azerothcore/env/client/maps
|
||||
RUN mkdir -p /azerothcore/env/client/mmaps
|
||||
RUN mkdir -p /azerothcore/env/client/vmaps
|
||||
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/bin/map_extractor /azerothcore/env/client/map_extractor
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/bin/mmaps_generator /azerothcore/env/client/mmaps_generator
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/bin/vmap4_assembler /azerothcore/env/client/vmap4_assembler
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/bin/vmap4_extractor /azerothcore/env/client/vmap4_extractor
|
||||
|
27
apps/docker/README.md
Normal file
@ -0,0 +1,27 @@
# Run AzerothCore with Docker

*This readme is a summary of the AzerothCore Docker features.*

Docker is software that performs operating-system-level virtualization, allowing you to wrap and launch applications inside containers.

Thanks to Docker, you can quickly set up and run AzerothCore on any operating system.

The **only** requirement is having [Docker](https://docs.docker.com/install/) installed on your system. Forget about installing mysql, visual studio, cmake, etc...

### Installation instructions

Check the [Install with Docker](https://www.azerothcore.org/wiki/Install-with-Docker) guide. A minimal command-line sketch using the docker-cmd.sh wrapper from this commit is shown below.

### Memory usage

The total amount of RAM when running all AzerothCore docker containers is **less than 2 GB**.



### Docker containers vs Virtual machines

Using Docker will have the same benefits as using virtual machines, but with much less overhead:


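As a quick reference, the `apps/docker/docker-cmd.sh` wrapper added in this commit covers the most common workflows; a minimal first-run sketch from the repository root (assuming the prerequisites from the wiki guide are already in place):

```bash
# build the dev/local images and compile the core inside the dev container
bash apps/docker/docker-cmd.sh build

# download the client data (dbc/maps/vmaps/mmaps) into the dev server
bash apps/docker/docker-cmd.sh client-data

# start the worldserver and authserver in detached mode
bash apps/docker/docker-cmd.sh start:app:d
```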
8
apps/docker/config-docker.sh
Normal file
@ -0,0 +1,8 @@
|
||||
CUR_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
CTOOLS_BUILD=all
|
||||
|
||||
# allow the user to override configs
|
||||
if [ -f "$AC_PATH_CONF/config.sh" ]; then
|
||||
source "$AC_PATH_CONF/config.sh" # should overwrite previous
|
||||
fi
|
14
apps/docker/docker-build-dev.sh
Normal file
@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
CUR_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
source "$CUR_PATH/docker-build-prod.sh"
|
||||
|
||||
echo "Fixing EOL..."
|
||||
# using -n (new file mode) should also fix the issue
|
||||
# when the file is created with the default acore user but you
|
||||
# set a different user into the docker configurations
|
||||
for file in "env/dist/etc/"*
|
||||
do
|
||||
dos2unix -n $file $file
|
||||
done
|
5
apps/docker/docker-build-prod.sh
Normal file
@ -0,0 +1,5 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
cd /azerothcore
|
||||
|
||||
bash acore.sh compiler build
|
180
apps/docker/docker-cmd.sh
Normal file
@ -0,0 +1,180 @@
|
||||
#!/bin/bash
|
||||
|
||||
# TODO(michaeldelago) decide if we need a wrapper like this around docker
|
||||
# commands.
|
||||
#
|
||||
# Running the docker commands should be simple and familiar.
|
||||
# Introducing extra steps through the dashboard can cause issues with people
|
||||
# getting started, especially if they already know docker.
|
||||
#
|
||||
# If a new user knows docker, they will feel (pretty close) to right at home.
|
||||
# If a new user doesn't know docker, it's easy to learn and the knowledge
|
||||
# applies to much more than azerothcore
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
COMPOSE_DOCKER_CLI_BUILD="1"
|
||||
DOCKER_BUILDKIT="1"
|
||||
# BUILDKIT_INLINE_CACHE="1"
|
||||
|
||||
function usage () {
|
||||
cat <<EOF
|
||||
Wrapper for shell scripts around docker
|
||||
|
||||
usage: $(basename $0) ACTION [ ACTION... ] [ ACTION_ARG... ]
|
||||
|
||||
actions:
|
||||
EOF
|
||||
# the `-s` will remove the "#" and properly space the action and description
|
||||
cat <<EOF | column -t -l2 -s'#'
|
||||
> start:app # Start the development worldserver and authserver
|
||||
> start:app:d # Start the development worldserver and authserver in detached mode
|
||||
> build # build the development worldserver and authserver
|
||||
> pull # pull the development worldserver and authserver
|
||||
> build:nocache # build the development worldserver and authserver without cache
|
||||
> clean:build # clean build artifacts from the dev server
|
||||
> client-data # download client data in the dev server
|
||||
> dev:up # start the dev server
|
||||
> dev:build # compile azerothcore using the dev server
|
||||
> dev:dash # execute the dashboard in the dev server container
|
||||
> dev:shell [ ARGS... ] # open a bash shell in the dev server
|
||||
> prod:build # Build the service containers used by acore-docker
|
||||
> prod:pull # Pull the containers used by acore-docker
|
||||
> prod:up # Start the services used by acore-docker
|
||||
> prod:up:d # start the services used by acore-docker in the background
|
||||
> attach SERVICE # attach to a service currently running in docker compose
|
||||
EOF
|
||||
}
|
||||
|
||||
# If no args, just spit usage and exit
|
||||
[[ $# -eq 0 ]] && usage && exit
|
||||
|
||||
# loop through commands passed
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
start:app)
|
||||
set -x
|
||||
docker compose --profile app up
|
||||
set +x
|
||||
# pop the head off of the queue of args
|
||||
# After this, the value of $1 is the value of $2
|
||||
shift
|
||||
;;
|
||||
|
||||
start:app:d)
|
||||
set -x
|
||||
docker compose --profile app up -d
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
build)
|
||||
set -x
|
||||
docker compose --profile local --profile dev --profile dev-build build
|
||||
docker compose --profile dev-build run --rm --no-deps ac-dev-build /bin/bash /azerothcore/apps/docker/docker-build-dev.sh
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
pull)
|
||||
set -x
|
||||
docker compose --profile local --profile dev --profile dev-build pull
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
build:nocache)
|
||||
set -x
|
||||
docker compose --profile local --profile dev --profile dev-build build --no-cache
|
||||
docker compose run --rm --no-deps ac-dev-build /bin/bash /azerothcore/apps/docker/docker-build-dev.sh
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
clean:build)
|
||||
set -x
|
||||
docker compose run --rm --no-deps ac-dev-server bash acore.sh compiler clean
|
||||
docker compose run --rm --no-deps ac-dev-server bash acore.sh compiler ccacheClean
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
client-data)
|
||||
set -x
|
||||
docker compose run --rm --no-deps ac-dev-server bash acore.sh client-data
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
dev:up)
|
||||
set -x
|
||||
docker compose up -d ac-dev-server
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
dev:build)
|
||||
set -x
|
||||
docker compose run --rm ac-dev-server bash acore.sh compiler build
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
dev:dash)
|
||||
set -x
|
||||
docker compose run --rm ac-dev-server bash /azerothcore/acore.sh ${@:2}
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
dev:shell)
|
||||
set -x
|
||||
docker compose up -d ac-dev-server
|
||||
docker compose exec ac-dev-server bash ${@:2}
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
build:prod|prod:build)
|
||||
set -x
|
||||
docker compose --profile prod build
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
pull:prod|prod:pull)
|
||||
set -x
|
||||
docker compose --profile prod pull
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
prod:up|start:prod)
|
||||
set -x
|
||||
docker compose --profile prod-app up
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
prod:up:d|start:prod:d)
|
||||
set -x
|
||||
docker compose --profile prod-app up -d
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
attach)
|
||||
SERVICE="$2"
|
||||
set -x
|
||||
docker compose attach "$SERVICE"
|
||||
set +x
|
||||
shift
|
||||
shift # Second to pass the argument
|
||||
;;
|
||||
|
||||
*)
|
||||
echo "Unknown or empty arg"
|
||||
usage
|
||||
exit 1
|
||||
esac
|
||||
done
|
83
apps/extractor/extractor.bat
Normal file
@ -0,0 +1,83 @@
|
||||
@ECHO OFF
|
||||
CLS
|
||||
|
||||
:MENU
|
||||
ECHO.
|
||||
ECHO ...............................................
|
||||
ECHO AzerothCore dbc, maps, vmaps, mmaps extractor
|
||||
ECHO ...............................................
|
||||
ECHO PRESS 1, 2, 3 OR 4 to select your task, or 5 to EXIT.
|
||||
ECHO ...............................................
|
||||
ECHO.
|
||||
ECHO WARNING! when extracting the vmaps extractor will
|
||||
ECHO output the text below, it's intended and not an error:
|
||||
ECHO ..........................................
|
||||
ECHO Extracting World\Wmo\Band\Final_Stage.wmo
|
||||
ECHO No such file.
|
||||
ECHO Couldn't open RootWmo!!!
|
||||
ECHO Done!
|
||||
ECHO ..........................................
|
||||
ECHO.
|
||||
ECHO Press 1, 2, 3 or 4 to start extracting or 5 to exit.
|
||||
ECHO 1 - Extract base files (NEEDED) and cameras.
|
||||
ECHO 2 - Extract vmaps (needs maps to be extracted before you run this) (OPTIONAL, highly recommended)
|
||||
ECHO 3 - Extract mmaps (needs vmaps to be extracted before you run this, may take hours) (OPTIONAL, highly recommended)
|
||||
ECHO 4 - Extract all (may take hours)
|
||||
ECHO 5 - EXIT
|
||||
ECHO.
|
||||
SET /P M=Type 1, 2, 3, 4 or 5 then press ENTER:
|
||||
IF %M%==1 GOTO MAPS
|
||||
IF %M%==2 GOTO VMAPS
|
||||
IF %M%==3 GOTO MMAPS
|
||||
IF %M%==4 GOTO ALL
|
||||
IF %M%==5 GOTO :EOF
|
||||
|
||||
:MAPS
|
||||
start /b /w map_extractor.exe
|
||||
GOTO MENU
|
||||
|
||||
:VMAPS
|
||||
start /b /w vmap4_extractor.exe
|
||||
if exist vmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "vmaps".
|
||||
mkdir "vmaps"
|
||||
)
|
||||
start /b /w vmap4_assembler.exe Buildings vmaps
|
||||
rmdir Buildings /s /q
|
||||
GOTO MENU
|
||||
|
||||
:MMAPS
|
||||
ECHO This may take a few hours to complete. Please be patient.
|
||||
PAUSE
|
||||
if exist mmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "mmaps".
|
||||
mkdir "mmaps"
|
||||
)
|
||||
start /b /w mmaps_generator.exe
|
||||
GOTO MENU
|
||||
|
||||
:ALL
|
||||
ECHO This may take a few hours to complete. Please be patient.
|
||||
PAUSE
|
||||
if exist vmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "vmaps".
|
||||
mkdir "vmaps"
|
||||
)
|
||||
if exist mmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "mmaps".
|
||||
mkdir "mmaps"
|
||||
)
|
||||
start /b /w map_extractor.exe
|
||||
start /b /w vmap4_extractor.exe
|
||||
start /b /w vmap4_assembler.exe Buildings vmaps
|
||||
rmdir Buildings /s /q
|
||||
start /b /w mmaps_generator.exe
|
||||
GOTO MENU
|
83
apps/extractor/extractor_es.bat
Normal file
@ -0,0 +1,83 @@
|
||||
@ECHO OFF
|
||||
CLS
|
||||
|
||||
:MENU
|
||||
ECHO.
|
||||
ECHO ...............................................
|
||||
ECHO AzerothCore dbc, maps, vmaps, mmaps extractor
|
||||
ECHO ...............................................
|
||||
ECHO PRESIONE 1, 2, 3 O 4 para seleccionar su tarea, o 5 para SALIR.
|
||||
ECHO ...............................................
|
||||
ECHO.
|
||||
ECHO ADVERTENCIA: al extraer los vmaps del extractor
|
||||
ECHO la salida del texto de abajo, es intencional y no un error:
|
||||
ECHO ..........................................
|
||||
ECHO Extracting World\Wmo\Band\Final_Stage.wmo
|
||||
ECHO No such file.
|
||||
ECHO Couldn't open RootWmo!!!
|
||||
ECHO Done!
|
||||
ECHO ..........................................
|
||||
ECHO.
|
||||
ECHO Pulse 1, 2, 3 o 4 para iniciar la extraccion o 5 para salir.
|
||||
ECHO 1 - Extraer los archivos base (NECESARIOS) y las cámaras.
|
||||
ECHO 2 - Extraer vmaps (necesita que los mapas se extraigan antes de ejecutar esto) (OPCIONAL, muy recomendable)
|
||||
ECHO 3 - Extraer mmaps (necesita que los vmaps se extraigan antes de ejecutar esto, puede llevar horas) (OPCIONAL, muy recomendable)
|
||||
ECHO 4 - Extraer todo (puede llevar varias horas)
|
||||
ECHO 5 - SALIR
|
||||
ECHO.
|
||||
SET /P M=Escriba 1, 2, 3, 4 o 5 y pulse ENTER:
|
||||
IF %M%==1 GOTO MAPS
|
||||
IF %M%==2 GOTO VMAPS
|
||||
IF %M%==3 GOTO MMAPS
|
||||
IF %M%==4 GOTO ALL
|
||||
IF %M%==5 GOTO :EOF
|
||||
|
||||
:MAPS
|
||||
start /b /w map_extractor.exe
|
||||
GOTO MENU
|
||||
|
||||
:VMAPS
|
||||
start /b /w vmap4_extractor.exe
|
||||
if exist vmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "vmaps".
|
||||
mkdir "vmaps"
|
||||
)
|
||||
start /b /w vmap4_assembler.exe Buildings vmaps
|
||||
rmdir Buildings /s /q
|
||||
GOTO MENU
|
||||
|
||||
:MMAPS
|
||||
ECHO Esto puede tardar unas horas en completarse. Por favor, tenga paciencia.
|
||||
PAUSE
|
||||
if exist mmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "mmaps".
|
||||
mkdir "mmaps"
|
||||
)
|
||||
start /b /w mmaps_generator.exe
|
||||
GOTO MENU
|
||||
|
||||
:ALL
|
||||
ECHO Esto puede tardar unas horas en completarse. Por favor, tenga paciencia.
|
||||
PAUSE
|
||||
if exist vmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "vmaps".
|
||||
mkdir "vmaps"
|
||||
)
|
||||
if exist mmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "mmaps".
|
||||
mkdir "mmaps"
|
||||
)
|
||||
start /b /w map_extractor.exe
|
||||
start /b /w vmap4_extractor.exe
|
||||
start /b /w vmap4_assembler.exe Buildings vmaps
|
||||
rmdir Buildings /s /q
|
||||
start /b /w mmaps_generator.exe
|
||||
GOTO MENU
|
5
apps/git_tools/setup_git_commit_template.sh
Normal file
@ -0,0 +1,5 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
## Set a local git commit template
|
||||
git config --local commit.template ".git_commit_template.txt" ;
|
||||
echo "--- Successfully set the default commit template for this repository only. Verify with: git config -e"
|
34
apps/git_tools/subrepo-update.sh
Normal file
@ -0,0 +1,34 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
#######################
|
||||
#
|
||||
# README
|
||||
#
|
||||
# This script is used to automatically update
|
||||
# submodules and subrepos included in this project
|
||||
# Subrepos are updated in a bidirectional way (pull + push)
|
||||
# because they are intended to be developed by this organization
|
||||
#
|
||||
# NOTE: only maintainers and CI should run this script and
|
||||
# keep it updated
|
||||
#
|
||||
#######################
|
||||
|
||||
set -e
|
||||
ROOT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../"
|
||||
# update all submodules
|
||||
git submodule update --init --recursive
|
||||
git submodule foreach git pull origin master
|
||||
# include libraries for git subrepo
|
||||
source "$ROOT_PATH/deps/git-subrepo/.rc"
|
||||
source "$ROOT_PATH/deps/acore/bash-lib/src/git-utils/subrepo.sh"
|
||||
|
||||
echo "> Pulling and update all subrepos"
|
||||
|
||||
subrepoUpdate https://github.com/azerothcore/bash-lib master deps/acore/bash-lib
|
||||
|
||||
subrepoUpdate https://github.com/azerothcore/cmake-utils master deps/acore/cmake-utils
|
||||
|
||||
subrepoUpdate https://github.com/azerothcore/mysql-tools master deps/acore/mysql-tools
|
||||
|
||||
subrepoUpdate https://github.com/azerothcore/joiner master deps/acore/joiner
|
1319
apps/grafana/1_General.json
Normal file
File diff suppressed because it is too large
691
apps/grafana/2_Maps.json
Normal file
@ -0,0 +1,691 @@
|
||||
{
|
||||
"annotations": {
|
||||
"list": [
|
||||
{
|
||||
"builtIn": 1,
|
||||
"datasource": "-- Grafana --",
|
||||
"enable": true,
|
||||
"hide": true,
|
||||
"iconColor": "rgba(0, 211, 255, 1)",
|
||||
"name": "Annotations & Alerts",
|
||||
"type": "dashboard"
|
||||
},
|
||||
{
|
||||
"datasource": "Influx",
|
||||
"enable": true,
|
||||
"iconColor": "#C0C6BE",
|
||||
"iconSize": 13,
|
||||
"lineColor": "rgba(255, 96, 96, 0.592157)",
|
||||
"name": "Global Events",
|
||||
"query": "select title, text from events where $timeFilter and realm =~ /$realm$/",
|
||||
"showLine": true,
|
||||
"textColumn": "text",
|
||||
"titleColumn": "title"
|
||||
}
|
||||
]
|
||||
},
|
||||
"editable": true,
|
||||
"gnetId": null,
|
||||
"graphTooltip": 0,
|
||||
"id": 6,
|
||||
"iteration": 1595939001794,
|
||||
"links": [],
|
||||
"panels": [
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "Influx",
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 0
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 2,
|
||||
"isNew": true,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "connected",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [
|
||||
{
|
||||
"alias": "Unload tile",
|
||||
"transform": "negative-Y"
|
||||
}
|
||||
],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "Load tile",
|
||||
"dsType": "influxdb",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"0"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"query": "SELECT count(\"title\") FROM \"map_events\" WHERE \"realm\" =~ /$realm$/ AND \"title\" = 'LoadMapTile' AND $timeFilter GROUP BY time($interval) fill(0)",
|
||||
"rawQuery": true,
|
||||
"refId": "A",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": []
|
||||
},
|
||||
{
|
||||
"alias": "Unload tile",
|
||||
"dsType": "influxdb",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"null"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"query": "SELECT count(\"title\") FROM \"map_events\" WHERE \"realm\" =~ /$realm$/ AND \"title\" = 'UnloadMapTile' AND $timeFilter GROUP BY time($interval) fill(0)",
|
||||
"rawQuery": true,
|
||||
"refId": "B",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": []
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Map",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "cumulative"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "Influx",
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 7
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 1,
|
||||
"isNew": true,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "connected",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "Pathfinding queries",
|
||||
"dsType": "influxdb",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"null"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"query": "SELECT count(\"title\") FROM \"mmap_events\" WHERE \"realm\" =~ /$realm$/ AND \"title\" = 'CalculatePath' AND $timeFilter GROUP BY time($interval) fill(0)",
|
||||
"rawQuery": true,
|
||||
"refId": "A",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": []
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "MMap",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "cumulative"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": null,
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 5,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 14
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 4,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"hideEmpty": false,
|
||||
"hideZero": true,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 1,
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 2,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "Map $tag_map_id Instance $tag_map_instanceid",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"map_id"
|
||||
],
|
||||
"type": "tag"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"map_instanceid"
|
||||
],
|
||||
"type": "tag"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"none"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "map_creatures",
|
||||
"orderByTime": "ASC",
|
||||
"policy": "default",
|
||||
"refId": "A",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": [
|
||||
{
|
||||
"key": "realm",
|
||||
"operator": "=~",
|
||||
"value": "/^$realm$/"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Creatures",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": null,
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 5,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 22
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 5,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"hideEmpty": false,
|
||||
"hideZero": true,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 1,
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 2,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "Map $tag_map_id Instance $tag_map_instanceid",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"map_id"
|
||||
],
|
||||
"type": "tag"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"map_instanceid"
|
||||
],
|
||||
"type": "tag"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"none"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "map_gameobjects",
|
||||
"orderByTime": "ASC",
|
||||
"policy": "default",
|
||||
"refId": "A",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": [
|
||||
{
|
||||
"key": "realm",
|
||||
"operator": "=~",
|
||||
"value": "/^$realm$/"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Gameobjects",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
}
|
||||
],
|
||||
"refresh": "1m",
|
||||
"schemaVersion": 25,
|
||||
"style": "dark",
|
||||
"tags": [],
|
||||
"templating": {
|
||||
"list": [
|
||||
{
|
||||
"allFormat": "regex values",
|
||||
"allValue": null,
|
||||
"current": {
|
||||
"text": "Acore",
|
||||
"value": "Acore"
|
||||
},
|
||||
"datasource": "Influx",
|
||||
"definition": "",
|
||||
"hide": 0,
|
||||
"includeAll": false,
|
||||
"label": null,
|
||||
"multi": false,
|
||||
"multiFormat": "regex values",
|
||||
"name": "realm",
|
||||
"options": [],
|
||||
"query": "show tag values from events with key = realm",
|
||||
"refresh": 1,
|
||||
"regex": "",
|
||||
"skipUrlSync": false,
|
||||
"sort": 0,
|
||||
"tagValuesQuery": "",
|
||||
"tags": [],
|
||||
"tagsQuery": "",
|
||||
"type": "query",
|
||||
"useTags": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"time": {
|
||||
"from": "now-15m",
|
||||
"to": "now"
|
||||
},
|
||||
"timepicker": {
|
||||
"now": true,
|
||||
"refresh_intervals": [
|
||||
"10s",
|
||||
"30s",
|
||||
"1m",
|
||||
"5m",
|
||||
"15m",
|
||||
"30m",
|
||||
"1h",
|
||||
"2h",
|
||||
"1d"
|
||||
],
|
||||
"time_options": [
|
||||
"5m",
|
||||
"15m",
|
||||
"1h",
|
||||
"6h",
|
||||
"12h",
|
||||
"24h",
|
||||
"2d",
|
||||
"7d",
|
||||
"30d"
|
||||
]
|
||||
},
|
||||
"timezone": "browser",
|
||||
"title": "Maps, vmaps and mmaps",
|
||||
"uid": "6IhqWiWGz",
|
||||
"version": 2
|
||||
}
|
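
The panels above all share one InfluxQL shape: count titled events from a measurement (map_events, mmap_events, ...), filter by the $realm template variable, and bucket by time. To sanity-check the same data outside Grafana, a sketch using the InfluxDB 1.x CLI (the database name "worldserver" is an assumption; substitute whatever database the worldserver metrics are written to):

# Count LoadMapTile events for realm "Acore" over the last 15 minutes, in 1m buckets.
influx -database 'worldserver' -execute \
  "SELECT count(\"title\") FROM \"map_events\" WHERE \"realm\" =~ /Acore/ AND \"title\" = 'LoadMapTile' AND time > now() - 15m GROUP BY time(1m) fill(0)"
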
280
apps/grafana/3_Network.json
Normal file
@ -0,0 +1,280 @@
|
||||
{
|
||||
"annotations": {
|
||||
"list": [
|
||||
{
|
||||
"builtIn": 1,
|
||||
"datasource": "-- Grafana --",
|
||||
"enable": true,
|
||||
"hide": true,
|
||||
"iconColor": "rgba(0, 211, 255, 1)",
|
||||
"name": "Annotations & Alerts",
|
||||
"type": "dashboard"
|
||||
},
|
||||
{
|
||||
"datasource": "Influx",
|
||||
"enable": true,
|
||||
"iconColor": "#C0C6BE",
|
||||
"iconSize": 13,
|
||||
"lineColor": "rgba(255, 96, 96, 0.592157)",
|
||||
"name": "Global Events",
|
||||
"query": "select title, text from events where $timeFilter and realm =~ /$realm$/",
|
||||
"showLine": true,
|
||||
"textColumn": "text",
|
||||
"titleColumn": "title"
|
||||
}
|
||||
]
|
||||
},
|
||||
"editable": true,
|
||||
"gnetId": null,
|
||||
"graphTooltip": 0,
|
||||
"id": 7,
|
||||
"iteration": 1595939048589,
|
||||
"links": [],
|
||||
"panels": [
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "Influx",
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 0
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 1,
|
||||
"isNew": true,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "connected",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "Processed packets",
|
||||
"dsType": "influxdb",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"0"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "processed_packets",
|
||||
"query": "SELECT sum(\"value\") FROM \"processed_packets\" WHERE \"realm\" =~ /$realm$/ AND $timeFilter GROUP BY time($interval) fill(0)",
|
||||
"refId": "A",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "sum"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": [
|
||||
{
|
||||
"key": "realm",
|
||||
"operator": "=~",
|
||||
"value": "/$realm$/"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"alias": "Processed packets / mean per session",
|
||||
"dsType": "influxdb",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"0"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "processed_packets",
|
||||
"query": "SELECT mean(\"value\") FROM \"processed_packets\" WHERE \"realm\" =~ /$realm$/ AND $timeFilter GROUP BY time($interval) fill(0)",
|
||||
"refId": "B",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": [
|
||||
{
|
||||
"key": "realm",
|
||||
"operator": "=~",
|
||||
"value": "/$realm$/"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Processed packets",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "cumulative"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
}
|
||||
],
|
||||
"refresh": "1m",
|
||||
"schemaVersion": 25,
|
||||
"style": "dark",
|
||||
"tags": [],
|
||||
"templating": {
|
||||
"list": [
|
||||
{
|
||||
"allFormat": "regex values",
|
||||
"allValue": null,
|
||||
"current": {
|
||||
"text": "Acore",
|
||||
"value": "Acore"
|
||||
},
|
||||
"datasource": "Influx",
|
||||
"definition": "",
|
||||
"hide": 0,
|
||||
"includeAll": false,
|
||||
"label": null,
|
||||
"multi": false,
|
||||
"multiFormat": "regex values",
|
||||
"name": "realm",
|
||||
"options": [],
|
||||
"query": "show tag values from events with key = realm",
|
||||
"refresh": 1,
|
||||
"regex": "",
|
||||
"skipUrlSync": false,
|
||||
"sort": 0,
|
||||
"tagValuesQuery": "",
|
||||
"tags": [],
|
||||
"tagsQuery": "",
|
||||
"type": "query",
|
||||
"useTags": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"time": {
|
||||
"from": "now-15m",
|
||||
"to": "now"
|
||||
},
|
||||
"timepicker": {
|
||||
"now": true,
|
||||
"refresh_intervals": [
|
||||
"10s",
|
||||
"30s",
|
||||
"1m",
|
||||
"5m",
|
||||
"15m",
|
||||
"30m",
|
||||
"1h",
|
||||
"2h",
|
||||
"1d"
|
||||
],
|
||||
"time_options": [
|
||||
"5m",
|
||||
"15m",
|
||||
"1h",
|
||||
"6h",
|
||||
"12h",
|
||||
"24h",
|
||||
"2d",
|
||||
"7d",
|
||||
"30d"
|
||||
]
|
||||
},
|
||||
"timezone": "browser",
|
||||
"title": "Network",
|
||||
"uid": "_QtkMmWMk",
|
||||
"version": 2
|
||||
}
|
1677
apps/grafana/4_Performance_profiling.json
Normal file
File diff suppressed because it is too large
253
apps/installer/includes/functions.sh
Normal file
@ -0,0 +1,253 @@
function inst_configureOS() {
    echo "Platform: $OSTYPE"
    case "$OSTYPE" in
        solaris*) echo "Solaris is not supported yet" ;;
        darwin*) source "$AC_PATH_INSTALLER/includes/os_configs/osx.sh" ;;
        linux*)
            # If $OSDISTRO is set, use this value (from config.sh)
            if [ ! -z "$OSDISTRO" ]; then
                DISTRO=$OSDISTRO
            # If available, use LSB to identify the distribution
            elif command -v lsb_release >/dev/null 2>&1 ; then
                DISTRO=$(lsb_release -is)
            # Otherwise, use the release info file
            else
                DISTRO=$(ls -d /etc/[A-Za-z]*[_-][rv]e[lr]* | grep -v "lsb" | cut -d'/' -f3 | cut -d'-' -f1 | cut -d'_' -f1)
            fi

            case $DISTRO in
                # add here distros that are Debian- or Ubuntu-based
                # TODO: find a better way, maybe checking the existence
                # of a package manager
                "neon" | "ubuntu" | "Ubuntu")
                    DISTRO="ubuntu"
                    ;;
                "debian" | "Debian")
                    DISTRO="debian"
                    ;;
                *)
                    echo "Distro: $DISTRO is not supported. If your distribution is based on Debian or Ubuntu,
                    please set the 'OSDISTRO' environment variable to one of those distros (you can use the config.sh file)"
                    ;;
            esac


            DISTRO=${DISTRO,,}

            echo "Distro: $DISTRO"

            # TODO: implement different configurations by distro
            source "$AC_PATH_INSTALLER/includes/os_configs/$DISTRO.sh"
            ;;
        bsd*) echo "BSD is not supported yet" ;;
        msys*) source "$AC_PATH_INSTALLER/includes/os_configs/windows.sh" ;;
        *) echo "This platform is not supported" ;;
    esac
}

function inst_updateRepo() {
    cd "$AC_PATH_ROOT"
    if [ ! -z "$INSTALLER_PULL_FROM" ]; then
        git pull "$ORIGIN_REMOTE" "$INSTALLER_PULL_FROM"
    else
        git pull "$ORIGIN_REMOTE" $(git rev-parse --abbrev-ref HEAD)
    fi
}

function inst_resetRepo() {
    cd "$AC_PATH_ROOT"
    git reset --hard $(git rev-parse --abbrev-ref HEAD)
    git clean -f
}

function inst_compile() {
    comp_configure
    comp_build
}

function inst_cleanCompile() {
    comp_clean
    inst_compile
}

function inst_allInOne() {
    inst_configureOS
    inst_compile
    dbasm_import true true true
}

function inst_getVersionBranch() {
    local res="master"
    local v="not-defined"
    local MODULE_MAJOR=0
    local MODULE_MINOR=0
    local MODULE_PATCH=0
    local MODULE_SPECIAL=0;
    local ACV_MAJOR=0
    local ACV_MINOR=0
    local ACV_PATCH=0
    local ACV_SPECIAL=0;
    local curldata=$(curl -f --silent -H 'Cache-Control: no-cache' "$1" || echo "{}")
    local parsed=$(echo "$curldata" | "$AC_PATH_DEPS/jsonpath/JSONPath.sh" -b '$.compatibility.*.[version,branch]')

    semverParseInto "$ACORE_VERSION" ACV_MAJOR ACV_MINOR ACV_PATCH ACV_SPECIAL

    if [[ ! -z "$parsed" ]]; then
        readarray -t vers < <(echo "$parsed")
        local idx
        res="none"
        # since the version,branch pairs come alternated in a flat, non-associative,
        # one-dimensional array, we simulate the association with the length/2 trick
        for idx in `seq 0 $((${#vers[*]}/2-1))`; do
            semverParseInto "${vers[idx*2]}" MODULE_MAJOR MODULE_MINOR MODULE_PATCH MODULE_SPECIAL
            if [[ $MODULE_MAJOR -eq $ACV_MAJOR && $MODULE_MINOR -le $ACV_MINOR ]]; then
                res="${vers[idx*2+1]}"
                v="${vers[idx*2]}"
            fi
        done
    fi

    echo "$v" "$res"
}
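
To make the length/2 trick in inst_getVersionBranch concrete, a standalone toy example (the array contents are made up for illustration):

# A flat array of version,branch pairs; element 2*i is a version, element 2*i+1 its branch.
pairs=("3.0.0" "master" "2.0.0" "legacy-branch")
for idx in $(seq 0 $(( ${#pairs[*]} / 2 - 1 ))); do
    echo "version=${pairs[idx*2]} branch=${pairs[idx*2+1]}"
done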

function inst_module_search {

    local res="$1"
    local idx=0;

    if [ -z "$1" ]; then
        echo "Type what to search or leave blank for full list"
        read -p "Insert name: " res
    fi

    local search="+$res"

    echo "Searching $res..."
    echo "";

    readarray -t MODS < <(curl --silent "https://api.github.com/search/repositories?q=org%3Aazerothcore${search}+fork%3Atrue+topic%3Acore-module+sort%3Astars&type=" \
        | "$AC_PATH_DEPS/jsonpath/JSONPath.sh" -b '$.items.*.name')
    while (( ${#MODS[@]} > idx )); do
        mod="${MODS[idx++]}"
        read v b < <(inst_getVersionBranch "https://raw.githubusercontent.com/azerothcore/$mod/master/acore-module.json")

        if [[ "$b" != "none" ]]; then
            echo "-> $mod (tested with AC version: $v)"
        else
            echo "-> $mod (no revision available for AC v$AC_VERSION, it may not work!)"
        fi
    done

    echo "";
    echo "";
}

function inst_module_install {
    local res
    if [ -z "$1" ]; then
        echo "Type the name of the module to install"
        read -p "Insert name: " res
    else
        res="$1"
    fi

    read v b < <(inst_getVersionBranch "https://raw.githubusercontent.com/azerothcore/$res/master/acore-module.json")

    if [[ "$b" != "none" ]]; then
        Joiner:add_repo "https://github.com/azerothcore/$res" "$res" "$b" && echo "Done, please re-run compiling and db assembly. Read the instructions on the module repository for more information"
    else
        echo "Cannot install the $res module: it doesn't exist or no version compatible with AC v$ACORE_VERSION is available"
    fi

    echo "";
    echo "";
}

function inst_module_update {
    local res;
    local _tmp;
    local branch;
    local p;

    if [ -z "$1" ]; then
        echo "Type the name of the module to update"
        read -p "Insert name: " res
    else
        res="$1"
    fi

    _tmp=$PWD

    if [ -d "$J_PATH_MODULES/$res/" ]; then
        read v b < <(inst_getVersionBranch "https://raw.githubusercontent.com/azerothcore/$res/master/acore-module.json")

        cd "$J_PATH_MODULES/$res/"

        # use the current branch if something is wrong with the json
        if [[ "$v" == "none" || "$v" == "not-defined" ]]; then
            b=`git rev-parse --abbrev-ref HEAD`
        fi

        Joiner:upd_repo "https://github.com/azerothcore/$res" "$res" "$b" && echo "Done, please re-run compiling and db assembly" || echo "Cannot update"
        cd "$_tmp"
    else
        echo "Cannot update! Path doesn't exist"
    fi;

    echo "";
    echo "";
}

function inst_module_remove {
    if [ -z "$1" ]; then
        echo "Type the name of the module to remove"
        read -p "Insert name: " res
    else
        res="$1"
    fi

    Joiner:remove "$res" && echo "Done, please re-run compiling" || echo "Cannot remove"

    echo "";
    echo "";
}


function inst_simple_restarter {
    echo "Running $1 ..."
    bash "$AC_PATH_APPS/startup-scripts/simple-restarter" "$AC_BINPATH_FULL" "$1"
    echo
    #disown -a
    #jobs -l
}

function inst_download_client_data {
    # change the following version when needed
    local VERSION=v16

    echo "#######################"
    echo "Client data downloader"
    echo "#######################"

    # first check if it's defined in the env, otherwise use the default
    local path="${DATAPATH:-$AC_BINPATH_FULL}"
    local zipPath="${DATAPATH_ZIP:-"$path/data.zip"}"

    dataVersionFile="$path/data-version"

    [ -f "$dataVersionFile" ] && source "$dataVersionFile"

    # create the path if it doesn't exist
    mkdir -p "$path"

    if [ "$VERSION" == "$INSTALLED_VERSION" ]; then
        echo "Data $VERSION already installed. If you want to force the download remove the following file: $dataVersionFile"
        return
    fi

    echo "Downloading client data to: $zipPath ..."
    curl -L https://github.com/wowgaming/client-data/releases/download/$VERSION/data.zip > "$zipPath" \
        && echo "Unzipping the downloaded file into $path..." && unzip -q -o "$zipPath" -d "$path/" \
        && echo "Removing the downloaded file" && rm "$zipPath" \
        && echo "INSTALLED_VERSION=$VERSION" > "$dataVersionFile"
}
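
As a usage example for the downloader above (the paths are arbitrary examples), the target folder and the temporary zip location can be overridden through the environment before calling the helper:

# Download/extract the client data into a custom folder instead of $AC_BINPATH_FULL.
DATAPATH="$HOME/azerothcore/data" DATAPATH_ZIP="/tmp/ac-client-data.zip" inst_download_client_data
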
22
apps/installer/includes/includes.sh
Normal file
@ -0,0 +1,22 @@
[[ ${INSTALLER_GUARDYVAR:-} -eq 1 ]] && return || readonly INSTALLER_GUARDYVAR=1 # include it once

CURRENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd )

source "$CURRENT_PATH/../../bash_shared/includes.sh"

AC_PATH_INSTALLER="$AC_PATH_APPS/installer"

J_PATH="$AC_PATH_DEPS/acore/joiner"
J_PATH_MODULES="$AC_PATH_MODULES"

source "$J_PATH/joiner.sh"

if [ -f "$AC_PATH_INSTALLER/config.sh" ]; then
    source "$AC_PATH_INSTALLER/config.sh" # should overwrite previous values
fi

source "$AC_PATH_APPS/compiler/includes/includes.sh"

source "$AC_PATH_DEPS/semver_bash/semver.sh"

source "$AC_PATH_INSTALLER/includes/functions.sh"
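
The first line of includes.sh is a source-once guard: the readonly flag survives repeated "source" calls within the same shell, so the file body only runs the first time. A minimal, self-contained illustration of the same pattern (the variable and file names are hypothetical):

# mylib.sh
[[ ${MYLIB_LOADED:-} -eq 1 ]] && return || readonly MYLIB_LOADED=1
echo "mylib initialized"   # printed only on the first 'source mylib.sh'
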
18
apps/installer/includes/os_configs/debian.sh
Normal file
@ -0,0 +1,18 @@
if ! command -v lsb_release &>/dev/null ; then
    sudo apt-get install -y lsb-release
fi

DEBIAN_VERSION=$(lsb_release -sr)

sudo apt-get update -y

sudo apt-get install -y gdbserver gdb unzip curl \
    libncurses-dev libreadline-dev clang g++ \
    gcc git cmake make ccache

if [[ $DEBIAN_VERSION -eq "10" ]]; then
    sudo apt-get install -y default-libmysqlclient-dev libssl-dev libreadline-dev libncurses-dev mariadb-server \
        libboost-system1.6*-dev libboost-filesystem1.6*-dev libboost-program-options1.6*-dev libboost-iostreams1.6*-dev
else # Debian 8 and 9 should work using this
    sudo apt-get install -y libmysqlclient-dev libssl1.0-dev mysql-server
fi
29
apps/installer/includes/os_configs/osx.sh
Normal file
@ -0,0 +1,29 @@
##########################################
## workaround for python upgrade issue https://github.com/actions/runner-images/issues/6817
rm /usr/local/bin/2to3 || true
rm /usr/local/bin/2to3-3.10 || true
rm /usr/local/bin/2to3-3.11 || true
rm /usr/local/bin/idle3 || true
rm /usr/local/bin/idle3.10 || true
rm /usr/local/bin/idle3.11 || true
rm /usr/local/bin/pydoc3 || true
rm /usr/local/bin/pydoc3.10 || true
rm /usr/local/bin/pydoc3.11 || true
rm /usr/local/bin/python3 || true
rm /usr/local/bin/python3.10 || true
rm /usr/local/bin/python3.11 || true
rm /usr/local/bin/python3-config || true
rm /usr/local/bin/python3.10-config || true
rm /usr/local/bin/python3.11-config || true
##########################################

brew update

##########################################
## workaround for cmake already being installed in the github runners
if ! command -v cmake &>/dev/null ; then
    brew install cmake
fi
##########################################

brew install openssl@3 readline boost@1.82 bash-completion curl unzip mysql@8.1 ccache
29
apps/installer/includes/os_configs/ubuntu.sh
Normal file
@ -0,0 +1,29 @@
if ! command -v lsb_release &>/dev/null ; then
    sudo apt-get install -y lsb-release
fi

UBUNTU_VERSION=$(lsb_release -sr);

sudo apt update

# shared deps
sudo apt-get -y install ccache clang cmake curl google-perftools libmysqlclient-dev make unzip

if [[ $CONTINUOUS_INTEGRATION || $DOCKER ]]; then
    sudo add-apt-repository -y ppa:mhier/libboost-latest && sudo apt update && sudo apt-get -y install build-essential cmake-data \
        libboost1.74-dev libbz2-dev libncurses5-dev libmysql++-dev libgoogle-perftools-dev libreadline6-dev libssl-dev libtool \
        openssl zlib1g-dev
else
    case $UBUNTU_VERSION in
        "20.04")
            sudo apt-get install -y g++ gdb gdbserver gcc git \
                libboost-all-dev libbz2-dev libncurses-dev libreadline-dev \
                libssl-dev mysql-server
            ;;
        *)
            sudo add-apt-repository -y ppa:mhier/libboost-latest && sudo apt update && sudo apt-get install -y g++ gdb gdbserver gcc git \
                libboost-all-dev libbz2-dev libncurses-dev libreadline-dev \
                libssl-dev mysql-server
            ;;
    esac
fi
30
apps/installer/includes/os_configs/windows.sh
Normal file
@ -0,0 +1,30 @@
# install chocolatey first

@"%SystemRoot%\System32\WindowsPowerShell\v1.0\powershell.exe" -NoProfile -InputFormat None -ExecutionPolicy Bypass -Command "iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1'))" && SET "PATH=%PATH%;%ALLUSERSPROFILE%\chocolatey\bin"

# automatically install the following packages:
# cmake
# git
# microsoft-build-tools
# mysql

INSTALL_ARGS=""

if [[ $CONTINUOUS_INTEGRATION ]]; then
    INSTALL_ARGS=" --no-progress "
else
    { # try
        choco uninstall -y -n cmake.install cmake # needed to make sure that the following install sets the env properly
    } || { # catch
        echo "nothing to do"
    }

    choco install -y --skip-checksums $INSTALL_ARGS git visualstudio2022community
fi

choco install -y --skip-checksums $INSTALL_ARGS cmake.install -y --installargs 'ADD_CMAKE_TO_PATH=System'
choco install -y --skip-checksums $INSTALL_ARGS visualstudio2022-workload-nativedesktop
choco install -y --skip-checksums $INSTALL_ARGS openssl --version=3.1.1
choco install -y --skip-checksums $INSTALL_ARGS boost-msvc-14.3 --version=1.82.0
choco install -y --skip-checksums $INSTALL_ARGS mysql --version=8.0.31
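
The { ... } || { ... } grouping in windows.sh is the closest bash gets to a try/catch: the second brace group runs only if the last command of the first group fails, which keeps a failed "choco uninstall" from aborting the script. A tiny standalone illustration:

# "try" the first group; "catch" with the second one.
{ false; } || { echo "fallback ran because the first group failed"; }
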
Some files were not shown because too many files have changed in this diff