First Commit

This commit is contained in:
mikx 2025-03-18 19:19:03 -04:00
commit 93073b0be2
10024 changed files with 9034050 additions and 0 deletions

@@ -0,0 +1,53 @@
// If you want to run as a non-root user in the container, see .devcontainer/docker-compose.yml.
{
"name": "ac-dev-server",
// Update the 'dockerComposeFile' list if you have more compose files or use different names.
// Set an empty array to automatically resolve
// the docker-compose files (including the .override.yml).
// https://github.com/microsoft/vscode-remote-release/issues/1080#issuecomment-824213014
// it requires vscode 1.57+
"dockerComposeFile": [],
// The 'service' property is the name of the service for the container that VS Code should
// use. Update this value and .devcontainer/docker-compose.yml to the real service name.
"service": "ac-dev-server",
// The optional 'workspaceFolder' property is the path VS Code should open by default when
// connected. This is typically a file mount in .devcontainer/docker-compose.yml
"workspaceFolder": "/azerothcore",
// Set *default* container specific settings.json values on container create.
"settings": {
"terminal.integrated.shell.linux": null
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"notskm.clang-tidy",
"xaver.clang-format",
"bbenoist.doxygen",
"ms-vscode.cpptools",
"austin.code-gnu-global",
"twxs.cmake",
"mhutchie.git-graph",
"github.vscode-pull-request-github",
"eamodio.gitlens",
"cschlosser.doxdocgen",
"sanaajani.taskrunnercode"
],
// Use 'forwardPorts' to make a list of ports inside the container available locally.
// "forwardPorts": [],
// Uncomment the next line if you want to start specific services in your Docker Compose config.
"runServices": ["ac-dev-server", "ac-database"],
// Uncomment the next line if you want to keep your containers running after VS Code shuts down.
// "shutdownAction": "none",
// Uncomment the next line to run commands after the container is created - for example installing curl.
// "postCreateCommand": "apt-get update && apt-get install -y curl",
// Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "root"
}
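
For anyone working outside VS Code, here is a minimal sketch of starting the same services manually with the Docker Compose CLI. It assumes the ac-dev-server and ac-database services referenced by "runServices" above are both defined in .devcontainer/docker-compose.yml; adjust the path if your compose files live elsewhere.

```bash
# Start the services the dev container expects (detached).
docker compose -f .devcontainer/docker-compose.yml up -d ac-dev-server ac-database

# Open a shell inside the dev server container, mirroring what VS Code attaches to.
docker compose -f .devcontainer/docker-compose.yml exec ac-dev-server bash
```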

@@ -0,0 +1,35 @@
version: '3.9'
services:
# Update this to the name of the service you want to work with in your docker-compose.yml file
ac-dev-server:
# If you want to add a non-root user to your Dockerfile, you can use the "remoteUser"
# property in devcontainer.json to cause VS Code and its sub-processes (terminals, tasks,
# debugging) to execute as the user. Uncomment the next line if you want the entire
# container to run as this user instead. Note that, on Linux, you may need to
# ensure the UID and GID of the container user you create matches your local user.
# See https://aka.ms/vscode-remote/containers/non-root for details.
#
# user: vscode
# Uncomment if you want to override the service's Dockerfile to one in the .devcontainer
# folder. Note that the path of the Dockerfile and context is relative to the *primary*
# docker-compose.yml file (the first in the devcontainer.json "dockerComposeFile"
# array). The sample below assumes your primary file is in the root of your project.
#
# build:
# context: .
# dockerfile: .devcontainer/Dockerfile
#volumes:
# Update this to wherever you want VS Code to mount the folder of your project
#- .:/workspace:cached
# Uncomment the next line to use Docker from inside the container. See https://aka.ms/vscode-remote/samples/docker-from-docker-compose for details.
# - /var/run/docker.sock:/var/run/docker.sock
# Uncomment the next four lines if you will use a ptrace-based debugger for languages like C++, Go, or Rust.
# cap_add:
# - SYS_PTRACE
# security_opt:
# - seccomp:unconfined
tty: true
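
If the cap_add/security_opt lines above are uncommented for ptrace-based debugging, a quick sanity check is to inspect the capability mask of the container's init process. This sketch assumes the ac-dev-server service is already running via the compose file in this folder.

```bash
# Print the effective capability bitmask of PID 1 inside the container;
# with SYS_PTRACE granted it differs from Docker's default mask.
docker compose -f .devcontainer/docker-compose.yml exec ac-dev-server \
  grep CapEff /proc/1/status
# Optionally decode the mask on the host with capsh --decode (from libcap2-bin).
```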

.dockerignore Normal file

@@ -0,0 +1,14 @@
/cmake-build-debug/*
/build*/
/var/*
!/var/build/.gitkeep
!/var/ccache/.gitkeep
/env/dist/*
!/env/dist/.gitkeep
/env/user/*
/.env*
.idea
!.gitkeep
# do not ignore the ccache folder (used by the ci)
!/var/docker/ccache

.editorconfig Normal file

@@ -0,0 +1,17 @@
[*]
charset = utf-8
indent_style = space
indent_size = 4
tab_width = 4
insert_final_newline = true
trim_trailing_whitespace = true
max_line_length = 80
[*.{json,ts,js,yml}]
charset = utf-8
indent_style = space
indent_size = 2
tab_width = 2
insert_final_newline = true
trim_trailing_whitespace = true
max_line_length = 80

.git_commit_template.txt Normal file

@@ -0,0 +1,53 @@
### TITLE
## Type(Scope/Subscope): Commit ultra short explanation
## |---- Write below the examples with a maximum of 50 characters ----|
## Example 1: fix(DB/SAI): Missing spell to NPC Hogger
## Example 2: fix(CORE/Raid): Phase 2 of Ragnaros
## Example 3: feat(CORE/Commands): New GM command to do something
### DESCRIPTION
## Explain why this change is being made, what it fixes, etc.
## |---- Write below the examples with a maximum of 72 characters per line ----|
## Example: Hogger (id: 492) was not charging player when being engaged.
## Provide links to any issue, commit, pull request or other resource
## Example 1: Closes AzerothCore issue #23
## Example 2: Ported from other project's commit (link)
## Example 3: References taken from wowpedia / wowhead / wowwiki / https://wowgaming.altervista.org/aowow/
### CO-AUTHOR(S)
## If there are more authors they can be mentioned like this
## Co-authored-by: name <name@example.com>
## =======================================================
## EXTRA INFO
## =======================================================
## "Type" can be:
## feat (new feature)
## fix (bug fix)
## refactor (refactoring production code)
## style (formatting, missing semicolons, etc; no code change)
## docs (changes to documentation)
## test (adding or refactoring tests; no production code change)
## chore (updating bash scripts, git files etc; no production code change)
## --------------------
## Remember to
## Capitalize the subject line
## Use the imperative mood in the subject line
## Do not end the subject line with a period
## Separate subject from body with a blank line
## Use the body to explain what and why rather than how
## Can use multiple lines with "-" for bullet points in body
## --------------------
## More info here https://www.conventionalcommits.org/en/v1.0.0-beta.2/
## =======================================================
## "Scope" can be:
## CORE (core related, c++)
## DB (database related, sql)
## =======================================================
## "Subscope" is optional and depends on the nature of the commit.
## =======================================================
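
One possible way to wire this template into a local clone and write a commit that follows the convention described above (the template file name is assumed to stay .git_commit_template.txt in the repository root):

```bash
# Tell git to pre-fill the editor with the template for every commit in this repo.
git config commit.template .git_commit_template.txt

# Example commit message built from the rules above:
# a short imperative subject, a blank line, then the why/what in the body.
git commit -m "fix(DB/SAI): Missing spell to NPC Hogger" \
           -m "Hogger (id: 492) was not charging players when engaged. Closes #23"
```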

.gitattributes vendored Normal file

@@ -0,0 +1,30 @@
# Auto detect text files and perform LF normalization
* text eol=lf
# Whitespace rules
# strict (no trailing, no tabs)
*.cpp whitespace=trailing-space,space-before-tab,tab-in-indent,cr-at-eol
*.h whitespace=trailing-space,space-before-tab,tab-in-indent,cr-at-eol
# normal (no trailing)
*.sql whitespace=trailing-space,space-before-tab,cr-at-eol
*.txt whitespace=trailing-space,space-before-tab,cr-at-eol
# special files which must ignore whitespace
*.patch whitespace=-trailing-space eol=lf
*.diff whitespace=-trailing-space eol=lf
# Standard for msysgit
*.doc diff=astextplain
*.DOC diff=astextplain
*.docx diff=astextplain
*.DOCX diff=astextplain
*.dot diff=astextplain
*.DOT diff=astextplain
*.pdf diff=astextplain
*.PDF diff=astextplain
*.rtf diff=astextplain
*.RTF diff=astextplain
# Exclude data/sql/* files from GitHub language statistics (Linguist)
data/sql/* linguist-documentation
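
The whitespace= attributes above are enforced by git's built-in whitespace checks; a contributor can surface violations locally before pushing, for example:

```bash
# Report trailing whitespace, space-before-tab, etc. in unstaged and staged changes.
git diff --check
git diff --cached --check

# After editing .gitattributes, re-apply the eol/text rules to tracked files.
git add --renormalize .
```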

.github/CODE_OF_CONDUCT.md vendored Normal file

@@ -0,0 +1,138 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, caste, color, religion, or sexual
identity and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, fun, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the overall
community
Examples of unacceptable behavior include:
* The use of sexualized imagery, and sexual attention or advances of
any kind
* The use of sexualized language which could reasonably be considered inappropriate.
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email address,
without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct.
## Scope
This Code of Conduct applies within all community spaces, which includes but is not limited to AzerothCore
managed sites and community spaces, and also applies when an individual is officially representing the
community in public spaces.
Examples of representing our community include:
* Using an official e-mail address
* Posting via an official social media account
* Acting as an appointed representative at an online or offline event
* Communicating within the WoW Emulation communities
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement on the AzerothCore
[Discord server][discord].
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private or public, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series of
actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or permanent
ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within the
community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.1, available at
[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].
Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
For answers to common questions about this code of conduct, see the FAQ at
[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at
[https://www.contributor-covenant.org/translations][translations].
[homepage]: https://www.contributor-covenant.org
[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
[Mozilla CoC]: https://github.com/mozilla/diversity
[FAQ]: https://www.contributor-covenant.org/faq
[translations]: https://www.contributor-covenant.org/translations
[discord]: https://discord.gg/gkt4y2x

.github/CONTRIBUTING.md vendored Normal file

@@ -0,0 +1,30 @@
## CONTRIBUTING
AzerothCore can also serve as a learning resource for aspiring developers who want to understand how WoW servers work, how MMORPGs are structured, how game server emulators are created, or to improve their C++ and SQL knowledge.
If you want to contribute to the project, you will find a lot of resources that will guide you in our [wiki](https://www.azerothcore.org/wiki/contribute).
We also recommend you read our [Contributor Covenant Code of Conduct](https://github.com/azerothcore/azerothcore-wotlk/blob/master/.github/CODE_OF_CONDUCT.md).
Feel free to join our [Discord server](https://discord.gg/gkt4y2x).
## AUTHORS & CONTRIBUTORS
This project exists thanks to the [authors](https://github.com/azerothcore/azerothcore-wotlk/blob/master/AUTHORS).
## IMPORTANT LINKS
- [Doxygen documentation](https://www.azerothcore.org/pages/doxygen/index.html)
- [Website](http://www.azerothcore.org/)
- [AzerothCore catalogue](http://www.azerothcore.org/catalogue.html "Modules, tools, and other stuff for AzerothCore") (modules, tools, etc...)
- [Our Discord server](https://discord.gg/gkt4y2x)
- [Our wiki](http://www.azerothcore.org/wiki "Easy to use and developed by AzerothCore founder")
- [Our forum](https://github.com/azerothcore/azerothcore-wotlk/discussions/)
- [Our Facebook page](https://www.facebook.com/AzerothCore/)
- [Our LinkedIn page](https://www.linkedin.com/company/azerothcore/)
All contributions, big or small, are appreciated <3
The AzerothCore staff appreciate all the help and the time that *you* put into your contributions.
Thank you!

.github/FUNDING.yml vendored Normal file

@@ -0,0 +1 @@
custom: https://www.paypal.com/donate/?hosted_button_id=L69ANPSR8BJDU

@@ -0,0 +1,95 @@
name: Game issues
description: Create a bug report to help us improve.
body:
- type: markdown
attributes:
value: |
Thank you for taking the time to fill out a bug report. Remember to fill out all fields including the title above.
An issue that is not properly filled out will be closed.
You can read more about the standards for a bug report [here](https://www.azerothcore.org/wiki/issue-tracker-standards).
- type: textarea
id: current
attributes:
label: Current Behaviour
description: |
Description of the problem or issue here.
Include entries of affected creatures / items / quests / spells etc.
Never upload files! Use GIST for text and YouTube for videos!
validations:
required: true
- type: textarea
id: expected
attributes:
label: Expected Behaviour
description: |
Tell us what should happen instead.
validations:
required: true
- type: textarea
id: source
attributes:
label: Source
description: |
If you have a source that proves how it is supposed to work, please add that to make it easier for devs to fix the issue.
validations:
required: false
- type: textarea
id: reproduce
attributes:
label: Steps to reproduce the problem
description: |
What does someone else need to do to encounter the same bug?
placeholder: |
1. Step 1
2. Step 2
3. Step 3
validations:
required: true
- type: textarea
id: extra
attributes:
label: Extra Notes
description: |
Do you have any extra notes that can help solve the issue and that do not fit any other field?
placeholder: |
None
validations:
required: false
- type: textarea
id: commit
attributes:
label: AC rev. hash/commit
description: |
Paste the entire output of the `.server debug` command. (If you need to run it from the client, get a Prat addon.)
placeholder: |
Paste the entire output of the `.server debug` command. (If you need to run it from the client, get a Prat addon.)
validations:
required: true
- type: input
id: os
attributes:
label: Operating system
description: |
The Operating System the Server is running on.
e.g. Windows 11 x64, Debian 10 x64, macOS 12, Ubuntu 20.04
validations:
required: true
- type: textarea
id: custom
attributes:
label: Custom changes or Modules
description: |
List which custom changes or modules you have applied, e.g. Eluna module, etc.
placeholder: |
None
validations:
required: false
- type: markdown
attributes:
value: |
Thank you for your contribution.
If you use AzerothCore regularly, we really NEED your help to:
- Test our fixes: https://www.azerothcore.org/wiki/How-to-test-a-PR
- Report issues or suggestions: https://github.com/azerothcore/azerothcore-wotlk/issues/new/choose
- Improve the documentation/wiki: https://www.azerothcore.org/wiki/home
With your help, the project can evolve much quicker!

@@ -0,0 +1,86 @@
name: Crash / Server Crash issues
description: Did your server crash? Post an issue here!
title: "Crash: "
labels: ["Priority-Critical", "HasBacktrace"]
body:
- type: markdown
attributes:
value: |
Thank you for taking the time to fill out a bug report. Remember to fill out all fields including the title above.
An issue that is not properly filled out will be closed.
- type: textarea
id: current
attributes:
label: Steps to Reproduce
description: |
If possible provide detailed steps to reproduce the crash.
placeholder: |
1. Provide the exact steps to trigger the crash.
2. Include any relevant configurations or commands.
3. Mention if the crash is consistent or intermittent.
validations:
required: false
- type: markdown
attributes:
value: |
NOTE: Make sure your server was compiled in RelWithDebInfo or Debug mode, as crash logs from Release builds do not contain enough information.
- type: textarea
id: logs
attributes:
label: Logs and Screenshots
description: |
Do you have any logs or screenshots that can be useful?
Crash logs in text are preferred over screenshots.
If you have logs in text form please upload them to [Gist](https://gist.github.com/) or PasteBin and post the link.
validations:
required: false
- type: input
id: os
attributes:
label: Operating System
description: |
The Operating System you are having issues on.
e.g. Windows 11 x64, Debian 10 x64, macOS 12, Ubuntu 20.04
validations:
required: true
- type: textarea
id: deps
attributes:
label: Dependencies & versions
description: |
Relevant information about dependencies and their versions that can be useful to debug the issue.
Example:
- OpenSSL ver ...
- Boost ver ...
- MySQL ver ...
- Visual Studio ver ...
- GCC ver ...
- Clang ver ...
- CMake ver ...
validations:
required: true
- type: input
id: commit
attributes:
label: Commit
description: |
Which commit hash are you using?
validations:
required: true
- type: textarea
id: extra
attributes:
label: Additional Context
description: |
Do you have any other relevant information about the issue?
validations:
required: false
- type: markdown
attributes:
value: |
Thank you for your contribution.
If you use AzerothCore regularly, we really NEED your help to:
- Test our fixes: https://www.azerothcore.org/wiki/How-to-test-a-PR
- Report issues or suggestions: https://github.com/azerothcore/azerothcore-wotlk/issues/new/choose
- Improve the documentation/wiki: https://www.azerothcore.org/wiki/home
With your help, the project can evolve much quicker!
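
As the note above says, crash logs are only useful when the server is built with debug information. A minimal sketch of such a configuration (standard CMake flags; the build directory name is an assumption):

```bash
mkdir -p build && cd build
# RelWithDebInfo keeps optimizations but emits the symbols that backtraces need.
cmake .. -DCMAKE_BUILD_TYPE=RelWithDebInfo
cmake --build . -j"$(nproc)"
```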

@@ -0,0 +1,75 @@
name: Build/Tools/Apps issues
description: Got an issue with build, tools or apps? Create an issue to let us know!
body:
- type: markdown
attributes:
value: |
Thank you for taking the time to fill out a bug report. Remember to fill out all fields including the title above.
An issue that is not properly filled out will be closed.
- type: textarea
id: current
attributes:
label: Current Behaviour
description: |
What actually happens and how do we reproduce it?
validations:
required: true
- type: textarea
id: logs
attributes:
label: Logs and Screenshots
description: |
Do you have any logs or screenshots that can be useful?
If you have logs in text form please upload them to [Gist](https://gist.github.com/) or PasteBin and post the link.
validations:
required: false
- type: input
id: os
attributes:
label: Operating System
description: |
The Operating System you are having issues on.
e.g. Windows 11 x64, Debian 10 x64, macOS 12, Ubuntu 20.04
validations:
required: true
- type: textarea
id: deps
attributes:
label: Dependencies & versions
description: |
Relevant information about dependencies and their versions that can be useful to debug the issue.
Example:
- OpenSSL ver ...
- Boost ver ...
- MySQL ver ...
- Visual Studio ver ...
- GCC ver ...
- Clang ver ...
- CMake ver ...
validations:
required: true
- type: input
id: commit
attributes:
label: Commit
description: |
Which commit hash are you using?
validations:
required: true
- type: textarea
id: extra
attributes:
label: Additional Context
description: |
Do you have any other relevant information about the issue?
validations:
required: false
- type: markdown
attributes:
value: |
Thank you for your contribution.
If you use AzerothCore regularly, we really NEED your help to:
- Test our fixes: https://www.azerothcore.org/wiki/How-to-test-a-PR
- Report issues or suggestions: https://github.com/azerothcore/azerothcore-wotlk/issues/new/choose
- Improve the documentation/wiki: https://www.azerothcore.org/wiki/home
With your help, the project can evolve much quicker!
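
A quick way to collect the dependency versions the template asks for, assuming the tools are on PATH (trim to whatever applies to your setup):

```bash
cmake --version | head -n1
gcc --version | head -n1
clang --version | head -n1
mysql --version
openssl version
git rev-parse HEAD   # the commit hash requested above
```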

.github/ISSUE_TEMPLATE/config.yml vendored Normal file

@@ -0,0 +1,17 @@
blank_issues_enabled: false
contact_links:
- name: Website
url: https://www.azerothcore.org
about: On the AC website you can find a lot of things, such as existing modules.
- name: Wiki
url: https://www.azerothcore.org/wiki
about: You can find plenty of information on our Wiki.
- name: FAQ
url: https://www.azerothcore.org/wiki/faq
about: Frequently asked questions.
- name: Common Errors
url: https://www.azerothcore.org/wiki/common-errors
about: You can find common errors and their solutions here.
- name: Discord
url: https://discord.gg/gkt4y2x
about: Join the discussions over at our Discord Server.

@@ -0,0 +1,43 @@
name: Feature request
description: Suggest an idea for this project
title: "Feature: "
labels: "Feature"
body:
- type: markdown
attributes:
value: |
Thank you for taking the time to fill out a feature request. Remember to fill out all fields including the title above.
An issue that is not properly filled out will be closed.
- type: textarea
id: description
attributes:
label: Describe your feature request or suggestion in detail
description: |
A clear and concise description of what you want to happen.
validations:
required: true
- type: textarea
id: solution
attributes:
label: Describe a possible solution to your feature or suggestion in detail
description: |
A clear and concise description of any alternative solutions or features you've considered.
validations:
required: false
- type: textarea
id: additional
attributes:
label: Additional context
description: |
Add any other context or screenshots about the feature request here.
validations:
required: false
- type: markdown
attributes:
value: |
Thank you for your contribution.
If you use AzerothCore regularly, we really NEED your help to:
- Test our fixes: https://www.azerothcore.org/wiki/How-to-test-a-PR
- Report issues or suggestions: https://github.com/azerothcore/azerothcore-wotlk/issues/new/choose
- Improve the documentation/wiki: https://www.azerothcore.org/wiki/home
With your help, the project can evolve much quicker!

.github/ISSUE_TEMPLATE/ee_cc.yml vendored Normal file

@@ -0,0 +1,12 @@
name: CC Triage
description: This template is only used for ChromieCraft
labels: ["ChromieCraft Generic"]
body:
- type: textarea
id: current
attributes:
label: Triage
description: |
Paste the issue from ChromieCraft here.
validations:
required: true

.github/README.md vendored Normal file

@@ -0,0 +1,92 @@
# ![logo](https://raw.githubusercontent.com/azerothcore/azerothcore.github.io/master/images/logo-github.png) AzerothCore
[![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-2.1-4baaaa.svg)](CODE_OF_CONDUCT.md)
[![CodeFactor](https://www.codefactor.io/repository/github/azerothcore/azerothcore-wotlk/badge)](https://www.codefactor.io/repository/github/azerothcore/azerothcore-wotlk)
[![StackOverflow](http://img.shields.io/badge/stackoverflow-azerothcore-blue.svg?logo=stackoverflow)](https://stackoverflow.com/questions/tagged/azerothcore?sort=newest "Ask / browse questions here")
[![Discord](https://img.shields.io/discord/217589275766685707?logo=discord&logoColor=white)](https://discord.gg/gkt4y2x "Our community hub on Discord")
## Build Status
[![nopch-build](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/core-build-nopch.yml/badge.svg?branch=master)](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/core-build-nopch.yml?query=branch%3Amaster)
[![pch-build](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/core-build-pch.yml/badge.svg?branch=master)](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/core-build-pch.yml?query=branch%3Amaster)
[![core-modules-build](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/core_modules_build.yml/badge.svg?branch=master)](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/core_modules_build.yml?query=branch%3Amaster)
[![windows-build](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/windows_build.yml/badge.svg?branch=master)](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/windows_build.yml?query=branch%3Amaster)
[![macos-build](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/macos_build.yml/badge.svg?branch=master)](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/macos_build.yml?query=branch%3Amaster)
[![docker-build](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/docker_build.yml/badge.svg?branch=master)](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/docker_build.yml?query=branch%3Amaster)
[![tools-build](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/tools_build.yml/badge.svg?branch=master)](https://github.com/azerothcore/azerothcore-wotlk/actions/workflows/tools_build.yml?query=branch%3Amaster)
## Introduction
AzerothCore is an open-source game server application and framework designed for hosting massively multiplayer online role-playing games (MMORPGs). It is based on the popular MMORPG World of Warcraft (WoW) and seeks to recreate the gameplay experience of the original game from patch 3.3.5a.
The original code is based on MaNGOS, TrinityCore, and SunwellCore and has since undergone extensive development to improve stability, in-game mechanics, and modularity. AC has also grown into a community-driven project with a significant number of contributors and developers. It is written in C++ and provides a solid foundation for creating private servers that mimic the mechanics and behavior of the official WoW servers.
## Philosophy
Our main goal is to create a playable game server, offering a fully working in-game experience.
Here are the main points we focus on:
* Stability
* We make sure all changes pass the CIs before being merged into the master branch.
* Blizzlike content
* We strive to make all in-game content blizzlike. Therefore, we have a high standard for the fixes being made.
* Customization
* It is easy to customize your experience using [modules](#modules).
* Community driven
* AzerothCore has an active community of developers, contributors, and users who collaborate, share knowledge, and provide support through forums, Discord channels, and other communication platforms.
### Modules
AzerothCore is designed to be highly modular, allowing developers to extend and customize the game to suit their preferences or create unique gameplay experiences. This flexibility enables the addition of custom features, content, and modifications.
We have a lot of modules already made by the community, many of which can be found in the [Module Catalogue](https://www.azerothcore.org/catalogue.html#/).
## Installation
Detailed installation instructions are available [here](http://www.azerothcore.org/wiki/installation).
## Contributing
AzerothCore can also serve as a learning resource for aspiring developers who want to understand how WoW servers work, how MMORPGs are structured, how game server emulators are created, or to improve their C++ and SQL knowledge.
If you want to contribute to the project, you will find a lot of resources that will guide you in our [wiki](https://www.azerothcore.org/wiki/contribute).
We also recommend you read our [Contributor Covenant Code of Conduct](https://github.com/azerothcore/azerothcore-wotlk/blob/master/.github/CODE_OF_CONDUCT.md).
Feel free to join our [Discord server](https://discord.gg/gkt4y2x).
Click on the "⭐ Star" button to help us gain more visibility on GitHub!
## Authors & Contributors
The project was born in 2016 based on SunwellCore. Unfortunately, SunwellCore was published without any git history, so on git there are no credits for all the contributors before 2016.
You can check the [authors](https://github.com/azerothcore/azerothcore-wotlk/blob/master/AUTHORS) file for more details.
## Important Links
- [Doxygen documentation](https://www.azerothcore.org/pages/doxygen/index.html)
- [Website](http://www.azerothcore.org/)
- [AzerothCore catalogue](http://www.azerothcore.org/catalogue.html "Modules, tools, and other stuff for AzerothCore") (modules, tools, etc...)
- [Our Discord server](https://discord.gg/gkt4y2x)
- [Our wiki](http://www.azerothcore.org/wiki "Easy to use and developed by AzerothCore founder")
- [Our forum](https://github.com/azerothcore/azerothcore-wotlk/discussions/)
- [Our Facebook page](https://www.facebook.com/AzerothCore/)
- [Our LinkedIn page](https://www.linkedin.com/company/azerothcore/)
## License
- The new AzerothCore source components are released under the [GNU AGPL v3](https://www.gnu.org/licenses/agpl-3.0.en.html)
- The old sources based on MaNGOS/TrinityCore are released under the [GNU GPL v2](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html)
It's important to note that AzerothCore is not an official Blizzard Entertainment product, and it is not affiliated with or endorsed by World of Warcraft or Blizzard Entertainment. AzerothCore does not in any case sponsor or support illegal public servers. If you use this project to run an illegal public server rather than for testing and learning, that is your own personal choice.
## Special thanks
[JetBrains](https://www.jetbrains.com/?from=AzerothCore) is providing free [open-source licenses](https://www.jetbrains.com/community/opensource/) to the AzerothCore developers.
[![JetBrains logo.](https://resources.jetbrains.com/storage/products/company/brand/logos/jetbrains.svg)](https://jb.gg/OpenSourceSupport)
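
The installation wiki linked above is the authoritative guide; as a rough sketch, getting the sources and dependencies on a Linux machine might look like the following (the install-deps helper is the one referenced by the CI workflows later in this commit):

```bash
git clone --branch master --single-branch https://github.com/azerothcore/azerothcore-wotlk.git
cd azerothcore-wotlk
# Install build dependencies via the bundled dashboard script.
source ./acore.sh install-deps
```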

.github/SECURITY.md vendored Normal file

@@ -0,0 +1,97 @@
# Security Policy
## Supported Versions
We support the following versions of dependencies.
| Icon | Meaning |
| :------------------- | :---------------: |
| :white_check_mark: | **Supported** |
| :red_circle: | **NOT** Supported |
| :large_blue_diamond: | **Recommended** |
### Versions of AzerothCore:
| AzerothCore Branch | Status | Recommended |
| ---------------------------- | :----------------: | :------------------: |
| **master** | :white_check_mark: | :large_blue_diamond: |
| Any non-official fork | :red_circle: | |
| Any Playerbots fork | :red_circle: | |
| Any NPCBots fork | :red_circle: | |
| Any AC (non-official) repack | :red_circle: | |
### Supported Operating Systems
| Linux (Ubuntu) | Status | Recommended |
| :------------- | :----------------: | :------------------: |
| 24.04 | :white_check_mark: | :large_blue_diamond: |
| 22.04 | :white_check_mark: | |
| 20.04 ≤ | :red_circle: | |
| macOS | Status | Recommended |
| :---- | :----------------: | :------------------: |
| 14 | :white_check_mark: | :large_blue_diamond: |
| 12 ≤ | :red_circle: | |
| Windows | Status | Recommended |
| :------------ | :----------------: | :------------------: |
| Windows 11 | :white_check_mark: | :large_blue_diamond: |
| Windows 10 | :white_check_mark: | |
| Windows 8.1 ≤ | :red_circle: | |
<br>
### Supported Boost Versions:
| Boost | Status | Recommended |
| :----- | :----------------: | :------------------: |
| 1.70 ≥ | :white_check_mark: | :large_blue_diamond: |
### Supported OpenSSL Versions:
| OpenSSL | Status | Recommended |
| :------ | :----------------: | :------------------: |
| 3.X.X ≥ | :white_check_mark: | :large_blue_diamond: |
### Supported CMake Versions:
| CMake | Status | Recommended |
| :----- | :----------------: | :------------------: |
| 3.16 ≥ | :white_check_mark: | :large_blue_diamond: |
### Supported MySQL Versions:
| MySQL | Status | Recommended |
| :---- | :----------------: | :------------------: |
| 8.4 ≥ | :white_check_mark: | :large_blue_diamond: |
| 8.0 | :white_check_mark: | |
| 8.1 | :red_circle: | |
| 8.0 < | :red_circle: | |
### Supported Clang Versions:
| Clang | Status | Recommended |
| :---- | :----------------: | :------------------: |
| 18 | :white_check_mark: | :large_blue_diamond: |
| 15 | :white_check_mark: | |
| 14 ≤ | :red_circle: | |
### Supported GCC Versions:
| GCC | Status | Recommended |
| :--- | :----------------: | :------------------: |
| 14 | :white_check_mark: | :large_blue_diamond: |
| 12 | :white_check_mark: | |
| 11 ≤ | :red_circle: | |
> [!NOTE]
> We do **NOT** support any repacks that may or may not have been made based on AzerothCore. This is because they are usually based on older versions and there is no way to know what is in the precompiled binaries. Instead, you should compile your binaries from the AzerothCore source. To get started, read the [Installation Guide](https://www.azerothcore.org/wiki/installation).
> [!CAUTION]
> [Why you should not use repacks to run your WoW server](https://www.mangosrumors.org/why-you-should-not-use-repacks-to-run-your-wow-server/)
## Reporting a Vulnerability
We class as a vulnerability any hack or exploit that has an impact on server performance or that gives unfair advantages in the game (e.g. fly hacking or injection tools).
If a new vulnerability is found, you should always create a new [bug report](https://github.com/azerothcore/azerothcore-wotlk/issues/new?assignees=&labels=&projects=&template=bug_report.yml).

.github/SUPPORT.md vendored Normal file

@@ -0,0 +1,97 @@
# How to ask for help
### Need help?
Do you need support with AzerothCore? No worries, we're happy to help you!
Whether you have troubles installing AzerothCore, or you want to ask how-to or generic help questions, **we will help you**.
We just want you to ask for support in the **proper way**. Please read this document before asking for any help.
### Why is the "proper way" so important?
If you ask a question directly in the chat, it can easily get lost, and you might never be helped unless someone who can help happens to be online at that exact moment.
If you post your question on StackOverflow, it will **stay** there, and someone can help you at any moment. You can also link your question in the chat to give it more visibility. Doesn't that make sense to you yet? Just keep reading.
Also, most of the questions asked by users are repetitive, so we need an efficient way to handle all support requests. We use StackOverflow for support questions and GitHub for bug reports.
Sounds complicated? Not at all! **Just keep reading**.
### Bug reports
A game feature (e.g. a spell, quest, talent, etc.) doesn't work as it is supposed to?
Congratulations! You've just found a bug. Please search among the [existing issues](https://github.com/azerothcore/azerothcore-wotlk/issues). There is a good chance that someone else has already reported the same bug that you found, in which case we kindly ask you to "confirm" it by leaving a comment.
Example:
![AzerothCore example issue search](https://user-images.githubusercontent.com/75517/51130957-9b9f9580-182e-11e9-8f7f-11aa5d7b6d67.png)
If after searching for an existing issue report, you didn't find any, then you should [open a new issue](https://github.com/azerothcore/azerothcore-wotlk/issues/new).
### Other support requests
- Getting an error while installing AzerothCore?
- Is there something in the documentation that is not clear for you?
- Do you want to know how to do something specific with AzerothCore?
- Are you trying to do something like implementing a new feature, fixing a bug, etc...?
- Do you have questions about C++ or SQL code that is related to AzerothCore or other AC custom modules?
These kinds of questions are considered support questions and are handled via [StackOverflow](https://stackoverflow.com/questions/tagged/azerothcore). **Read below**
### Do I need to register in StackOverflow?
**Not necessarily!** If you have a Google or a Facebook account, you can already log in to [StackOverflow](https://stackoverflow.com/questions/tagged/azerothcore) (click the **_Log in_** button on the top-right corner of the website).
![image](https://user-images.githubusercontent.com/75517/51560794-d84e3b00-1e85-11e9-8510-6f1dd0b33d18.png)
### Search among the existing questions
There is a good chance that your question has already been asked by someone else, so please **search** it on [StackOverflow](https://stackoverflow.com/questions/tagged/azerothcore).
For example, are you getting an error from MySQL? Search it under the **[azerothcore]** tag!
![image](https://user-images.githubusercontent.com/75517/51131779-f0dca680-1830-11e9-8ccc-ef10ee8855a6.png)
Oh, there is already a question! And it has an answer! Let's open it!
![image](https://user-images.githubusercontent.com/75517/51132034-85df9f80-1831-11e9-9cc9-3eaee236396e.png)
**Another example**, imagine you're wondering whether you can use Lua scripts on AzerothCore:
![image](https://user-images.githubusercontent.com/75517/51131089-f802b500-182e-11e9-9b5d-a438172b22ea.png)
There is already a question and answer for that! Let's upvote the guys who asked and answered:
![image](https://user-images.githubusercontent.com/75517/51131658-ac510b00-1830-11e9-98dc-e7f3ef0da058.png)
## How to ask new questions
What if you can't find a question related to your specific problem? **Let's [open a new question](https://stackoverflow.com/questions/ask)**.
Remember to:
- Add a descriptive message.
- **Bad** example `I got DB error plz help me`.
- **Good** example: `After importing the sql updates, I get the error "XXX"`
- Don't forget the **[azerothcore]** tag!
- ![image](https://user-images.githubusercontent.com/75517/51132313-364da380-1832-11e9-8483-3bb6898d438a.png)
- It's useful to add **4 more tags** according to the category of your question (e.g. `c++`, `docker`, `MySQL`, `sql`, etc... ).
- Link your question in the #support-general channel of our [Discord chat](https://discordapp.com/channels/217589275766685707/284406375495368704)
- Read: [stackoverflow.com/help/how-to-ask](https://stackoverflow.com/help/how-to-ask)
### Share your knowledge!
Do you have anything to share with the community? Do you feel like some information could be useful to someone else (or to yourself in the future)? Have you solved a problem that took a while to figure out?
Ask a question on StackOverflow and then **answer it yourself**! Is it allowed? Yes, it is! Read this:
[https://stackoverflow.com/help/self-answer](https://stackoverflow.com/help/self-answer)

@@ -0,0 +1,43 @@
name: docker tag and build
description: a helper action to simplify generating Docker tags and building images
inputs:
component-name:
description: name of the component/docker image (e.g. worldserver, authserver)
type: string
required: true
push:
description: whether to push the image or not
type: boolean
required: true
version:
description: version tag to use for docker image
required: true
type: string
dockerfile:
description: dockerfile to use
required: false
default: apps/docker/Dockerfile
runs:
using: composite
steps:
- name: Get Docker Metadata
id: meta
uses: docker/metadata-action@v5
with:
images: acore/ac-wotlk-${{ inputs.component-name }}
tags: |
type=raw,value=${{ inputs.version }}
type=ref,event=branch
- name: Build and push
uses: docker/build-push-action@v5
with:
context: ${{ github.workspace }}
file: ${{ inputs.dockerfile }}
push: ${{ inputs.push }}
tags: ${{ steps.meta.outputs.tags }}
target: ${{ inputs.component-name }}
build-args: |
USER_ID=1000
GROUP_ID=1000
DOCKER_USER=acore
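
Expressed as plain Docker CLI calls, the composite action above boils down to roughly the following (the worldserver component and the master tag are illustrative values; the real tags come from docker/metadata-action):

```bash
docker build \
  --file apps/docker/Dockerfile \
  --target worldserver \
  --build-arg USER_ID=1000 --build-arg GROUP_ID=1000 --build-arg DOCKER_USER=acore \
  --tag acore/ac-wotlk-worldserver:master \
  .
docker push acore/ac-wotlk-worldserver:master   # only when the push input is true
```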

.github/actions/linux-build/action.yml vendored Normal file

@@ -0,0 +1,192 @@
name: linux build
description: a helper action to shorten running a build on linux
inputs:
CC:
default: clang
description: C Compiler to use
type: string
required: true
CXX:
default: clang++
description: C++ compiler to use
type: string
required: true
modules:
default: false
description: Flag to install modules or not
required: true
type: boolean
tools:
default: none
description: Flag to enable tools build
required: false
type: string
pch:
default: false
description: Flag to enable or disable PCH
required: false
type: boolean
maxerrors:
default: 1
description: Max allowed error count before compilation stops
required: false
type: number
keepgoing:
default: false
description: Flag to continue build after errors
required: false
type: boolean
runs:
using: composite
steps:
- name: Cache
uses: actions/cache@v4
if: inputs.pch != 'true'
with:
path: ${{ github.workspace }}/var/ccache
# format
# ccache:OS:CC_CXX:MODULES:GITHUB_REF:GITHUB_SHA
key: ccache:${{ runner.os }}:${{ inputs.CC }}_${{ inputs.CXX }}:${{ inputs.modules }}:${{ github.ref }}:${{ github.sha }}
restore-keys: |
ccache:${{ runner.os }}:${{ inputs.CC }}_${{ inputs.CXX }}:${{ inputs.modules }}:${{ github.ref }}
ccache:${{ runner.os }}:${{ inputs.CC }}_${{ inputs.CXX }}:${{ inputs.modules }}
ccache:${{ runner.os }}:${{ inputs.CC }}_${{ inputs.CXX }}
# This script moves sql files from "data/sql/updates/pending_$DB" to the
# proper folder for the db
- name: Process pending sql
shell: bash
run: bash apps/ci/ci-pending-sql.sh
- name: Install build dependencies
shell: bash
run: |
sudo apt update
sudo apt remove needrestart #refer: https://github.com/actions/runner-images/issues/9937
sudo apt-get -y install ccache clang cmake curl google-perftools \
libmysqlclient-dev make unzip build-essential cmake-data \
libboost-all-dev libbz2-dev libncurses5-dev libmysql++-dev \
libreadline6-dev libssl-dev libtool openssl zlib1g-dev
# Account for https://github.com/actions/runner-images/issues/8659
# based off of https://github.com/actions/runner-images/issues/8659#issuecomment-1852353116
UBUNTU_VERSION="$(grep VERSION_ID /etc/os-release | cut -f2 -d\")"
source /etc/os-release
if [[ "$VERSION_CODENAME" == "jammy" ]]; then
if [[ "${{ inputs.cc }}" =~ "clang-" ]]; then
CLANG_VERSION="$(echo '${{ inputs.cc }}' | cut -f2 -d\-)"
wget -qO- https://apt.llvm.org/llvm-snapshot.gpg.key | sudo apt-key add -
sudo add-apt-repository "deb http://apt.llvm.org/$VERSION_CODENAME/ llvm-toolchain-$VERSION_CODENAME-$CLANG_VERSION main"
sudo apt-get -qq update
sudo apt-get -qq install '${{ inputs.cc }}'
fi
fi
- name: setup ccache
shell: bash
env:
CCACHE_DIR: $GITHUB_WORKSPACE/var/ccache
run: |
cat <<EOF >> $GITHUB_ENV
CCACHE_DIR=${{ env.CCACHE_DIR }}
CCACHE_MAXSIZE=1000MB
CCACHE_SLOPPINESS=pch_defines,time_macros,include_file_mtime
CCACHE_CPP2=true
CCACHE_COMPRESS=1
CCACHE_COMPRESSLEVEL=9
CCACHE_COMPILERCHECK=content
CCACHE_LOGFILE=$CCACHE_DIR/cache.debug
CC=${{ inputs.CC }}
CXX=${{ inputs.CXX }}
EOF
- name: Configure
shell: bash
run: |
set -x
mkdir build
cd build
cmake "$GITHUB_WORKSPACE" \
-DCMAKE_INSTALL_PREFIX="$GITHUB_WORKSPACE/env/dist" \
-DAPPS_BUILD="all" \
-DTOOLS_BUILD=${{ inputs.tools }} \
-DSCRIPTS="static" \
-DMODULES="static" \
-DWITH_WARNINGS="ON" \
-DCMAKE_BUILD_TYPE="Release" \
-DCMAKE_CXX_COMPILER_LAUNCHER="ccache" \
-DCMAKE_C_COMPILER_LAUNCHER="ccache" \
-DCMAKE_C_FLAGS="-Werror ${{ startsWith(inputs.cc, 'clang') && '-ferror-limit=' || '-fmax-errors=' }}${{inputs.maxerrors}} " \
-DCMAKE_CXX_FLAGS="-Werror ${{ startsWith(inputs.cxx, 'clang') && '-ferror-limit=' || '-fmax-errors=' }}${{inputs.maxerrors}}" \
-DBUILD_TESTING="ON" \
-DUSE_SCRIPTPCH=${{ inputs.pch == 'true' && 'ON' || '' }} \
-DUSE_COREPCH=${{ inputs.pch == 'true' && 'ON' || '' }} \
${{ inputs.pch == 'true' && '' || '-DNOPCH=true' }}
- name: build
shell: bash
working-directory: "${{ github.workspace }}/build"
run: |
# '--' passes '--keep-going' to the underlying build system (make)
cmake --build . --config "Release" -j "$(($(nproc) + 2))" ${{ inputs.keepgoing == 'true' && '-- --keep-going' || '' }}
- name: install
shell: bash
working-directory: "${{ github.workspace }}/build"
run: cmake --install . --config "Release"
- name: Setup config
shell: bash
run: |
ls -1 env/dist/etc/*.conf.dist | while read -r dist; do
# chop the ".dist" off the end
config_name="$(<<< $dist rev | cut -f1 -d\. --complement | rev)"
cp -v "$dist" "$config_name"
done
cat <<EOF >> $GITHUB_ENV
AC_LOGIN_DATABASE_INFO=localhost;3306;root;root;acore_auth
AC_CHARACTER_DATABASE_INFO=localhost;3306;root;root;acore_characters
AC_WORLD_DATABASE_INFO=localhost;3306;root;root;acore_world
AC_DATA_DIR=env/dist/data
AC_LOGS_DIR=env/dist/logs
EOF
- name: get dbc files
shell: bash
run: |
git clone --depth 1 --branch master --single-branch https://github.com/ac-data/ac-data.git "$AC_DATA_DIR"
- name: Start MySQL container
shell: bash
run: sudo systemctl start mysql.service
- name: Dry run authserver
shell: bash
run: timeout 5m env/dist/bin/authserver --dry-run
- name: Dry run worldserver
shell: bash
run: timeout 5m env/dist/bin/worldserver --dry-run
- name: Check startup errors
shell: bash
run: |
error_log="$AC_LOGS_DIR/Errors.log"
# -s checks if the file's size is greater than 0 bytes
# ! -s checks if the file's size is less than/equal to 0 bytes
# if the error log is empty, exit without error
[[ ! -s "$error_log" ]] && exit 0
printf "The Errors.log file contains startup errors:\n\n"
cat "$error_log"
printf "\nPlease solve the startup errors listed above!\n"
exit 1
- name: Run unit tests
shell: bash
run: |
if [[ -f build/obj/src/test/unit_tests ]]; then
build/obj/src/test/unit_tests
else
exit 0
fi
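
The dry-run smoke test at the end of the action can be reproduced locally after building and installing into env/dist as the steps above do; the database connection strings are placeholders, exactly as in the action.

```bash
export AC_LOGIN_DATABASE_INFO="localhost;3306;root;root;acore_auth"
export AC_CHARACTER_DATABASE_INFO="localhost;3306;root;root;acore_characters"
export AC_WORLD_DATABASE_INFO="localhost;3306;root;root;acore_world"
export AC_DATA_DIR=env/dist/data
export AC_LOGS_DIR=env/dist/logs

timeout 5m env/dist/bin/authserver --dry-run
timeout 5m env/dist/bin/worldserver --dry-run

# Startup errors are collected in Errors.log; an empty file means a clean boot.
[[ ! -s "$AC_LOGS_DIR/Errors.log" ]] && echo "no startup errors"
```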

.github/labeler.yml vendored Normal file

@@ -0,0 +1,59 @@
file-cpp:
- changed-files:
- any-glob-to-any-file:
- 'src/**/*.cpp'
- 'src/**/*.h'
- 'deps/**/*.cpp'
- 'deps/**/*.h'
DB:
- changed-files:
- any-glob-to-any-file: 'data/**/*.sql'
CORE:
- changed-files:
- any-glob-to-any-file:
- 'src/*'
- 'src/common/**/*'
- 'src/genrev/**/*'
- 'src/server/*'
- 'src/server/apps/**/*'
- 'src/server/database/**/*'
- 'src/server/game/**/*'
- 'src/server/shared/**/*'
- 'src/tools/**/*'
Script:
- changed-files:
- any-glob-to-any-file:
- 'src/server/scripts/**/*.cpp'
- 'src/server/scripts/**/*.h'
UnitTests:
- changed-files:
- any-glob-to-any-file: 'src/test/**/*'
Documentation:
- changed-files:
- any-glob-to-any-file: '**/*.md'
Bash:
- changed-files:
- any-glob-to-any-file:
- '*.sh'
- 'apps/**/*.sh'
- 'conf/**/*.sh'
- 'deps/**/*.sh'
- 'modules/**/*.sh'
CMake:
- changed-files:
- any-glob-to-any-file: '**/*.cmake'
Workflow:
- changed-files:
- any-glob-to-any-file: '.github/workflows/*'
Batch:
- changed-files:
- any-glob-to-any-file: 'apps/**/*.bat'

.github/workflows/add-to-project.yml vendored Normal file

@@ -0,0 +1,106 @@
name: Auto Assign to Project(s)
on:
issues:
types: [opened, labeled]
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
jobs:
assign_one_project:
runs-on: ubuntu-latest
name: Assign to One Project
steps:
- name: Assign issues with `ChromieCraft Generic` label to their project
uses: srggrs/assign-one-project-github-action@1.2.1
if: |
contains(github.event.issue.labels.*.name, 'ChromieCraft Generic')
with:
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/20'
- name: Assign issues with `1-19` label to their project
uses: srggrs/assign-one-project-github-action@1.2.1
if: |
contains(github.event.issue.labels.*.name, '1-19')
with:
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/14'
- name: Assign issues with `20-29` label to their project
uses: srggrs/assign-one-project-github-action@1.2.1
if: |
contains(github.event.issue.labels.*.name, '20-29')
with:
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/17'
- name: Assign issues with `30-39` label to their project
uses: srggrs/assign-one-project-github-action@1.2.1
if: |
contains(github.event.issue.labels.*.name, '30-39')
with:
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/23'
- name: Assign issues with `40-49` label to their project
uses: srggrs/assign-one-project-github-action@1.2.1
if: |
contains(github.event.issue.labels.*.name, '40-49')
with:
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/24'
- name: Assign issues with `50-59` label to their project
uses: srggrs/assign-one-project-github-action@1.2.1
if: |
contains(github.event.issue.labels.*.name, '50-59')
with:
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/25'
- name: Assign issues with `60` label to their project
uses: srggrs/assign-one-project-github-action@1.2.1
if: |
contains(github.event.issue.labels.*.name, '60')
with:
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/22'
- name: Assign issues with `61-64` label to their project
uses: srggrs/assign-one-project-github-action@1.2.1
if: |
contains(github.event.issue.labels.*.name, '61-64')
with:
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/32'
- name: Assign issues with `65-69` label to their project
uses: srggrs/assign-one-project-github-action@1.2.1
if: |
contains(github.event.issue.labels.*.name, '65-69')
with:
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/36'
- name: Assign issues with `70` label to their project
uses: srggrs/assign-one-project-github-action@1.2.1
if: |
contains(github.event.issue.labels.*.name, '70')
with:
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/26'
- name: Assign issues with `71-74` label to their project
uses: srggrs/assign-one-project-github-action@1.2.1
if: |
contains(github.event.issue.labels.*.name, '71-74')
with:
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/33'
- name: Assign issues with `75-79` label to their project
uses: srggrs/assign-one-project-github-action@1.2.1
if: |
contains(github.event.issue.labels.*.name, '75-79')
with:
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/37'
- name: Assign issues with `80` label to their project
uses: srggrs/assign-one-project-github-action@1.2.1
if: |
contains(github.event.issue.labels.*.name, '80')
with:
project: 'https://github.com/azerothcore/azerothcore-wotlk/projects/38'

.github/workflows/codestyle.yml vendored Normal file

@@ -0,0 +1,35 @@
name: Codestyle
on:
pull_request:
types:
- opened
- reopened
- synchronize
paths:
- src/**
- "!README.md"
- "!docs/**"
jobs:
triage:
runs-on: ubuntu-latest
name: C++
if: github.repository == 'liyunfan1223/azerothcore-wotlk' && !github.event.pull_request.draft
steps:
- uses: actions/checkout@v4
- name: Setup python
uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: AzerothCore codestyle
run: python ./apps/codestyle/codestyle-cpp.py
- name: C++ Advanced
run: |
sudo apt update -y
sudo apt install -y cppcheck
cppcheck --force --inline-suppr --suppressions-list=./.suppress.cppcheck src/ --output-file=report.txt
if [ -s report.txt ]; then # if file is not empty
cat report.txt
exit 1 # let the github action fail
fi
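
Both checks can be run locally before opening a pull request; this mirrors the workflow steps above and assumes cppcheck and Python 3 are installed.

```bash
python ./apps/codestyle/codestyle-cpp.py
cppcheck --force --inline-suppr --suppressions-list=./.suppress.cppcheck src/ --output-file=report.txt
[ -s report.txt ] && cat report.txt   # any findings here would fail the CI job
```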

.github/workflows/core-build-nopch.yml vendored Normal file

@@ -0,0 +1,43 @@
name: nopch-build
on:
push:
branches:
- 'master'
pull_request:
types:
- opened
- reopened
- synchronize
concurrency:
group: ${{ github.head_ref }} || concat(${{ github.ref }}, ${{ github.workflow }})
cancel-in-progress: true
jobs:
build:
strategy:
fail-fast: true
matrix:
include:
- os: ubuntu-22.04
compiler:
CC: clang-15
CXX: clang++-15
- os: ubuntu-24.04
compiler:
CC: clang-18
CXX: clang++-18
- os: ubuntu-24.04
compiler:
CC: gcc-14
CXX: g++-14
runs-on: ${{ matrix.os }}
name: ${{ matrix.os }}-${{ matrix.compiler.CC }}-nopch
if: github.repository == 'liyunfan1223/azerothcore-wotlk' && !github.event.pull_request.draft
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/linux-build
with:
CC: ${{ matrix.compiler.CC }}
CXX: ${{ matrix.compiler.CXX }}
pch: false

.github/workflows/core-build-pch.yml vendored Normal file

@@ -0,0 +1,41 @@
name: pch-build
on:
push:
branches:
- 'master'
pull_request:
types:
- opened
- reopened
- synchronize
concurrency:
group: ${{ github.head_ref }} || concat(${{ github.ref }}, ${{ github.workflow }})
cancel-in-progress: true
jobs:
build:
strategy:
fail-fast: true
matrix:
include:
- os: ubuntu-22.04
compiler:
CC: clang-15
CXX: clang++-15
- os: ubuntu-24.04
compiler:
CC: clang-18
CXX: clang++-18
runs-on: ${{ matrix.os }}
name: ${{ matrix.os }}-${{ matrix.compiler.CC }}-pch
env:
COMPILER: ${{ matrix.compiler }}
if: github.repository == 'liyunfan1223/azerothcore-wotlk' && !github.event.pull_request.draft
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/linux-build
with:
CC: ${{ matrix.compiler.CC }}
CXX: ${{ matrix.compiler.CXX }}
pch: true

@@ -0,0 +1,99 @@
# This starter workflow is for a CMake project running on multiple platforms. There is a different starter workflow if you just want a single platform.
# See: https://github.com/actions/starter-workflows/blob/main/ci/cmake-single-platform.yml
name: ubuntu-build
on:
push:
branches: [ "Playerbot" ]
pull_request:
branches: [ "Playerbot" ]
jobs:
build:
strategy:
# Set fail-fast to false to ensure that feedback is delivered for all matrix combinations. Consider changing this to true when your workflow is stable.
fail-fast: false
matrix:
# the result of the matrix will be the combination of all attributes, so we get os*compiler builds
include:
- os: ubuntu-22.04
c_compiler: clang
cpp_compiler: clang++
build_type: Release
- os: ubuntu-22.04
c_compiler: gcc
cpp_compiler: g++
build_type: Release
- os: ubuntu-24.04
c_compiler: gcc
cpp_compiler: g++
build_type: Release
runs-on: ${{ matrix.os }}
name: ${{ matrix.os }}-${{ matrix.cpp_compiler }}
steps:
- name: Checkout AzerothCore
uses: actions/checkout@v3
- name: Set reusable strings
# Turn repeated input strings (such as the build output directory) into step outputs. These step outputs can be used throughout the workflow file.
id: strings
shell: bash
run: |
echo "build-output-dir=${{ github.workspace }}/build" >> "$GITHUB_OUTPUT"
# - name: Clone Playerbot Module
# run: git clone --depth=1 --branch=master https://github.com/liyunfan1223/mod-playerbots.git modules/mod-playerbots
- name: Checkout Playerbot Module
uses: actions/checkout@v3
with:
repository: 'liyunfan1223/mod-playerbots'
path: 'modules/mod-playerbots'
- name: Install Requirements
run: sudo apt-get update && sudo apt-get install git cmake make gcc g++ clang libmysqlclient-dev libssl-dev libbz2-dev libreadline-dev libncurses-dev mysql-server libboost-all-dev
# - name: Cache
# uses: actions/cache@v3
# with:
# path: var/ccache
# key: ccache:${{ matrix.os }}:${{ matrix.compiler }}:${{ matrix.modules }}-modules:${{ github.ref }}:${{ github.sha }}
# restore-keys: |
# ccache:${{ matrix.os }}:${{ matrix.compiler }}:${{ matrix.modules }}-modules:${{ github.ref }}
# ccache:${{ matrix.os }}:${{ matrix.compiler }}:${{ matrix.modules }}-modules
# - name: Configure OS
# run: source ./acore.sh install-deps
# env:
# CONTINUOUS_INTEGRATION: true
# - name: Create conf/config.sh
# run: source ./apps/ci/ci-conf-core.sh
# - name: Process pending sql
# run: bash bin/acore-db-pendings
# - name: Build
# run: source ./apps/ci/ci-compile.sh
- name: Configure CMake
# Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make.
# See https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html?highlight=cmake_build_type
run: >
cmake -B ${{ steps.strings.outputs.build-output-dir }}
-DCMAKE_CXX_COMPILER=${{ matrix.cpp_compiler }}
-DCMAKE_C_COMPILER=${{ matrix.c_compiler }}
-DCMAKE_BUILD_TYPE=${{ matrix.build_type }}
-S ${{ github.workspace }}
- name: Build
# Build your program with the given configuration. Note that --config is needed because the default Windows generator is a multi-config generator (Visual Studio generator).
run: cmake --build ${{ steps.strings.outputs.build-output-dir }} --config ${{ matrix.build_type }}
# - name: Test
# working-directory: ${{ steps.strings.outputs.build-output-dir }}
# # Execute tests defined by the CMake configuration. Note that --build-config is needed because the default Windows generator is a multi-config generator (Visual Studio generator).
# # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail
# run: ctest --build-config ${{ matrix.build_type }}

99
.github/workflows/core-build.yml vendored Normal file
View File

@ -0,0 +1,99 @@
# This starter workflow is for a CMake project running on multiple platforms. There is a different starter workflow if you just want a single platform.
# See: https://github.com/actions/starter-workflows/blob/main/ci/cmake-single-platform.yml
name: ubuntu-build
on:
push:
branches: [ "Playerbot" ]
pull_request:
branches: [ "Playerbot" ]
jobs:
build:
strategy:
# Set fail-fast to false to ensure that feedback is delivered for all matrix combinations. Consider changing this to true when your workflow is stable.
fail-fast: false
matrix:
# the matrix uses explicit 'include' entries, so each listed os/compiler combination produces one build
include:
- os: ubuntu-22.04
c_compiler: clang
cpp_compiler: clang++
build_type: Release
- os: ubuntu-22.04
c_compiler: gcc
cpp_compiler: g++
build_type: Release
- os: ubuntu-24.04
c_compiler: gcc
cpp_compiler: g++
build_type: Release
runs-on: ${{ matrix.os }}
name: ${{ matrix.os }}-${{ matrix.cpp_compiler }}
steps:
- name: Checkout AzerothCore
uses: actions/checkout@v3
- name: Set reusable strings
# Turn repeated input strings (such as the build output directory) into step outputs. These step outputs can be used throughout the workflow file.
id: strings
shell: bash
run: |
echo "build-output-dir=${{ github.workspace }}/build" >> "$GITHUB_OUTPUT"
# - name: Clone Playerbot Module
# run: git clone --depth=1 --branch=master https://github.com/liyunfan1223/mod-playerbots.git modules/mod-playerbots
# - name: Checkout Playerbot Module
# uses: actions/checkout@v3
# with:
# repository: 'liyunfan1223/mod-playerbots'
# path: 'modules/mod-playerbots'
- name: Install Requirements
run: sudo apt-get update && sudo apt-get install -y git cmake make gcc g++ clang libmysqlclient-dev libssl-dev libbz2-dev libreadline-dev libncurses-dev mysql-server libboost-all-dev
# - name: Cache
# uses: actions/cache@v3
# with:
# path: var/ccache
# key: ccache:${{ matrix.os }}:${{ matrix.compiler }}:${{ matrix.modules }}-modules:${{ github.ref }}:${{ github.sha }}
# restore-keys: |
# ccache:${{ matrix.os }}:${{ matrix.compiler }}:${{ matrix.modules }}-modules:${{ github.ref }}
# ccache:${{ matrix.os }}:${{ matrix.compiler }}:${{ matrix.modules }}-modules
# - name: Configure OS
# run: source ./acore.sh install-deps
# env:
# CONTINUOUS_INTEGRATION: true
# - name: Create conf/config.sh
# run: source ./apps/ci/ci-conf-core.sh
# - name: Process pending sql
# run: bash bin/acore-db-pendings
# - name: Build
# run: source ./apps/ci/ci-compile.sh
- name: Configure CMake
# Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make.
# See https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html?highlight=cmake_build_type
run: >
cmake -B ${{ steps.strings.outputs.build-output-dir }}
-DCMAKE_CXX_COMPILER=${{ matrix.cpp_compiler }}
-DCMAKE_C_COMPILER=${{ matrix.c_compiler }}
-DCMAKE_BUILD_TYPE=${{ matrix.build_type }}
-S ${{ github.workspace }}
- name: Build
# Build your program with the given configuration. Note that --config is needed because the default Windows generator is a multi-config generator (Visual Studio generator).
run: cmake --build ${{ steps.strings.outputs.build-output-dir }} --config ${{ matrix.build_type }}
# - name: Test
# working-directory: ${{ steps.strings.outputs.build-output-dir }}
# # Execute tests defined by the CMake configuration. Note that --build-config is needed because the default Windows generator is a multi-config generator (Visual Studio generator).
# # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail
# run: ctest --build-config ${{ matrix.build_type }}
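
The two CMake steps above map directly onto a local build; a minimal shell sketch, assuming a `build/` output directory, the Release configuration from the matrix, and the gcc/g++ entry of that matrix:
# configure (single-config generator, so CMAKE_BUILD_TYPE is required)
cmake -B build -S . -DCMAKE_C_COMPILER=gcc -DCMAKE_CXX_COMPILER=g++ -DCMAKE_BUILD_TYPE=Release
# compile with the same configuration
cmake --build build --config Release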

View File

@ -0,0 +1,53 @@
name: nopch-module-build
on:
push:
branches:
- 'master'
pull_request:
types:
- opened
- reopened
- synchronize
paths:
- 'src/*'
- 'src/common/**/*'
- 'src/genrev/**/*'
- 'src/server/*'
- 'src/server/apps/**/*'
- 'src/server/database/**/*'
- 'src/server/game/**/*'
- 'src/server/shared/**/*'
- 'src/tools/**/*'
concurrency:
group: ${{ github.head_ref }} || concat(${{ github.ref }}, ${{ github.workflow }})
cancel-in-progress: true
jobs:
build-modules:
strategy:
fail-fast: true
matrix:
include:
- os: ubuntu-24.04
compiler:
CC: clang-18
CXX: clang++-18
runs-on: ${{ matrix.os }}
name: ${{ matrix.os }}-${{ matrix.compiler.CC }}-nopch-modules
if: github.repository == 'liyunfan1223/azerothcore-wotlk' && !github.event.pull_request.draft
steps:
- uses: actions/checkout@v4
# This script installs a general list of modules to compile with
# azerothcore. This is useful for ensuring that module compilation
# functionality works.
- name: Checkout modules
run: bash -x ./apps/ci/ci-install-modules.sh
- uses: ./.github/actions/linux-build
with:
CC: ${{ matrix.compiler.CC }}
CXX: ${{ matrix.compiler.CXX }}
modules: true
pch: false
maxerrors: 0
keepgoing: true

120
.github/workflows/docker_build.yml vendored Normal file
View File

@ -0,0 +1,120 @@
name: docker-build
on:
push:
branches:
- 'master'
pull_request:
types:
- labeled
- synchronize
concurrency:
group: ${{ github.head_ref }} || concat(${{ github.ref }}, ${{ github.workflow }})
cancel-in-progress: true
env:
COMPOSE_DOCKER_CLI_BUILD: 1
DOCKER_BUILDKIT: 1
RUNNING_ON_PRIMARY_BRANCH: |
${{ (github.repository == 'liyunfan1223/azerothcore-wotlk' && github.ref_name == 'master') && 'true' || 'false' }}
jobs:
build-containers:
runs-on: "ubuntu-latest"
if: |
github.repository == 'liyunfan1223/azerothcore-wotlk'
&& !github.event.pull_request.draft
&& (github.ref_name == 'master' || contains(github.event.pull_request.labels.*.name, 'run-build') || github.event.label.name == 'run-build')
steps:
- name: Free up disk space
run: |
sudo rm -rf /usr/local/lib/android
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
- uses: actions/checkout@v4
# The containers created in this workflow are used by
# acore-docker, which has a dependency on mod-eluna.
#
# If you want containers without mod-eluna, the best option is to
# build them locally (such as with `docker compose build`)
- name: Download Eluna
if: github.repository == 'azerothcore/azerothcore-wotlk' && github.ref_name == 'master'
uses: actions/checkout@v4
with:
repository: azerothcore/mod-eluna
path: modules/mod-eluna
- name: Login to Docker Hub
if: github.repository == 'azerothcore/azerothcore-wotlk' && github.ref_name == 'master'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Get version
id: version
run: |
version="$(jq -r '.version' acore.json)"
echo "version=$version" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: build worldserver
uses: ./.github/actions/docker-tag-and-build
with:
component-name: worldserver
version: ${{ steps.version.outputs.version }}
push: ${{ env.RUNNING_ON_PRIMARY_BRANCH }}
- name: build authserver
uses: ./.github/actions/docker-tag-and-build
with:
component-name: authserver
version: ${{ steps.version.outputs.version }}
push: ${{ env.RUNNING_ON_PRIMARY_BRANCH }}
- name: build db-import
uses: ./.github/actions/docker-tag-and-build
with:
component-name: db-import
version: ${{ steps.version.outputs.version }}
push: ${{ env.RUNNING_ON_PRIMARY_BRANCH }}
- name: build client-data
uses: ./.github/actions/docker-tag-and-build
with:
component-name: client-data
version: ${{ steps.version.outputs.version }}
push: ${{ env.RUNNING_ON_PRIMARY_BRANCH }}
- name: build tools
uses: ./.github/actions/docker-tag-and-build
with:
component-name: tools
version: ${{ steps.version.outputs.version }}
push: ${{ env.RUNNING_ON_PRIMARY_BRANCH }}
- name: build dev-server
uses: ./.github/actions/docker-tag-and-build
with:
component-name: dev
version: ${{ steps.version.outputs.version }}
push: ${{ env.RUNNING_ON_PRIMARY_BRANCH }}
dockerfile: apps/docker/Dockerfile.dev-server
- name: Trigger acore-docker CI
if: github.repository == 'azerothcore/azerothcore-wotlk' && github.ref_name == 'master'
uses: peter-evans/repository-dispatch@v2
with:
token: ${{ secrets.ACORE_DOCKER_REPO_ACCESS_TOKEN }}
repository: azerothcore/acore-docker
event-type: azerothcore-new-images
client-payload: >
{
"ref": "${{ github.ref }}",
"sha": "${{ github.sha }}"
}
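
As the mod-eluna comment above suggests, images without that module are best built locally; a minimal sketch run from the repository root, assuming the compose files shipped in this commit:
# build all service images defined by the compose setup (no mod-eluna checkout required)
docker compose build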

49
.github/workflows/import_pending.yml vendored Normal file
View File

@ -0,0 +1,49 @@
name: import-pending
on:
push:
branches:
- master
jobs:
import-pending:
strategy:
fail-fast: false
runs-on: ubuntu-24.04
permissions: write-all
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
# If we're fetching all the history in a later step it makes sense to
# pre-load it now
fetch-depth: 0
ref: ${{ github.ref_name }}
- uses: denoland/setup-deno@v1
with:
# Specifies latest 1.x
deno-version: "~1.0"
- name: Import and commit pending sql
run: |
git config --local user.email "github-actions[bot]@users.noreply.github.com"
git config --local user.name "github-actions[bot]"
# Get the latest changes from git
git pull --rebase origin "${{ github.ref_name }}"
bash bin/acore-db-pendings
deno run --allow-all --unstable apps/ci/ci-pending-changelogs.ts
git add -A .
git commit -am "chore(DB): import pending files" -m "Referenced commit(s): ${GITHUB_SHA}" || true
env:
# Noting that the branch name can only be master, as per the event
# triggering this action
BRANCH: ${{ github.ref_name }}
- name: Push changes
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.AC_GITHUB_TOKEN }}
# Noting that the branch name can only be master, as per the event
# triggering this action
branch: ${{ github.ref_name }}

16
.github/workflows/issue-labeler.yml vendored Normal file
View File

@ -0,0 +1,16 @@
name: "Issue Labeler"
on:
issues:
types: [opened]
jobs:
issue_labeler:
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
runs-on: ubuntu-latest
name: Issue Labeler
steps:
- name: Issue Labeler
id: issue-labeler
uses: azerothcore/GitHub-Actions@issue-labeler-1.0.2
with:
token: ${{ secrets.GITHUB_TOKEN }}

36
.github/workflows/macos_build.yml vendored Normal file
View File

@ -0,0 +1,36 @@
name: macos-build
on:
push:
branches: [ "Playerbot" ]
pull_request:
branches: [ "Playerbot" ]
concurrency:
group: ${{ github.head_ref }} || concat(${{ github.ref }}, ${{ github.workflow }})
cancel-in-progress: true
jobs:
macos-build:
strategy:
fail-fast: false
matrix:
os:
- macos-14
runs-on: ${{ matrix.os }}
name: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- name: Cache
uses: actions/cache@v4
with:
path: ~/Library/Caches/ccache
key: ccache:${{ matrix.os }}:${{ github.ref }}:${{ github.sha }}
restore-keys: |
ccache:${{ matrix.os }}:${{ github.ref }}
ccache:${{ matrix.os }}
- name: Install latest bash
run: brew install bash
- name: Configure OS
run: source ./acore.sh install-deps
- name: Build
run: source ./apps/ci/mac/ci-compile.sh

18
.github/workflows/pr_labeler.yml vendored Normal file
View File

@ -0,0 +1,18 @@
name: PR Labeler
on:
- pull_request_target
jobs:
triage:
runs-on: ubuntu-24.04
permissions: write-all
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
steps:
- uses: actions/checkout@v4
with:
persist-credentials: true
- uses: actions/labeler@v5
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
configuration-path: .github/labeler.yml
sync-labels: true

25
.github/workflows/sql-codestyle.yml vendored Normal file
View File

@ -0,0 +1,25 @@
name: Codestyle
on:
pull_request:
types:
- opened
- reopened
- synchronize
paths:
- data/**
- "!README.md"
- "!docs/**"
jobs:
triage:
runs-on: ubuntu-latest
name: SQL
if: github.repository == 'azerothcore/azerothcore-wotlk' && !github.event.pull_request.draft
steps:
- uses: actions/checkout@v4
- name: Setup python
uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: AzerothCore codestyle
run: python ./apps/codestyle/codestyle-sql.py

40
.github/workflows/tools_build.yml vendored Normal file
View File

@ -0,0 +1,40 @@
name: tools
on:
push:
branches:
- 'master'
pull_request:
types:
- labeled
- synchronize
concurrency:
group: ${{ github.head_ref }} || concat(${{ github.ref }}, ${{ github.workflow }})
cancel-in-progress: true
jobs:
build:
strategy:
fail-fast: true
matrix:
include:
- os: ubuntu-24.04
compiler:
CC: clang-18
CXX: clang++-18
runs-on: ${{ matrix.os }}
name: ${{ matrix.os }}-${{ matrix.compiler.CC }}
if: |
github.repository == 'liyunfan1223/azerothcore-wotlk' && !github.event.pull_request.draft
&& (
contains(github.event.pull_request.labels.*.name, 'run-build')
|| github.event.label.name == 'run-build'
)
steps:
- uses: actions/checkout@v4
- uses: ./.github/actions/linux-build
with:
CC: ${{ matrix.compiler.CC }}
CXX: ${{ matrix.compiler.CXX }}
tools: all
pch: false

36
.github/workflows/windows_build.yml vendored Normal file
View File

@ -0,0 +1,36 @@
name: windows-build
on:
push:
branches: [ "Playerbot" ]
pull_request:
branches: [ "Playerbot" ]
concurrency:
group: ${{ github.head_ref }} || concat(${{ github.ref }}, ${{ github.workflow }})
cancel-in-progress: true
jobs:
windows-build:
strategy:
fail-fast: false
matrix:
os: [windows-latest]
runs-on: ${{ matrix.os }}
name: ${{ matrix.os }}
env:
BOOST_ROOT: C:\local\boost_1_82_0
steps:
- uses: actions/checkout@v4
- name: ccache
uses: hendrikmuhs/ccache-action@v1.2.13
- name: Configure OS
shell: bash
env:
CONTINUOUS_INTEGRATION: true
run: |
./acore.sh install-deps
- name: Build
shell: bash
run: |
export CTOOLS_BUILD=all
./acore.sh compiler build

101
.gitignore vendored Normal file
View File

@ -0,0 +1,101 @@
#
# AzerothCore
#
/conf/*
!/conf/dist
!/modules/*.md
!/modules/*.sh
!/modules/CMakeLists.txt
!/modules/*.h
!/modules/*.cmake
/build*/
/var/*
!/var/build/.gitkeep
!/var/ccache/.gitkeep
/env/dist/*
!/env/dist/.gitkeep
/env/user/*
/.env*
/apps/joiner
/deps/deno
/data/sql/custom/*
/src/server/scripts/Custom/*
!/src/server/scripts/Custom/README.md
/*.override.yml
/*.override.yaml
!.gitkeep
# default build directory if not specified by CMAKE configuration
/out/*
#
#Generic
#
.directory
.mailmap
*.orig
*.rej
*~
.hg/
*.kdev*
.DS_Store
CMakeLists.txt.user
*.bak
*.patch
*.diff
*.REMOTE.*
*.BACKUP.*
*.BASE.*
*.LOCAL.*
#
# IDE & other software
#
/.settings/
/.externalToolBuilders/*
/.vs
/out
# exclude in all levels
nbproject/
.sync.ffs_db
*.kate-swp
.browse.VC*
.idea
cmake-build-*/*
coverage-report/
.vs
#
# Eclipse
#
*.pydevproject
.metadata
.gradle
tmp/
*.tmp
*.swp
*~.nib
local.properties
.settings/
.loadpath
.project
.cproject
# ==================
#
# CUSTOM
#
# put below your custom ignore rules
# for example, if you want to include a
# module directly in the repository you can do:
#
# !modules/yourmodule
#
# ==================
.cache
compile_commands.json

1
.suppress.cppcheck Normal file
View File

@ -0,0 +1 @@
cppcheckError

16
.vscode/extensions.json vendored Normal file
View File

@ -0,0 +1,16 @@
{
"recommendations": [
"ms-vscode-remote.remote-containers",
"notskm.clang-tidy",
"xaver.clang-format",
"bbenoist.doxygen",
"ms-vscode.cpptools",
"austin.code-gnu-global",
"twxs.cmake",
"mhutchie.git-graph",
"github.vscode-pull-request-github",
"eamodio.gitlens",
"cschlosser.doxdocgen",
"sanaajani.taskrunnercode"
]
}

68
.vscode/launch.json vendored Normal file
View File

@ -0,0 +1,68 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Linux/Docker debug",
"type": "cppdbg",
"request": "launch",
"program": "/azerothcore/env/dist/bin/worldserver",
"cwd": "/azerothcore",
"args": [],
"environment": [],
"externalConsole": false,
"sourceFileMap": {
"/azerothcore": "${workspaceFolder}"
},
"linux": {
"MIMode": "gdb",
"miDebuggerPath": "/usr/bin/gdb",
"setupCommands": [
{
"description": "Enable pretty-printing for gdb",
"text": "-enable-pretty-printing",
"ignoreFailures": false
}
]
}
},
{
"name": "(docker run) Pipe Launch",
"type": "cppdbg",
"request": "launch",
"program": "/azerothcore/env/dist/bin/worldserver",
"cwd": "/azerothcore",
"args": [],
"environment": [],
"externalConsole": true,
"pipeTransport": {
"debuggerPath": "/usr/bin/gdb",
"pipeProgram": "docker compose",
"pipeArgs": [
"exec", "-T", "ac-worldserver", "sh", "-c"
],
"pipeCwd": "${workspaceFolder}"
},
"sourceFileMap": {
"/azerothcore": "${workspaceFolder}"
},
"linux": {
"MIMode": "gdb",
"miDebuggerPath": "/usr/bin/gdb",
"setupCommands": [
{
"description": "Enable pretty-printing for gdb",
"text": "-enable-pretty-printing",
"ignoreFailures": false
}
]
},
"osx": {
"MIMode": "lldb"
},
"windows": {
"MIMode": "gdb",
"miDebuggerPath": "C:\\MinGw\\bin\\gdb.exe"
}
}
]
}
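
The "(docker run) Pipe Launch" configuration attaches gdb through `docker compose exec -T ac-worldserver`, so that service has to be running before the debugger starts; a minimal sketch using the service name from the pipeArgs above:
# start the worldserver container that the pipe transport attaches to
docker compose up -d ac-worldserver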

124
.vscode/settings.json vendored Normal file
View File

@ -0,0 +1,124 @@
{
"files.associations": {
"*.dist": "properties",
"*.crash": "properties",
"*.wtf": "properties",
"*.cnf": "properties",
"array": "cpp",
"atomic": "cpp",
"bit": "cpp",
"*.tcc": "cpp",
"bitset": "cpp",
"cctype": "cpp",
"chrono": "cpp",
"cinttypes": "cpp",
"clocale": "cpp",
"cmath": "cpp",
"complex": "cpp",
"condition_variable": "cpp",
"csignal": "cpp",
"cstdarg": "cpp",
"cstddef": "cpp",
"cstdint": "cpp",
"cstdio": "cpp",
"cstdlib": "cpp",
"cstring": "cpp",
"ctime": "cpp",
"cwchar": "cpp",
"cwctype": "cpp",
"deque": "cpp",
"list": "cpp",
"map": "cpp",
"set": "cpp",
"unordered_map": "cpp",
"unordered_set": "cpp",
"vector": "cpp",
"exception": "cpp",
"algorithm": "cpp",
"functional": "cpp",
"iterator": "cpp",
"memory": "cpp",
"memory_resource": "cpp",
"numeric": "cpp",
"optional": "cpp",
"random": "cpp",
"ratio": "cpp",
"string": "cpp",
"string_view": "cpp",
"system_error": "cpp",
"tuple": "cpp",
"type_traits": "cpp",
"utility": "cpp",
"fstream": "cpp",
"initializer_list": "cpp",
"iomanip": "cpp",
"iosfwd": "cpp",
"iostream": "cpp",
"istream": "cpp",
"limits": "cpp",
"mutex": "cpp",
"new": "cpp",
"ostream": "cpp",
"shared_mutex": "cpp",
"sstream": "cpp",
"stdexcept": "cpp",
"streambuf": "cpp",
"thread": "cpp",
"cfenv": "cpp",
"typeinfo": "cpp",
"codecvt": "cpp",
"xstring": "cpp",
"variant": "cpp",
"any": "cpp",
"barrier": "cpp",
"charconv": "cpp",
"compare": "cpp",
"concepts": "cpp",
"coroutine": "cpp",
"csetjmp": "cpp",
"execution": "cpp",
"filesystem": "cpp",
"format": "cpp",
"forward_list": "cpp",
"future": "cpp",
"ios": "cpp",
"latch": "cpp",
"locale": "cpp",
"numbers": "cpp",
"queue": "cpp",
"ranges": "cpp",
"regex": "cpp",
"scoped_allocator": "cpp",
"semaphore": "cpp",
"source_location": "cpp",
"span": "cpp",
"stack": "cpp",
"stop_token": "cpp",
"strstream": "cpp",
"syncstream": "cpp",
"typeindex": "cpp",
"valarray": "cpp",
"xfacet": "cpp",
"xhash": "cpp",
"xiosbase": "cpp",
"xlocale": "cpp",
"xlocbuf": "cpp",
"xlocinfo": "cpp",
"xlocmes": "cpp",
"xlocmon": "cpp",
"xlocnum": "cpp",
"xloctime": "cpp",
"xmemory": "cpp",
"xstddef": "cpp",
"xtr1common": "cpp",
"xtree": "cpp",
"xutility": "cpp",
"*.ipp": "cpp",
"resumable": "cpp"
},
"deno.enable": true,
"deno.path": "deps/deno/bin/deno",
"deno.lint": true,
"search.useIgnoreFiles": false,
"clangd.onConfigChanged": "restart"
}

85
.vscode/tasks.json vendored Normal file
View File

@ -0,0 +1,85 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "AzerothCore: Dashboard",
"type": "shell",
"command": "./acore.sh",
"group": "none",
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "AzerothCore: Import/update database",
"type": "shell",
"command": "./acore.sh db-assembler import-all",
"group": "build",
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "AzerothCore: download client-data",
"type": "shell",
"command": "./acore.sh client-data",
"group": "none",
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "AzerothCore: Clean build",
"type": "shell",
"command": "./acore.sh compiler clean",
"group": "build",
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "AzerothCore: Build",
"type": "shell",
"command": "./acore.sh compiler build",
"group": {
"kind": "build",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "AzerothCore: Run authserver (restarter)",
"type": "shell",
"command": "./acore.sh run-authserver",
"group": "none",
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "AzerothCore: Run worldserver (restarter)",
"type": "shell",
"command": "./acore.sh run-worldserver",
"group": "none",
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
}
]
}

36
AUTHORS Normal file
View File

@ -0,0 +1,36 @@
# List of AUTHORS who contributed over time to the AzerothCore project
## Warning
The code of AzerothCore is shipped as it is without any form of warranty,
and - except for third party libraries - licensed under the AGPL 3,
which you can read from the file "LICENSE".
## Point of current development
The project is currently hosted at https://www.azerothcore.org/ and developed under https://github.com/azerothcore
## History of development
Development of this project dates back to 2004 and has continued under various umbrellas over time:
* WoW Daemon Team, 2004
* MaNGOS project, 2005-2008, located at http://www.mangosproject.org
* MaNGOS project, 2008-2011, located at http://getmangos.com
* SD2 project, 2008-2009, located at http://www.scriptdev2.com/
* TrinityCore, 2008-2012, located at https://www.trinitycore.org/
* SunwellCore 2012-2016, privately developed, more info at https://www.azerothcore.org/pages/sunwell.pl/
* AzerothCore, 2016-CURRENT, located at https://www.azerothcore.org/
## Authorship of the code
Authorship is assigned for each commit within the git history, which is stored in these git repositories:
* github.com/cmangos/mangos-svn (History from MaNGOS project from 2005-2008, originally hosted at http://mangos.svn.sourceforge.net)
* github.com/TrinityCore/TrinityCore
* github.com/azerothcore/azerothcore-wotlk
Unfortunately, we have no detailed information on the history of the WoWD project;
if somebody can provide information, please contact us so that we can make this history available.
SunwellCore was developed privately and unfortunately has no git history.
## Exceptions with third-party libraries
The third-party libraries have their own way of addressing authorship, and the authorship of commits importing/updating
a third-party library reflects who did the importing instead of who wrote the code within the commit.
The authors of third-party libraries are not explicitly mentioned here; they can usually be obtained from the files belonging to the third-party libraries.

177
CMakeLists.txt Normal file
View File

@ -0,0 +1,177 @@
#
# This file is part of the AzerothCore Project. See AUTHORS file for Copyright information
#
# This file is free software; as a special exception the author gives
# unlimited permission to copy and/or distribute it, with or without
# modifications, as long as this notice is preserved.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY, to the extent permitted by law; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# Required CMake version
cmake_minimum_required(VERSION 3.16...3.22)
message(STATUS "CMake version: ${CMAKE_VERSION}")
# CMake policies (can not be handled elsewhere)
cmake_policy(SET CMP0005 NEW)
if (POLICY CMP0153)
cmake_policy(SET CMP0153 OLD)
endif()
# Set project name (must be done AFTER setting configuration types)
project(AzerothCore VERSION 3.0.0 LANGUAGES CXX C)
# add these options before the PROJECT keyword
set(CMAKE_DISABLE_SOURCE_CHANGES ON)
set(CMAKE_DISABLE_IN_SOURCE_BUILD ON)
# Set RPATH handling (CMake parameters)
set(CMAKE_SKIP_BUILD_RPATH 0)
set(CMAKE_BUILD_WITH_INSTALL_RPATH 0)
set(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/lib")
set(CMAKE_INSTALL_RPATH_USE_LINK_PATH 1)
set(AC_PATH_ROOT "${CMAKE_SOURCE_DIR}")
# set macro-directory
list(APPEND CMAKE_MODULE_PATH
"${CMAKE_SOURCE_DIR}/src/cmake/macros")
include(CheckCXXSourceRuns)
include(CheckIncludeFiles)
include(ConfigureScripts)
include(ConfigureModules)
include(ConfigureApplications)
include(ConfigureTools)
# some utils for cmake
include(deps/acore/cmake-utils/utils.cmake)
include(src/cmake/ac_macros.cmake)
# set default build options and print them
include(conf/dist/config.cmake)
# load custom configurations for cmake if they exist
if(EXISTS "${CMAKE_SOURCE_DIR}/conf/config.cmake")
include(conf/config.cmake)
endif()
#
# Loading dyn modules
#
# add modules and dependencies
CU_SUBDIRLIST(sub_DIRS "${CMAKE_SOURCE_DIR}/modules" FALSE FALSE)
FOREACH(subdir ${sub_DIRS})
get_filename_component(MODULENAME ${subdir} NAME)
if (";${DISABLED_AC_MODULES};" MATCHES ";${MODULENAME};")
continue()
endif()
STRING(REPLACE "${CMAKE_SOURCE_DIR}/" "" subdir_rel ${subdir})
if(EXISTS "${subdir}/CMakeLists.txt")
add_subdirectory("${subdir_rel}")
endif()
ENDFOREACH()
CU_RUN_HOOK("AFTER_LOAD_CONF")
# default to RelWithDebInfo if no build type is explicitly set
if(NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE "RelWithDebInfo")
endif()
# turn off PCH totally if enabled (hidden setting, mainly for devs)
if( NOPCH )
set(USE_COREPCH 0)
set(USE_SCRIPTPCH 0)
endif()
include(ConfigureBaseTargets)
include(CheckPlatform)
include(GroupSources)
include(AutoCollect)
include(ConfigInstall)
CU_RUN_HOOK("AFTER_LOAD_CMAKE_MODULES")
find_package(PCHSupport)
find_package(MySQL REQUIRED)
if(UNIX AND WITH_PERFTOOLS)
find_package(Gperftools)
endif()
if(NOT WITHOUT_GIT)
find_package(Git)
endif()
# Find revision ID and hash of the sourcetree
include(src/cmake/genrev.cmake)
# print out the results before continuing
include(src/cmake/showoptions.cmake)
#
# Loading framework
#
add_subdirectory(deps)
add_subdirectory(src/common)
#
# Loading application sources
#
CU_RUN_HOOK("BEFORE_SRC_LOAD")
# add core sources
add_subdirectory(src)
if (BUILD_APPLICATION_WORLDSERVER)
# add modules sources
add_subdirectory(modules)
endif()
CU_RUN_HOOK("AFTER_SRC_LOAD")
if (BUILD_TESTING AND BUILD_APPLICATION_WORLDSERVER)
# we use these flags to get code coverage
set(UNIT_TEST_CXX_FLAGS "-fprofile-arcs -ftest-coverage -fno-inline")
# enable additional flags for GCC.
if ( CMAKE_CXX_COMPILER_ID MATCHES GNU )
set(UNIT_TEST_CXX_FLAGS "${UNIT_TEST_CXX_FLAGS} -fno-inline-small-functions -fno-default-inline")
endif()
message("Unit tests code coverage: enabling ${UNIT_TEST_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${UNIT_TEST_CXX_FLAGS}")
include(src/cmake/googletest.cmake)
fetch_googletest(
${PROJECT_SOURCE_DIR}/src/cmake
${PROJECT_BINARY_DIR}/googletest
)
enable_testing()
add_subdirectory(src/test)
add_custom_target(coverage DEPENDS coverage_command)
add_custom_command(OUTPUT coverage_command
# Run unit tests.
COMMAND ctest
# Run the graphical front-end for code coverage.
COMMAND lcov --directory src --capture --output-file coverage.info
COMMAND lcov --remove coverage.info '/usr/*' '${CMAKE_BINARY_DIR}/googletest/*' '${CMAKE_CURRENT_SOURCE_DIR}/src/test/*' --output-file coverage.info
COMMAND genhtml -o ${CMAKE_CURRENT_SOURCE_DIR}/coverage-report coverage.info
WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
)
endif()
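
The module loop near the top skips any directory listed in DISABLED_AC_MODULES, and conf/config.cmake is included automatically when present, so local overrides can live there; a minimal sketch (the module names are hypothetical):
# conf/config.cmake -- picked up by the root CMakeLists.txt if it exists
set(DISABLED_AC_MODULES "mod-example-a;mod-example-b")  # directories under modules/ to skip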

661
LICENSE Normal file
View File

@ -0,0 +1,661 @@
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<http://www.gnu.org/licenses/>.

23
PreLoad.cmake Normal file
View File

@ -0,0 +1,23 @@
# Copyright (C)
#
# This file is free software; as a special exception the author gives
# unlimited permission to copy and/or distribute it, with or without
# modifications, as long as this notice is preserved.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY, to the extent permitted by law; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# This file is run right before CMake starts configuring the source tree.
# Example: force CMAKE_INSTALL_PREFIX to be preloaded with a value before
# the actual first "configure" step - this allows hard-forcing install
# destinations elsewhere in the CMake buildsystem (commented out on purpose)
# Override CMAKE_INSTALL_PREFIX on Windows platforms
#if( WIN32 )
# if( NOT CYGWIN )
# set(CMAKE_INSTALL_PREFIX
# "" CACHE PATH "Default install path")
# endif()
#endif()

5
acore.json Normal file
View File

@ -0,0 +1,5 @@
{
"name": "azerothcore-wotlk",
"version": "13.0.0-dev.1",
"license": "AGPL3"
}

8
acore.sh Normal file
View File

@ -0,0 +1,8 @@
#!/usr/bin/env bash
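# Entry point: resolve this script's directory and delegate to the installer framework in apps/installer/main.sh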
[ -z "$WITH_ERRORS" ] && set -e
CUR_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$CUR_PATH/apps/installer/main.sh"

View File

@ -0,0 +1,234 @@
########################################################################################
# SETTINGS #
########################################################################################
$mysql_host = "127.0.0.1"
$mysql_user = "export"
$mysql_password = "export"
$mysql_database_auth = "acore_auth"
$mysql_database_characters = "acore_characters"
$mysql_database_world = "acore_world"
########################################################################################
# SETTINGS END #
########################################################################################
# Set MySQL password as temporary env var
$env:MYSQL_PWD = $mysql_password
# Resolve the path to the data\sql\base directory
$scriptDirectory = $PSScriptRoot
$relativePath = "..\..\data\sql\base"
$combinedPath = Join-Path -Path $scriptDirectory -ChildPath $relativePath
$fullPath = Resolve-Path -Path $combinedPath
# Define the output directory (using database name)
$output_directory_auth = "$fullPath\db_auth"
$output_directory_characters = "$fullPath\db_characters"
$output_directory_world = "$fullPath\db_world"
Write-Host " ___ _ _ ___ "
Write-Host "/ \ ___ ___ _ _ ___ | |_ | |_ / __| ___ _ _ ___ "
Write-Host "| - ||_ // -_)| '_|/ _ \| _|| \ | (__ / _ \| '_|/ -_)"
Write-Host "|_|_|/__|\___||_| \___/ \__||_||_| \___|\___/|_| \___|"
Write-Host "AzerothCore 3.3.5a - www.azerothcore.org"
Write-Host ""
Write-Host "Welcome to the AzerothCore Database Exporter for database squashes!"
Write-Host ""
Write-Host "You have configured:"
Write-Host "Database Auth: '$mysql_database_auth'"
Write-Host "Database Characters: '$mysql_database_characters'"
Write-Host "Database World: '$mysql_database_world'"
Write-Host "Output Dir Auth: '$output_directory_auth'"
Write-Host "Output Dir Characters: '$output_directory_characters'"
Write-Host "Output Dir World: '$output_directory_world'"
Write-Host ""
Write-Host "Make sure you read the entire process before you continue."
Write-Host "https://github.com/azerothcore/azerothcore-wotlk/blob/master/data/sql/base/database-squash.md"
Write-Host "https://github.com/azerothcore/azerothcore-wotlk/blob/master/apps/DatabaseExporter/databaseexporter.md"
Write-Host ""
# Check if the user wants to continue using the tool
do {
$confirmation = Read-Host "Do you want to continue using the tool? (Y/N)"
if ($confirmation -eq 'Y' -or $confirmation -eq 'y') {
# Continue the script
Write-Host "AzerothCore Database Exporter starts."
$continue = $true
}
elseif ($confirmation -eq 'N' -or $confirmation -eq 'n') {
# Exit the script
Write-Host "Exiting the AzerothCore Database Exporter."
exit
}
else {
Write-Host "Invalid input. Please enter Y or N."
$continue = $null
}
} while ($continue -eq $null)
# Remove the output directories if they already exist
if (Test-Path $output_directory_auth) {
Remove-Item -Path $output_directory_auth -Recurse -Force
Write-Host "Deleted directory $output_directory_auth"
}
if (Test-Path $output_directory_characters) {
Remove-Item -Path $output_directory_characters -Recurse -Force
Write-Host "Deleted directory $output_directory_characters"
}
if (Test-Path $output_directory_world) {
Remove-Item -Path $output_directory_world -Recurse -Force
Write-Host "Deleted directory $output_directory_world"
}
# Create the output directory if it doesn't exist
if (-not (Test-Path -Path $output_directory_auth)) {
New-Item -ItemType Directory -Force -Path $output_directory_auth
Write-Host "Created directory $output_directory_auth"
}
if (-not (Test-Path -Path $output_directory_characters)) {
New-Item -ItemType Directory -Force -Path $output_directory_characters
Write-Host "Created directory $output_directory_characters"
}
if (-not (Test-Path -Path $output_directory_world)) {
New-Item -ItemType Directory -Force -Path $output_directory_world
Write-Host "Created directory $output_directory_world"
}
# Fix for dumping TIMESTAMP data
$timezone = "+01:00"
$mysqlCommand = "SET time_zone = '$timezone';"
$mysqlExec = "mysql -h $mysql_host -u $mysql_user -p$mysql_password -e `"$mysqlCommand`""
Invoke-Expression -Command $mysqlExec
# PS script uses non-utf-8 encoding by default
# https://stackoverflow.com/a/58438716
# Save the current encoding and switch to UTF-8.
$prev = [Console]::OutputEncoding
[Console]::OutputEncoding = [System.Text.UTF8Encoding]::new()
Write-Host ""
Write-Host "#########################################################"
Write-Host "EXPORT AUTH DATABASE START"
Write-Host "#########################################################"
Write-Host ""
Write-Host "Please enter your password for user '$mysql_user'"
# Export Auth Database
# Connect to MySQL and get all the tables
$tables_auth = mysql -h $mysql_host -u $mysql_user -D $mysql_database_auth -e "SHOW TABLES;" | Select-Object -Skip 1
# Iterate through each table and export both the structure and contents into the same SQL file
foreach ($table in $tables_auth) {
# Define the output file path for this table
$output_file = "$output_directory_auth\$table.sql"
# Clear the content of the output file if it exists, or create a new one
if (Test-Path $output_file) {
Clear-Content -Path $output_file
}
# Export the table structure (CREATE TABLE) and table data (INSERT) to the SQL file
$create_table_command = "mysqldump -h $mysql_host -u $mysql_user --skip-tz-utc $mysql_database_auth $table"
$create_table_output = Invoke-Expression -Command $create_table_command
# write file with utf-8 encoding
# https://stackoverflow.com/a/32951824
[IO.File]::WriteAllLines($output_file, $create_table_output)
    # Format the INSERT values to be on separate lines.
$content = Get-Content -Raw $output_file
$formattedContent = $content -replace 'VALUES \(', "VALUES`r`n("
$formattedContent = $formattedContent -replace '\),', "),`r`n"
$formattedContent | Set-Content $output_file
Write-Host "Exported structure and data for table $table to $output_file"
}
Write-Host ""
Write-Host "#########################################################"
Write-Host "EXPORT AUTH DATABASE END"
Write-Host "#########################################################"
Write-Host ""
Write-Host "#########################################################"
Write-Host "EXPORT CHARACTERS DATABASE START"
Write-Host "#########################################################"
Write-Host ""
Write-Host "Please enter your password for user '$mysql_user'"
# Export Characters Database
# Connect to MySQL and get all the tables
$tables_characters = mysql -h $mysql_host -u $mysql_user -D $mysql_database_characters -e "SHOW TABLES;" | Select-Object -Skip 1
# Iterate through each table and export both the structure and contents into the same SQL file
foreach ($table in $tables_characters) {
# Define the output file path for this table
$output_file = "$output_directory_characters\$table.sql"
# Clear the content of the output file if it exists, or create a new one
if (Test-Path $output_file) {
Clear-Content -Path $output_file
}
# Export the table structure (CREATE TABLE) and table data (INSERT) to the SQL file
$create_table_command = "mysqldump -h $mysql_host -u $mysql_user --skip-tz-utc $mysql_database_characters $table"
$create_table_output = Invoke-Expression -Command $create_table_command
# write file with utf-8 encoding
# https://stackoverflow.com/a/32951824
[IO.File]::WriteAllLines($output_file, $create_table_output)
    # Format the INSERT values to be on separate lines.
$content = Get-Content -Raw $output_file
$formattedContent = $content -replace 'VALUES \(', "VALUES`r`n("
$formattedContent = $formattedContent -replace '\),', "),`r`n"
$formattedContent | Set-Content $output_file
Write-Host "Exported structure and data for table $table to $output_file"
}
Write-Host ""
Write-Host "#########################################################"
Write-Host "EXPORT CHARACTERS DATABASE END"
Write-Host "#########################################################"
Write-Host ""
Write-Host "#########################################################"
Write-Host "EXPORT WORLD DATABASE START"
Write-Host "#########################################################"
Write-Host ""
Write-Host "Please enter your password for user '$mysql_user'"
# Export World Database
# Connect to MySQL and get all the tables
$tables_world = mysql -h $mysql_host -u $mysql_user -D $mysql_database_world -e "SHOW TABLES;" | Select-Object -Skip 1
# Iterate through each table and export both the structure and contents into the same SQL file
foreach ($table in $tables_world) {
# Define the output file path for this table
$output_file = "$output_directory_world\$table.sql"
# Clear the content of the output file if it exists, or create a new one
if (Test-Path $output_file) {
Clear-Content -Path $output_file
}
# Export the table structure (CREATE TABLE) and table data (INSERT) to the SQL file
$create_table_command = "mysqldump -h $mysql_host -u $mysql_user --skip-tz-utc $mysql_database_world $table"
$create_table_output = Invoke-Expression -Command $create_table_command
# write file with utf-8 encoding
# https://stackoverflow.com/a/32951824
[IO.File]::WriteAllLines($output_file, $create_table_output)
    # Format the INSERT values to be on separate lines.
$content = Get-Content -Raw $output_file
$formattedContent = $content -replace 'VALUES \(', "VALUES`r`n("
$formattedContent = $formattedContent -replace '\),', "),`r`n"
$formattedContent | Set-Content $output_file
Write-Host "Exported structure and data for table $table to $output_file"
}
Write-Host ""
Write-Host "#########################################################"
Write-Host "EXPORT WORLD DATABASE END"
Write-Host "#########################################################"
Write-Host ""
Write-Host "Database Exporter completed."
Write-Host "Have a nice day :)"
# Restore the previous encoding.
[Console]::OutputEncoding = $prev

View File

@ -0,0 +1,85 @@
# The AzerothCore Database Exporter for Database Squashes
> [!CAUTION]
> These steps are only for project maintainers who intend to update base files.
## Manual setting updates
Update the settings in `DatabaseExporter.ps1` to reflect your setup by opening it with your preferred text editor.
> [!NOTE]
> Only update the settings within the SETTINGS block.
These are the default settings:
```ps
########################################################################################
# SETTINGS #
########################################################################################
$mysql_host = "127.0.0.1"
$mysql_user = "export"
$mysql_password = "export"
$mysql_database_auth = "acore_auth"
$mysql_database_characters = "acore_characters"
$mysql_database_world = "acore_world"
########################################################################################
# SETTINGS END #
########################################################################################
```
## Description of the tool
This tool updates the base files automatically, so it must be run from this directory.
This is how it works, step by step:
1. Check that all paths look correct.
2. Confirm that you want to continue using the tool.
3. The tool deletes the `db_auth`, `db_characters` and `db_world` directories in `..\..\data\sql\base\`.
4. The tool recreates the `db_auth`, `db_characters` and `db_world` directories in `..\..\data\sql\base\`.
5. The tool exports every table of the auth database into `..\..\data\sql\base\db_auth\`.
6. The tool exports every table of the characters database into `..\..\data\sql\base\db_characters\`.
7. The tool exports every table of the world database into `..\..\data\sql\base\db_world\`.
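Under the hood, every table is dumped individually with `mysqldump` and written out with UTF-8 encoding before the `INSERT` values are reflowed onto separate lines. A minimal sketch of one such per-table step, using the default settings above (the `account` table name is only an illustration):
```ps
# Sketch of a single per-table export step (table name is illustrative)
$env:MYSQL_PWD = "export"   # password from the SETTINGS block
$dump = Invoke-Expression "mysqldump -h 127.0.0.1 -u export --skip-tz-utc acore_auth account"
[IO.File]::WriteAllLines("db_auth\account.sql", $dump)   # WriteAllLines writes UTF-8 by default
```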
## Run the tool
> [!IMPORTANT]
> This tool CANNOT be moved outside this directory; if you move it, it will create files in the wrong places.
1. Make sure MySQL is installed on your system and that the `mysqldump` tool is reachable through your PATH environment variable; otherwise you will encounter errors.
- Open the System Environment Variables dialog
- Open the PATH variable
- Add the path to your MySQL Server `bin` directory, e.g. `C:\Program Files\MySQL\MySQL Server 8.4\bin\`
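- You can check that PowerShell resolves the tool before running the exporter (this only verifies PATH resolution; nothing is exported yet):
```ps
Get-Command mysqldump
mysqldump --version
```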
2. If you haven't run PowerShell scripts before, you'll need to adjust the execution policy.
- Open PowerShell as an Administrator.
- Run the following command to allow running scripts:
```ps
Set-ExecutionPolicy RemoteSigned -Scope CurrentUser
```
- With this policy, locally created scripts can run, while scripts downloaded from the internet must be signed by a trusted publisher.
3. Open PowerShell (PS)
- Press Win + X and select Windows PowerShell (Admin) / Terminal (Admin)
4. Navigate to the script
- In PS, use the `cd` command to change the directory
```ps
cd "C:\AzerothCore\apps\DatabaseExporter"
```
5. Run the script
- In PS, run the script
```ps
.\DatabaseExporter.ps1
```
6. Follow the instructions given by the tool.
7. Now refer back to the database-squash.md instructions (located in `..\..\data\sql\base\`).
Completed :)

View File

@ -0,0 +1,163 @@
from re import compile, MULTILINE
from os import walk, getcwd
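# Scans header files for enums annotated with '// EnumUtils: DESCRIBE THIS' and generates
# the matching enuminfo_<header>.cpp files containing the EnumUtils<T> specializations.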
notice = ('''/*
* This file is part of the AzerothCore Project. See AUTHORS file for Copyright information
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published by the
* Free Software Foundation; either version 3 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
''')
if not getcwd().endswith('src'):
print('Run this from the src directory!')
print('(Invoke as \'python ../apps/EnumUtils/enumutils_describe.py\')')
exit(1)
EnumPattern = compile(r'//\s*EnumUtils: DESCRIBE THIS(?:\s*\(in ([^\)]+)\))?\s+enum\s+([0-9A-Za-z]+)[^\n]*\s*{([^}]+)};')
EnumValuesPattern = compile(r'\s+\S.+?(,|$)[^\n]*')
EnumValueNamePattern = compile(r'^\s*([a-zA-Z0-9_]+)', flags=MULTILINE)
EnumValueSkipLinePattern = compile(r'^\s*//')
EnumValueCommentPattern = compile(r'//,?[ \t]*([^\n]+)$')
CommentMatchFormat = compile(r'^(((TITLE +(.+?))|(DESCRIPTION +(.+?))) *){1,2}$')
CommentSkipFormat = compile(r'^SKIP *$')
def strescape(str):
res = ''
for char in str:
if char in ('\\', '"') or not (32 <= ord(char) < 127):
res += ('\\%03o' % ord(char))
else:
res += char
return '"' + res + '"'
def processFile(path, filename):
input = open('%s/%s.h' % (path, filename),'r')
if input is None:
print('Failed to open %s.h' % filename)
return
file = input.read()
enums = []
for enum in EnumPattern.finditer(file):
prefix = enum.group(1) or ''
name = enum.group(2)
values = []
for value in EnumValuesPattern.finditer(enum.group(3)):
valueData = value.group(0)
valueNameMatch = EnumValueNamePattern.search(valueData)
if valueNameMatch is None:
if EnumValueSkipLinePattern.search(valueData) is None:
print('Name of value not found: %s' % repr(valueData))
continue
valueName = valueNameMatch.group(1)
valueCommentMatch = EnumValueCommentPattern.search(valueData)
valueComment = None
if valueCommentMatch:
valueComment = valueCommentMatch.group(1)
valueTitle = None
valueDescription = None
if valueComment is not None:
if CommentSkipFormat.match(valueComment) is not None:
continue
commentMatch = CommentMatchFormat.match(valueComment)
if commentMatch is not None:
valueTitle = commentMatch.group(4)
valueDescription = commentMatch.group(6)
else:
valueDescription = valueComment
if valueTitle is None:
valueTitle = valueName
if valueDescription is None:
valueDescription = ''
values.append((valueName, valueTitle, valueDescription))
enums.append((prefix + name, prefix, values))
print('%s.h: Enum %s parsed with %d values' % (filename, name, len(values)))
if not enums:
return
print('Done parsing %s.h (in %s)\n' % (filename, path))
output = open('%s/enuminfo_%s.cpp' % (path, filename), 'w')
if output is None:
print('Failed to create enuminfo_%s.cpp' % filename)
return
# write output file
output.write(notice)
output.write('#include "%s.h"\n' % filename)
output.write('#include "Define.h"\n')
output.write('#include "SmartEnum.h"\n')
output.write('#include <stdexcept>\n')
output.write('\n')
output.write('namespace Acore::Impl::EnumUtilsImpl\n')
output.write('{\n')
for name, prefix, values in enums:
tag = ('data for enum \'%s\' in \'%s.h\' auto-generated' % (name, filename))
output.write('\n')
output.write('/*' + ('*'*(len(tag)+2)) + '*\\\n')
output.write('|* ' + tag + ' *|\n')
output.write('\\*' + ('*'*(len(tag)+2)) + '*/\n')
output.write('template <>\n')
output.write('AC_API_EXPORT EnumText EnumUtils<%s>::ToString(%s value)\n' % (name, name))
output.write('{\n')
output.write(' switch (value)\n')
output.write(' {\n')
for label, title, description in values:
output.write(' case %s: return { %s, %s, %s };\n' % (prefix + label, strescape(label), strescape(title), strescape(description)))
output.write(' default: throw std::out_of_range("value");\n')
output.write(' }\n')
output.write('}\n')
output.write('\n')
output.write('template <>\n')
output.write('AC_API_EXPORT size_t EnumUtils<%s>::Count() { return %d; }\n' % (name, len(values)))
output.write('\n')
output.write('template <>\n')
output.write('AC_API_EXPORT %s EnumUtils<%s>::FromIndex(size_t index)\n' % (name, name))
output.write('{\n')
output.write(' switch (index)\n')
output.write(' {\n')
for (i, (label, title, description)) in enumerate(values):
output.write(' case %d: return %s;\n' % (i, prefix + label))
output.write(' default: throw std::out_of_range("index");\n')
output.write(' }\n')
output.write('}\n')
output.write('\n')
output.write('template <>\n')
output.write('AC_API_EXPORT size_t EnumUtils<%s>::ToIndex(%s value)\n' % (name, name))
output.write('{\n')
output.write(' switch (value)\n')
output.write(' {\n')
for (i, (label, title, description)) in enumerate(values):
output.write(' case %s: return %d;\n' % (prefix + label, i))
output.write(' default: throw std::out_of_range("value");\n')
output.write(' }\n')
output.write('}\n')
output.write('}\n')
FilenamePattern = compile(r'^(.+)\.h$')
for root, dirs, files in walk('.'):
for n in files:
nameMatch = FilenamePattern.match(n)
if nameMatch is not None:
processFile(root, nameMatch.group(1))

238
apps/Fmt/FormatReplace.py Normal file
View File

@ -0,0 +1,238 @@
import pathlib
from os import getcwd
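# Replaces printf-style format specifiers (%s, %u, %d, ...) with fmt-style {} placeholders in
# LOG_*/ASSERT/ABORT_MSG/StringFormat/PQuery/PExecute/PAppend call sites, rewriting every
# .cpp and .h file under the current directory in place.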
if not getcwd().endswith('src') and not getcwd().endswith('modules'):
print('Run this from the src or modules directory!')
print('(Invoke as \'python ../apps/Fmt/FormatReplace.py\')')
exit(1)
def isASSERT(line):
substring = 'ASSERT'
if substring in line:
return True
else :
return False
def isABORTMSG(line):
substring = 'ABORT_MSG'
if substring in line:
return True
else :
return False
def islog(line):
substring = 'LOG_'
if substring in line:
return True
else :
return False
# def isSendSysMessage(line):
# substring = 'SendSysMessage'
# if substring in line:
# return True
# else :
# return False
# def isPSendSysMessage(line):
# substring = 'PSendSysMessage'
# if substring in line:
# return True
# else :
# return False
def isPQuery(line):
substring = 'PQuery'
if substring in line:
return True
else :
return False
def isPExecute(line):
substring = 'PExecute'
if substring in line:
return True
else :
return False
def isPAppend(line):
substring = 'PAppend'
if substring in line:
return True
else :
return False
def isStringFormat(line):
substring = 'StringFormat'
if substring in line:
return True
else :
return False
def haveDelimeter(line):
if ';' in line:
return True
else :
return False
def checkSoloLine(line):
if isABORTMSG(line):
line = line.replace("ABORT_MSG", "ABORT");
return handleCleanup(line), False
elif isASSERT(line):
return handleCleanup(line), False
elif islog(line):
return handleCleanup(line), False
elif isPExecute(line):
line = line.replace("PExecute", "Execute");
return handleCleanup(line), False
elif isPQuery(line):
line = line.replace("PQuery", "Query");
return handleCleanup(line), False
elif isPAppend(line):
line = line.replace("PAppend", "Append");
return handleCleanup(line), False
# elif isSendSysMessage(line):
# return handleCleanup(line), False
# elif isPSendSysMessage(line):
# return handleCleanup(line), False
elif isStringFormat(line):
return handleCleanup(line), False
else:
return line, False
def startMultiLine(line):
if isABORTMSG(line):
line = line.replace("ABORT_MSG", "ABORT");
return handleCleanup(line), True
elif isASSERT(line):
return handleCleanup(line), True
elif islog(line):
return handleCleanup(line), True
# elif isSendSysMessage(line):
# return handleCleanup(line), True
# elif isPSendSysMessage(line):
# return handleCleanup(line), True
elif isPQuery(line):
line = line.replace("PQuery", "Query");
return handleCleanup(line), True
elif isPExecute(line):
line = line.replace("PExecute", "Execute");
return handleCleanup(line), True
elif isPAppend(line):
line = line.replace("PAppend", "Append");
return handleCleanup(line), True
elif isStringFormat(line):
return handleCleanup(line), True
else :
return line, False
def continueMultiLine(line, existPrevLine):
if haveDelimeter(line):
existPrevLine = False;
return handleCleanup(line), existPrevLine
def checkTextLine(line, existPrevLine):
if existPrevLine:
return continueMultiLine(line, existPrevLine)
else :
if haveDelimeter(line):
return checkSoloLine(line)
else :
return startMultiLine(line)
def handleCleanup(line):
line = line.replace("%s", "{}");
line = line.replace("%u", "{}");
line = line.replace("%hu", "{}");
line = line.replace("%lu", "{}");
line = line.replace("%llu", "{}");
line = line.replace("%zu", "{}");
line = line.replace("%02u", "{:02}");
line = line.replace("%03u", "{:03}");
line = line.replace("%04u", "{:04}");
line = line.replace("%05u", "{:05}");
line = line.replace("%02i", "{:02}");
line = line.replace("%03i", "{:03}");
line = line.replace("%04i", "{:04}");
line = line.replace("%05i", "{:05}");
line = line.replace("%02d", "{:02}");
line = line.replace("%03d", "{:03}");
line = line.replace("%04d", "{:04}");
line = line.replace("%05d", "{:05}");
line = line.replace("%d", "{}");
line = line.replace("%i", "{}");
line = line.replace("%x", "{:x}");
line = line.replace("%X", "{:X}");
line = line.replace("%lx", "{:x}");
line = line.replace("%lX", "{:X}");
line = line.replace("%02X", "{:02X}");
line = line.replace("%08X", "{:08X}");
line = line.replace("%f", "{}");
line = line.replace("%.1f", "{0:.1f}");
line = line.replace("%.2f", "{0:.2f}");
line = line.replace("%.3f", "{0:.3f}");
line = line.replace("%.4f", "{0:.4f}");
line = line.replace("%.5f", "{0:.5f}");
line = line.replace("%3.1f", "{:3.1f}");
line = line.replace("%%", "%");
line = line.replace(".c_str()", "");
line = line.replace("\" SZFMTD \"", "{}");
line = line.replace("\" UI64FMTD \"", "{}");
# line = line.replace("\" STRING_VIEW_FMT \"", "{}");
# line = line.replace("STRING_VIEW_FMT_ARG", "");
return line
def getDefaultfile(name):
file1 = open(name, "r+", encoding="utf8", errors='replace')
result = ''
while True:
line = file1.readline()
if not line:
break
result += line
    file1.close()
return result
def getModifiedfile(name):
file1 = open(name, "r+", encoding="utf8", errors='replace')
prevLines = False
result = ''
while True:
line = file1.readline()
if not line:
break
line, prevLines = checkTextLine(line, prevLines)
result += line
    file1.close()
return result
def updModifiedfile(name, text):
file = open(name, "w", encoding="utf8", errors='replace')
file.write(text)
file.close()
def handlefile(name):
oldtext = getDefaultfile(name)
newtext = getModifiedfile(name)
if oldtext != newtext:
updModifiedfile(name, newtext)
p = pathlib.Path('.')
for i in p.glob('**/*'):
fname = i.absolute()
if '.cpp' in i.name:
handlefile(fname)
if '.h' in i.name:
handlefile(fname)

View File

@ -0,0 +1,132 @@
# Get the directory to acore.json
$scriptDirectory = $PSScriptRoot
$relativePath = "..\.."
$combinedPath = Join-Path -Path $scriptDirectory -ChildPath $relativePath
$fullPath = Resolve-Path -Path $combinedPath
$jsonFilePath = "$fullPath\acore.json"
# Get the directory for SQL update
$relativePathDbWorldUpdate = "..\..\data\sql\updates\db_world"
$combinedPathDbWorldUpdate = Join-Path -Path $scriptDirectory -ChildPath $relativePathDbWorldUpdate
$fullPathDbWorldUpdate = Resolve-Path -Path $combinedPathDbWorldUpdate
Write-Host " ___ _ _ ___ "
Write-Host "/ \ ___ ___ _ _ ___ | |_ | |_ / __| ___ _ _ ___ "
Write-Host "| - ||_ // -_)| '_|/ _ \| _|| \ | (__ / _ \| '_|/ -_)"
Write-Host "|_|_|/__|\___||_| \___/ \__||_||_| \___|\___/|_| \___|"
Write-Host "AzerothCore 3.3.5a - www.azerothcore.org"
Write-Host ""
Write-Host "Welcome to the AzerothCore Version Updater for database squashes!"
Write-Host ""
Write-Host "You have configured:"
Write-Host "acore.json Path: '$jsonFilePath'"
Write-Host "World SQL Updates path: '$fullPathDbWorldUpdate'"
Write-Host ""
Write-Host "Make sure you read the entire process before you continue."
Write-Host "https://github.com/azerothcore/azerothcore-wotlk/blob/master/data/sql/base/database-squash.md"
Write-Host "https://github.com/azerothcore/azerothcore-wotlk/blob/master/apps/VersionUpdater/versionupdater.md"
Write-Host ""
# Check if the user wants to continue using the tool
do {
$confirmation = Read-Host "Do you want to continue using the tool? (Y/N)"
if ($confirmation -eq 'Y' -or $confirmation -eq 'y') {
# Continue the script
Write-Host "AzerothCore Version Updater starts."
Write-Host ""
$continue = $true
}
elseif ($confirmation -eq 'N' -or $confirmation -eq 'n') {
# Exit the script
Write-Host "Exiting the AzerothCore Version Updater."
exit
}
else {
Write-Host "Invalid input. Please enter Y or N."
$continue = $null
}
} while ($continue -eq $null)
# Read the JSON file and convert it to a PowerShell object
$jsonContent = Get-Content -Path $jsonFilePath | ConvertFrom-Json
# Get the current version
$currentVersion = $jsonContent.version
# Match version components (major.minor.patch and optional suffix like -dev or -alpha)
if ($currentVersion -match '(\d+)\.(\d+)\.(\d+)(-.*)?') {
$major = $matches[1]
$minor = $matches[2]
$patch = $matches[3]
$suffix = $matches[4]
# Increment the major version
$major = [int]$major + 1
# Reset minor and patch version to 0 (if incrementing major)
$minor = 0
$patch = 0
# Reassemble the version with the suffix if it exists
$newVersion = "$major.$minor.$patch$suffix"
# Update the version in the JSON object
$jsonContent.version = $newVersion
} else {
Write-Host "Unknown error in $jsonFilePath. Exiting."
exit
}
# Convert the updated object back to JSON format
$newJsonContent = $jsonContent | ConvertTo-Json -Depth 3
# Write the updated content back to the file
$newJsonContent | Set-Content -Path $jsonFilePath
Write-Host "acore.json version updated to $newVersion"
# Create the SQL Version update file.
# Get today's date in the format YYYY_MM_DD
$today = Get-Date -Format "yyyy_MM_dd"
# Get the list of files in the directory that match the pattern "YYYY_MM_DD_versionNumber.sql"
$existingFiles = Get-ChildItem -Path $fullPathDbWorldUpdate -Filter "$today*_*.sql"
# If no files exist for today, start with version number 00
if ($existingFiles.Count -eq 0) {
[int]$newVersionNumber = 0
} else {
# Extract the version number from the existing files (e.g., YYYY_MM_DD_versionNumber.sql)
$maxVersionNumber = $existingFiles | ForEach-Object {
        if ($_.Name -match "${today}_(\d{2})\.sql") {
[int]$matches[1]
}
} | Measure-Object -Maximum | Select-Object -ExpandProperty Maximum
# Increment the version number by 1
[int]$newVersionNumber = $maxVersionNumber + 1
}
# Format the new version number as a two-digit number (e.g., 01, 02, etc.)
$formattedVersionNumber = $newVersionNumber.ToString("D2")
# Define the new filename using the date and incremented version number
$newFileName = "$today" + "_$formattedVersionNumber.sql"
$newFilePath = Join-Path -Path $fullPathDbWorldUpdate -ChildPath $newFileName
# Define the SQL content to write to the file
$tableName = '`version`'
$db_version = '`db_version`'
$db_version_content = "'ACDB 335.$major-dev'"
$cache_id = '`cache_id`'
$sqlContent = "UPDATE $tableName SET $db_version=$db_version_content, $cache_id=$major LIMIT 1;"
# Write the content to the new SQL file
$sqlContent | Set-Content -Path $newFilePath
Write-Host "SQL file created: $newFilePath"
Write-Host "SQL content: $sqlContent"
Write-Host ""
Write-Host "Version Updater completed."
Write-Host "Have a nice day :)"

View File

@ -0,0 +1,53 @@
# The AzerothCore Version Updater for Database Squashes
> [!CAUTION]
> These steps are only for project maintainers who intend to update base files.
## Description of the tool
This tool updates the version in the database and in acore.json automatically, so it must be run from this directory.
This is how it works, step by step:
1. Check that all paths look correct.
2. Confirm that you want to continue using the tool.
3. The tool updates `acore.json`, incrementing the major version by 1 (minor and patch are reset to 0).
4. The tool creates a file containing the matching `UPDATE` statement for the world database in `..\..\data\sql\updates\db_world`, as sketched below.
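For reference, the generated file is named after the run date plus a two-digit counter (`YYYY_MM_DD_NN.sql`) and contains a single `UPDATE` statement; a minimal sketch, assuming the new major version is 14:
```ps
# Sketch of the statement the tool writes (version 14 is only an example)
$major = 14
$sqlContent = "UPDATE ``version`` SET ``db_version``='ACDB 335.$major-dev', ``cache_id``=$major LIMIT 1;"
# -> UPDATE `version` SET `db_version`='ACDB 335.14-dev', `cache_id`=14 LIMIT 1;
```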
## Run the tool
> [!IMPORTANT]
> This tool CANNOT be moved outside this directory; if you move it, it will create files in the wrong places.
1. If you haven't run PowerShell scripts before, you'll need to adjust the execution policy.
- Open PowerShell as an Administrator.
- Run the following command to allow running scripts:
```ps
Set-ExecutionPolicy RemoteSigned -Scope CurrentUser
```
- With this policy, locally created scripts can run, while scripts downloaded from the internet must be signed by a trusted publisher.
2. Open PowerShell (PS)
- Press Win + X and select Windows PowerShell (Admin) / Terminal (Admin)
3. Navigate to the script
- In PS, use the `cd` command to change the directory
```ps
cd "C:\AzerothCore\apps\VersionUpdater"
```
4. Run the script
- In PS, run the script
```ps
.\VersionUpdater.ps1
```
5. Follow the instructions given by the tool.
6. Now refer back to the database-squash.md instructions (located in `..\..\data\sql\base\`).
Completed :)

View File

@ -0,0 +1,26 @@
function registerHooks() { acore_event_registerHooks "$@"; }
function runHooks() { acore_event_runHooks "$@"; }
source "$AC_PATH_CONF/dist/config.sh" # include dist to avoid missing conf variables
# first check if it's defined in env, otherwise use the default
USER_CONF_PATH=${USER_CONF_PATH:-"$AC_PATH_CONF/config.sh"}
if [ -f "$USER_CONF_PATH" ]; then
source "$USER_CONF_PATH" # should overwrite previous
else
echo "NOTICE: file <$USER_CONF_PATH> not found, we use default configuration only."
fi
#
# Load modules
#
for entry in "$AC_PATH_MODULES/"*/include.sh
do
if [ -e "$entry" ]; then
source "$entry"
fi
done
ACORE_VERSION=$("$AC_PATH_DEPS/jsonpath/JSONPath.sh" -f "$AC_PATH_ROOT/acore.json" -b '$.version')

View File

@ -0,0 +1,28 @@
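# Resolve the absolute repository root (via greadlink on macOS, installing Homebrew/coreutils if missing)
# and define the shared AzerothCore path variables (conf, modules, deps, var).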
unamestr=$(uname)
if [[ "$unamestr" == 'Darwin' ]]; then
if ! command -v brew &>/dev/null ; then
ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
fi
if ! [ "${BASH_VERSINFO}" -ge 4 ]; then
brew install bash
fi
if ! command -v greadlink &>/dev/null ; then
brew install coreutils
fi
AC_PATH_ROOT=$(greadlink -f "$AC_PATH_APPS/../")
else
AC_PATH_ROOT=$(readlink -f "$AC_PATH_APPS/../")
fi
case $AC_PATH_ROOT in
/*) AC_PATH_ROOT=$AC_PATH_ROOT;;
*) AC_PATH_ROOT=$PWD/$AC_PATH_ROOT;;
esac
AC_PATH_CONF="$AC_PATH_ROOT/conf"
AC_PATH_MODULES="$AC_PATH_ROOT/modules"
AC_PATH_DEPS="$AC_PATH_ROOT/deps"
AC_PATH_VAR="$AC_PATH_ROOT/var"

View File

@ -0,0 +1,16 @@
[[ ${GUARDYVAR:-} -eq 1 ]] && return || readonly GUARDYVAR=1 # include it once
# force default language for applications
LC_ALL=C
AC_PATH_APPS="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../" && pwd )"
AC_PATH_SHARED="$AC_PATH_APPS/bash_shared"
source "$AC_PATH_SHARED/defines.sh"
source "$AC_PATH_DEPS/acore/bash-lib/src/event/hooks.sh"
source "$AC_PATH_SHARED/common.sh"
[[ "$OSTYPE" = "msys" ]] && AC_BINPATH_FULL="$BINPATH" || AC_BINPATH_FULL="$BINPATH/bin"

8
apps/ci/ci-compile.sh Normal file
View File

@ -0,0 +1,8 @@
#!/bin/bash
set -e
echo "compile core"
export AC_CCACHE=true
./acore.sh "compiler" "all"

View File

@ -0,0 +1,69 @@
#!/bin/bash
set -e
cat >>conf/config.sh <<CONFIG_SH
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
CWARNINGS=ON
CDEBUG=OFF
CTYPE=Release
CTOOLS_BUILD=none
CSCRIPTS=static
CMODULES=static
CBUILD_TESTING=ON
CSCRIPTPCH=ON
CCOREPCH=ON
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
CONFIG_SH
case $COMPILER in
# this is in order to use the "default" gcc version of the OS, without forcing a specific version
"gcc" )
time sudo apt-get install -y gcc g++
echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
;;
"gcc8" )
time sudo apt-get install -y gcc-8 g++-8
echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
;;
"gcc10" )
time sudo apt-get install -y gcc-10 g++-10
echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
;;
# this is in order to use the "default" clang version of the OS, without forcing a specific version
"clang" )
time sudo apt-get install -y clang
echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
;;
"clang10" )
time sudo apt-get install -y clang-10
echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
;;
"clang11" )
time sudo apt-get install -y clang-11
echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
;;
"clang12" )
time sudo apt-get install -y clang-12
echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
;;
* )
echo "Unknown compiler $COMPILER"
exit 1
;;
esac

69
apps/ci/ci-conf-core.sh Normal file
View File

@ -0,0 +1,69 @@
#!/bin/bash
set -e
cat >>conf/config.sh <<CONFIG_SH
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
CWARNINGS=ON
CDEBUG=OFF
CTYPE=Release
CTOOLS_BUILD=none
CSCRIPTS=static
CMODULES=static
CBUILD_TESTING=ON
CSCRIPTPCH=OFF
CCOREPCH=OFF
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
CONFIG_SH
case $COMPILER in
# this is in order to use the "default" gcc version of the OS, without forcing a specific version
"gcc" )
time sudo apt-get install -y gcc g++
echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
;;
"gcc8" )
time sudo apt-get install -y gcc-8 g++-8
echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
;;
"gcc10" )
time sudo apt-get install -y gcc-10 g++-10
echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
;;
# this is in order to use the "default" clang version of the OS, without forcing a specific version
"clang" )
time sudo apt-get install -y clang
echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
;;
"clang10" )
time sudo apt-get install -y clang-10
echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
;;
"clang11" )
time sudo apt-get install -y clang-11
echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
;;
"clang12" )
time sudo apt-get install -y clang-12
echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
;;
* )
echo "Unknown compiler $COMPILER"
exit 1
;;
esac

36
apps/ci/ci-conf-db.sh Normal file
View File

@ -0,0 +1,36 @@
#!/bin/bash
set -e
cat >>conf/config.sh <<CONFIG_SH
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
CWARNINGS=ON
CDEBUG=OFF
CTYPE=Release
CAPPS_BUILD=none
CTOOLS_BUILD=db-only
CSCRIPTPCH=OFF
CCOREPCH=OFF
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
CONFIG_SH
case $COMPILER in
# this is in order to use the "default" clang version of the OS, without forcing a specific version
"clang" )
time sudo apt-get install -y clang
echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
;;
"clang12" )
time sudo apt-get install -y clang-12
echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
;;
* )
echo "Unknown compiler $COMPILER"
exit 1
;;
esac

67
apps/ci/ci-conf-tools.sh Normal file
View File

@ -0,0 +1,67 @@
#!/bin/bash
set -e
cat >>conf/config.sh <<CONFIG_SH
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
CWARNINGS=ON
CDEBUG=OFF
CTYPE=Release
CAPPS_BUILD=none
CTOOLS_BUILD=maps-only
CSCRIPTPCH=OFF
CCOREPCH=OFF
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
CONFIG_SH
case $COMPILER in
# this is in order to use the "default" gcc version of the OS, without forcing a specific version
"gcc" )
time sudo apt-get install -y gcc g++
echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
;;
"gcc8" )
time sudo apt-get install -y gcc-8 g++-8
echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
;;
"gcc10" )
time sudo apt-get install -y gcc-10 g++-10
echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
;;
# this is in order to use the "default" clang version of the OS, without forcing a specific version
"clang" )
time sudo apt-get install -y clang
echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
;;
"clang10" )
time sudo apt-get install -y clang-10
echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
;;
"clang11" )
time sudo apt-get install -y clang-11
echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
;;
"clang12" )
time sudo apt-get install -y clang-12
echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
;;
* )
echo "Unknown compiler $COMPILER"
exit 1
;;
esac

15
apps/ci/ci-dry-run.sh Normal file
View File

@ -0,0 +1,15 @@
#!/bin/bash
set -e
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Start mysql
sudo systemctl start mysql
source "$CURRENT_PATH/ci-gen-server-conf-files.sh" $1 "etc" "bin" "root"
(cd ./env/dist/bin/ && timeout 5m ./$APP_NAME -dry-run)
# Stop mysql
sudo systemctl stop mysql

18
apps/ci/ci-error-check.sh Normal file
View File

@ -0,0 +1,18 @@
#!/usr/bin/env bash
ERRORS_FILE="./env/dist/bin/Errors.log";
echo "Checking Startup Errors"
echo
if [[ -s ${ERRORS_FILE} ]]; then
printf "The Errors.log file contains startup errors:\n\n";
cat ${ERRORS_FILE};
printf "\nPlease solve the startup errors listed above!\n";
exit 1;
else
echo "> No startup errors found in Errors.log";
fi
echo
echo "Done"

View File

@ -0,0 +1,15 @@
APP_NAME=$1
CONFIG_FOLDER=${2:-"etc"}
BIN_FOLDER=${3:-"bin"}
MYSQL_ROOT_PASSWORD=${4:-""}
# copy dist files to conf files
cp ./env/dist/$CONFIG_FOLDER/$APP_NAME.conf.dist ./env/dist/$CONFIG_FOLDER/$APP_NAME.conf
# replace login info
sed -i "s/127.0.0.1;3306;acore;acore/localhost;3306;root;$MYSQL_ROOT_PASSWORD/" ./env/dist/$CONFIG_FOLDER/$APP_NAME.conf
if [[ $APP_NAME == "worldserver" ]]; then
sed -i 's/DataDir = \".\"/DataDir = \".\/data"/' ./env/dist/$CONFIG_FOLDER/$APP_NAME.conf
git clone --depth=1 --branch=master --single-branch https://github.com/ac-data/ac-data.git ./env/dist/$BIN_FOLDER/data
fi

View File

@ -0,0 +1,107 @@
#!/bin/bash
set -e
echo "install modules"
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-1v1-arena modules/mod-1v1-arena
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-account-mounts modules/mod-account-mounts
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-ah-bot modules/mod-ah-bot
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-anticheat modules/mod-anticheat
# NOTE: disabled because it does not compile right now with latest AC
# git clone --depth=1 --branch=master https://github.com/azerothcore/mod-antifarming modules/mod-antifarming
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-arena-3v3-solo-queue modules/mod-arena-3v3-solo-queue
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-arena-replay modules/mod-arena-replay
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-auto-revive modules/mod-auto-revive
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-autobalance modules/mod-autobalance
# NOTE: disabled because it causes DB error
# git clone --depth=1 --branch=master https://github.com/azerothcore/mod-azerothshard.git modules/mod-azerothshard
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-better-item-reloading modules/mod-better-item-reloading
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-bg-item-reward modules/mod-bg-item-reward
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-bg-reward modules/mod-bg-reward
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-boss-announcer modules/mod-boss-announcer
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-breaking-news-override modules/mod-breaking-news-override
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-buff-command modules/mod-buff-command
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-cfbg modules/mod-cfbg
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-character-tools modules/mod-character-tools
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-chat-login modules/mod-chat-login
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-chat-transmitter modules/mod-chat-transmitter
# NOTE: disabled because it causes DB startup error
# git clone --depth=1 --branch=master https://github.com/azerothcore/mod-chromie-xp modules/mod-chromie-xp
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-congrats-on-level modules/mod-congrats-on-level
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-costumes modules/mod-costumes
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-cta-switch modules/mod-cta-switch
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-custom-login modules/mod-custom-login
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-desertion-warnings modules/mod-desertion-warnings
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-detailed-logging modules/mod-detailed-logging
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-dmf-switch modules/mod-dmf-switch
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-duel-reset modules/mod-duel-reset
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-dynamic-xp modules/mod-dynamic-xp
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-eluna modules/mod-eluna
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-emblem-transfer modules/mod-emblem-transfer
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-fireworks-on-level modules/mod-fireworks-on-level
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-global-chat modules/mod-global-chat
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-guild-zone-system modules/mod-guild-zone-system
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-guildhouse modules/mod-guildhouse
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-individual-xp modules/mod-individual-xp
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-instance-reset modules/mod-instance-reset
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-instanced-worldbosses modules/mod-instanced-worldbosses
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-ip-tracker modules/mod-ip-tracker
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-item-level-up modules/mod-item-level-up
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-keep-out modules/mod-keep-out
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-learn-highest-talent modules/mod-learn-highest-talent
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-learn-spells modules/mod-learn-spells
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-low-level-arena modules/mod-low-level-arena
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-low-level-rbg modules/mod-low-level-rbg
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-mall-teleport modules/mod-mall-teleport
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-morph-all-players modules/mod-morph-all-players
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-morphsummon modules/mod-morphsummon
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-multi-client-check modules/mod-multi-client-check
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-notify-muted modules/mod-notify-muted
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-all-mounts modules/mod-npc-all-mounts
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-beastmaster modules/mod-npc-beastmaster
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-buffer modules/mod-npc-buffer
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-codebox modules/mod-npc-codebox
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-enchanter modules/mod-npc-enchanter
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-free-professions modules/mod-npc-free-professions
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-gambler modules/mod-npc-gambler
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-morph modules/mod-npc-morph
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-services modules/mod-npc-services
# not yet on azerothcore github
git clone --depth=1 --branch=master https://github.com/gozzim/mod-npc-spectator modules/mod-npc-spectator
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-talent-template modules/mod-npc-talent-template
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-npc-titles-tokens modules/mod-npc-titles-tokens
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-phased-duels modules/mod-phased-duels
# outdated
# git clone --depth=1 --branch=master https://github.com/azerothcore/mod-playerbots modules/mod-playerbots
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pocket-portal modules/mod-pocket-portal
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-premium modules/mod-premium
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-progression-system.git modules/mod-progression-system
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-promotion-azerothcore modules/mod-promotion-azerothcore
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvp-quests modules/mod-pvp-quests
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvp-titles modules/mod-pvp-titles
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvp-zones modules/mod-pvp-zones
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvpscript modules/mod-pvpscript
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvpstats-announcer modules/mod-pvpstats-announcer
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-quest-status modules/mod-quest-status
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-queue-list-cache modules/mod-queue-list-cache
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-quick-teleport modules/mod-quick-teleport
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-racial-trait-swap modules/mod-racial-trait-swap
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-random-enchants modules/mod-random-enchants
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-rdf-expansion modules/mod-rdf-expansion
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-resurrection-scroll modules/mod-resurrection-scroll
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-reward-played-time modules/mod-reward-played-time
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-reward-shop modules/mod-reward-shop
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-server-auto-shutdown.git modules/mod-server-auto-shutdown
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-solocraft modules/mod-solocraft
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-skip-dk-starting-area modules/mod-skip-dk-starting-area
# has core patch file
# git clone --depth=1 --branch=master https://github.com/azerothcore/mod-spell-regulator modules/mod-spell-regulator
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-starter-guild modules/mod-starter-guild
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-system-vip modules/mod-system-vip
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-tic-tac-toe modules/mod-tic-tac-toe
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-top-arena modules/mod-top-arena
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-transmog modules/mod-transmog
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-war-effort modules/mod-war-effort
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-weekend-xp modules/mod-weekend-xp
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-who-logged modules/mod-who-logged
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-zone-difficulty modules/mod-zone-difficulty

74
apps/ci/ci-install.sh Normal file
View File

@ -0,0 +1,74 @@
#!/bin/bash
set -e
cat >>conf/config.sh <<CONFIG_SH
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
CWARNINGS=ON
CDEBUG=OFF
CTYPE=Release
CSCRIPTS=static
CBUILD_TESTING=ON
CSERVERS=ON
CTOOLS=ON
CSCRIPTPCH=OFF
CCOREPCH=OFF
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
CONFIG_SH
time sudo apt-get update -y
# time sudo apt-get upgrade -y
time sudo apt-get install -y git lsb-release sudo
time ./acore.sh install-deps
case $COMPILER in
# this is in order to use the "default" gcc version of the OS, without forcing a specific version
"gcc" )
time sudo apt-get install -y gcc g++
echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
;;
"gcc8" )
time sudo apt-get install -y gcc-8 g++-8
echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
;;
"gcc10" )
time sudo apt-get install -y gcc-10 g++-10
echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
;;
# this is in order to use the "default" clang version of the OS, without forcing a specific version
"clang" )
time sudo apt-get install -y clang
echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
;;
"clang10" )
time sudo apt-get install -y clang-10
echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
;;
"clang11" )
time sudo apt-get install -y clang-11
echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
;;
"clang12" )
time sudo apt-get install -y clang-12
echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
;;
* )
echo "Unknown compiler $COMPILER"
exit 1
;;
esac

View File

@ -0,0 +1,54 @@
import * as semver from "https://deno.land/x/semver/mod.ts";
// specify the needed paths here
const CHANGELOG_PATH = "doc/changelog";
const CHANGELOG_PENDING_PATH = `${CHANGELOG_PATH}/pendings`;
const CHANGELOG_MASTER_FILE = `${CHANGELOG_PATH}/master.md`;
const ACORE_JSON = "./acore.json";
// read the acore.json file to work with the versioning
const decoder = new TextDecoder("utf-8");
const data = await Deno.readFile(ACORE_JSON);
const acoreInfo = JSON.parse(decoder.decode(data));
let changelogText = await Deno.readTextFile(CHANGELOG_MASTER_FILE);
const currentVersion = acoreInfo.version;
const res = Deno.run({ cmd: ["git", "rev-parse", "HEAD"], stdout: "piped", stderr: "piped", stdin: "null" });
await res.status();
const gitVersion = new TextDecoder().decode(await res.output()).trim();
for await (const dirEntry of Deno.readDir(CHANGELOG_PENDING_PATH)) {
if (!dirEntry.isFile || !dirEntry.name.endsWith(".md")) {
continue;
}
// Upgrade the prerelease version number (e.g. 1.0.0-dev.1 -> 1.0.0-dev.2)
acoreInfo.version = semver.inc(acoreInfo.version, "prerelease", {
includePrerelease: true,
});
// read the pending file found and add it at the beginning of the changelog text
const data = await Deno.readTextFile(
`${CHANGELOG_PENDING_PATH}/${dirEntry.name}`,
);
changelogText = `## ${acoreInfo.version} | Commit: [${gitVersion}](https://github.com/azerothcore/azerothcore-wotlk/commit/${gitVersion})\n\n${data}\n${changelogText}`;
// remove the pending file
await Deno.remove(`${CHANGELOG_PENDING_PATH}/${dirEntry.name}`);
}
// write to acore.json and master.md only if new version is available
if (currentVersion != acoreInfo.version) {
console.log(`Changelog version upgraded from ${currentVersion} to ${acoreInfo.version}`)
Deno.writeTextFile(CHANGELOG_MASTER_FILE, changelogText);
Deno.writeTextFile(ACORE_JSON, JSON.stringify(acoreInfo, null, 2)+"\n");
} else {
console.log("No changelogs to add")
}

74
apps/ci/ci-pending-sql.sh Normal file
View File

@ -0,0 +1,74 @@
#!/usr/bin/env bash
set -euo pipefail
CURRENT_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$CURRENT_PATH/../bash_shared/includes.sh"
UPDATES_PATH="$AC_PATH_ROOT/data/sql/updates"
# get_next_index "data/sql/updates/db_world/2024_10_14_22.sql"
# => 23
# get_next_index ""
# => 00
function get_next_index() {
if [[ -n "$1" ]]; then
# PREV_COUNT should be a non-zero padded number
PREV_COUNT="$(
# grabs the filename of the first argument, removes ".sql" suffix.
basename "$1" .sql |
# get the last number
cut -f4 -d_ |
# retrieve the last number, without zero padding
grep -oE "[1-9][0-9]*$"
)"
printf '%02d' "$((PREV_COUNT + 1))"
else
echo "00"
fi
}
# Lists all pending SQL files for data/sql/updates/db_$1 and moves them into that directory using a standard name, ordered by date and by how many imports have happened that day. The name has this format:
#
# /path/to/data/sql/updates/db_NAME/YYYY_MM_DD_INDEX.sql
#
# Where INDEX is a number with a minimum width (0-padded) of 2
#
# for example, "data/sql/updates/db_world/2024_10_01_03.sql" translates to "the third update in the world database from October 01, 2024"
TODAY="$(date +%Y_%m_%d)"
function import() {
PENDING_PATH="$AC_PATH_ROOT/data/sql/updates/pending_db_$1"
UPDATES_DIR="$UPDATES_PATH/db_$1"
# Get the most recent SQL file applied to this database. Used for the header comment
LATEST_UPDATE="$(find "$UPDATES_DIR" -iname "*.sql" | sort -h | tail -n 1)"
# Get latest SQL file applied to this database, today. This could be empty.
LATEST_UPDATE_TODAY="$(find "$UPDATES_DIR" -iname "$TODAY*.sql" | sort -h | tail -n 1)"
for entry in "$PENDING_PATH"/*.sql; do
if [[ -f "$entry" ]]; then
INDEX="$(get_next_index "$LATEST_UPDATE_TODAY")"
OUTPUT_FILE="${UPDATES_DIR}/${TODAY}_${INDEX}.sql"
# ensure a note is added as a header comment
echo "-- DB update $(basename "$LATEST_UPDATE" .sql) -> $(basename "$OUTPUT_FILE" .sql)" >"$OUTPUT_FILE"
# fill in the SQL contents under that
cat "$entry" >>"$OUTPUT_FILE"
# remove the unneeded file
rm -f "$entry"
# set the newest file to the file we just moved
LATEST_UPDATE_TODAY="$OUTPUT_FILE"
LATEST_UPDATE="$OUTPUT_FILE"
fi
done
}
import "world"
import "characters"
import "auth"
echo "Done."

View File

@ -0,0 +1,3 @@
#!/bin/bash
time var/build/obj/src/test/unit_tests

43
apps/ci/mac/ci-compile.sh Normal file
View File

@ -0,0 +1,43 @@
#!/usr/bin/env bash
export OPENSSL_ROOT_DIR=$(brew --prefix openssl@3)
export CCACHE_CPP2=true
export CCACHE_MAXSIZE='500M'
export CCACHE_COMPRESS=1
export CCACHE_COMPRESSLEVEL=9
ccache -s
cd var/build/obj
mysql_include_path=$(brew --prefix mysql)/include/mysql
mysql_lib_path=$(brew --prefix mysql)/lib/libmysqlclient.dylib
if [ ! -d "$mysql_include_path" ]; then
echo "Original mysql include directory doesn't exist. Lets try to use the first available folder in mysql dir."
base_dir=$(brew --cellar mysql)/$(basename $(ls -d $(brew --cellar mysql)/*/ | head -n 1))
echo "Trying the next mysql base dir: $base_dir"
mysql_include_path=$base_dir/include/mysql
mysql_lib_path=$base_dir/lib/libmysqlclient.dylib
fi
time cmake ../../../ \
-DTOOLS_BUILD=all \
-DSCRIPTS=static \
-DCMAKE_BUILD_TYPE=Release \
-DMYSQL_ADD_INCLUDE_PATH=$mysql_include_path \
-DMYSQL_LIBRARY=$mysql_lib_path \
-DREADLINE_INCLUDE_DIR=$(brew --prefix readline)/include \
-DREADLINE_LIBRARY=$(brew --prefix readline)/lib/libreadline.dylib \
-DOPENSSL_INCLUDE_DIR="$OPENSSL_ROOT_DIR/include" \
-DOPENSSL_SSL_LIBRARIES="$OPENSSL_ROOT_DIR/lib/libssl.dylib" \
-DOPENSSL_CRYPTO_LIBRARIES="$OPENSSL_ROOT_DIR/lib/libcrypto.dylib" \
-DCMAKE_C_COMPILER_LAUNCHER=ccache \
-DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
-DUSE_SCRIPTPCH=0 \
-DUSE_COREPCH=0 \
;
time make -j $(($(sysctl -n hw.ncpu ) + 2))
ccache -s

View File

@ -0,0 +1,263 @@
import io
import os
import sys
import re
# Get the src directory of the project
src_directory = os.path.join(os.getcwd(), 'src')
# Global variables
error_handler = False
results = {
"Multiple blank lines check": "Passed",
"Trailing whitespace check": "Passed",
"GetCounter() check": "Passed",
"Misc codestyle check": "Passed",
"GetTypeId() check": "Passed",
"NpcFlagHelpers check": "Passed",
"ItemFlagHelpers check": "Passed",
"ItemTemplateFlagHelpers check": "Passed"
}
# Main function to parse all the files of the project
def parsing_file(directory: str) -> None:
print("Starting AzerothCore CPP Codestyle check...")
print(" ")
print("Please read the C++ Code Standards for AzerothCore:")
print("https://www.azerothcore.org/wiki/cpp-code-standards")
print(" ")
for root, _, files in os.walk(directory):
for file in files:
if not file.endswith('.ico'): # Skip .ico files that cannot be read
file_path = os.path.join(root, file)
file_name = file
try:
with open(file_path, 'r', encoding='utf-8') as file:
multiple_blank_lines_check(file, file_path)
trailing_whitespace_check(file, file_path)
get_counter_check(file, file_path)
if not file_name.endswith('.cmake') and file_name != 'CMakeLists.txt':
misc_codestyle_check(file, file_path)
if file_name != 'Object.h':
get_typeid_check(file, file_path)
if file_name != 'Unit.h':
npcflags_helpers_check(file, file_path)
if file_name != 'Item.h':
itemflag_helpers_check(file, file_path)
if file_name != 'ItemTemplate.h':
itemtemplateflag_helpers_check(file, file_path)
except UnicodeDecodeError:
print(f"\nCould not decode file {file_path}")
sys.exit(1)
# Output the results
print("")
for check, result in results.items():
print(f"{check} : {result}")
if error_handler:
print("\nPlease fix the codestyle issues above.")
sys.exit(1)
else:
print(f"\nEverything looks good")
# Codestyle patterns checking for multiple blank lines
def multiple_blank_lines_check(file: io, file_path: str) -> None:
global error_handler, results
file.seek(0) # Reset file pointer to the beginning
check_failed = False
consecutive_blank_lines = 0
# Parse all the file
for line_number, line in enumerate(file, start = 1):
if line.strip() == '':
consecutive_blank_lines += 1
if consecutive_blank_lines > 1:
print(f"Multiple blank lines found in {file_path} at line {line_number - 1}")
check_failed = True
else:
consecutive_blank_lines = 0
# Additional check for the end of the file
if consecutive_blank_lines >= 1:
print(f"Multiple blank lines found at the end of: {file_path}")
check_failed = True
# Handle the script error and update the result output
if check_failed:
error_handler = True
results["Multiple blank lines check"] = "Failed"
# Codestyle patterns checking for whitespace at the end of the lines
def trailing_whitespace_check(file: io, file_path: str) -> None:
global error_handler, results
file.seek(0) # Reset file pointer to the beginning
# Parse all the file
for line_number, line in enumerate(file, start = 1):
if line.endswith(' \n'):
print(f"Trailing whitespace found: {file_path} at line {line_number}")
if not error_handler:
error_handler = True
results["Trailing whitespace check"] = "Failed"
# Codestyle patterns checking for ObjectGuid::GetCounter()
def get_counter_check(file: io, file_path: str) -> None:
global error_handler, results
file.seek(0) # Reset file pointer to the beginning
# Parse all the file
for line_number, line in enumerate(file, start = 1):
if 'ObjectGuid::GetCounter()' in line:
print(f"Please use ObjectGuid::ToString().c_str() instead ObjectGuid::GetCounter(): {file_path} at line {line_number}")
if not error_handler:
error_handler = True
results["GetCounter() check"] = "Failed"
# Codestyle patterns checking for GetTypeId()
def get_typeid_check(file: io, file_path: str) -> None:
global error_handler, results
file.seek(0) # Reset file pointer to the beginning
check_failed = False
# Parse all the file
for line_number, line in enumerate(file, start = 1):
if 'GetTypeId() == TYPEID_ITEM' in line or 'GetTypeId() != TYPEID_ITEM' in line:
print(f"Please use IsItem() instead of GetTypeId(): {file_path} at line {line_number}")
check_failed = True
if 'GetTypeId() == TYPEID_UNIT' in line or 'GetTypeId() != TYPEID_UNIT' in line:
print(f"Please use IsCreature() instead of GetTypeId(): {file_path} at line {line_number}")
check_failed = True
if 'GetTypeId() == TYPEID_PLAYER' in line or 'GetTypeId() != TYPEID_PLAYER' in line:
print(f"Please use IsPlayer() instead of GetTypeId(): {file_path} at line {line_number}")
check_failed = True
if 'GetTypeId() == TYPEID_GAMEOBJECT' in line or 'GetTypeId() != TYPEID_GAMEOBJECT' in line:
print(f"Please use IsGameObject() instead of GetTypeId(): {file_path} at line {line_number}")
check_failed = True
if 'GetTypeId() == TYPEID_DYNOBJECT' in line or 'GetTypeId() != TYPEID_DYNOBJECT' in line:
print(f"Please use IsDynamicObject() instead of GetTypeId(): {file_path} at line {line_number}")
check_failed = True
# Handle the script error and update the result output
if check_failed:
error_handler = True
results["GetTypeId() check"] = "Failed"
# Codestyle patterns checking for NpcFlag helpers
def npcflags_helpers_check(file: io, file_path: str) -> None:
global error_handler, results
file.seek(0) # Reset file pointer to the beginning
check_failed = False
# Parse all the file
for line_number, line in enumerate(file, start = 1):
if 'GetUInt32Value(UNIT_NPC_FLAGS)' in line:
print(
f"Please use GetNpcFlags() instead of GetUInt32Value(UNIT_NPC_FLAGS): {file_path} at line {line_number}")
check_failed = True
if 'HasFlag(UNIT_NPC_FLAGS,' in line:
print(
f"Please use HasNpcFlag() instead of HasFlag(UNIT_NPC_FLAGS, ...): {file_path} at line {line_number}")
check_failed = True
if 'SetUInt32Value(UNIT_NPC_FLAGS,' in line:
print(
f"Please use ReplaceAllNpcFlags() instead of SetUInt32Value(UNIT_NPC_FLAGS, ...): {file_path} at line {line_number}")
check_failed = True
if 'SetFlag(UNIT_NPC_FLAGS,' in line:
print(
f"Please use SetNpcFlag() instead of SetFlag(UNIT_NPC_FLAGS, ...): {file_path} at line {line_number}")
check_failed = True
if 'RemoveFlag(UNIT_NPC_FLAGS,' in line:
print(
f"Please use RemoveNpcFlag() instead of RemoveFlag(UNIT_NPC_FLAGS, ...): {file_path} at line {line_number}")
check_failed = True
# Handle the script error and update the result output
if check_failed:
error_handler = True
results["NpcFlagHelpers check"] = "Failed"
# Codestyle patterns checking for ItemFlag helpers
def itemflag_helpers_check(file: io, file_path: str) -> None:
global error_handler, results
file.seek(0) # Reset file pointer to the beginning
check_failed = False
# Parse all the file
for line_number, line in enumerate(file, start = 1):
if 'HasFlag(ITEM_FIELD_FLAGS, ITEM_FIELD_FLAG_REFUNDABLE)' in line:
print(
f"Please use IsRefundable() instead of HasFlag(ITEM_FIELD_FLAGS, ITEM_FIELD_FLAG_REFUNDABLE): {file_path} at line {line_number}")
check_failed = True
if 'HasFlag(ITEM_FIELD_FLAGS, ITEM_FIELD_FLAG_BOP_TRADEABLE)' in line:
print(
f"Please use IsBOPTradable() instead of HasFlag(ITEM_FIELD_FLAGS, ITEM_FIELD_FLAG_BOP_TRADEABLE): {file_path} at line {line_number}")
check_failed = True
if 'HasFlag(ITEM_FIELD_FLAGS, ITEM_FIELD_FLAG_WRAPPED)' in line:
print(
f"Please use IsWrapped() instead of HasFlag(ITEM_FIELD_FLAGS, ITEM_FIELD_FLAG_WRAPPED): {file_path} at line {line_number}")
check_failed = True
# Handle the script error and update the result output
if check_failed:
error_handler = True
results["ItemFlagHelpers check"] = "Failed"
# Codestyle patterns checking for ItemTemplate helpers
def itemtemplateflag_helpers_check(file: io, file_path: str) -> None:
global error_handler, results
file.seek(0) # Reset file pointer to the beginning
check_failed = False
# Parse all the file
for line_number, line in enumerate(file, start = 1):
if 'Flags & ITEM_FLAG' in line:
print(
f"Please use HasFlag(ItemFlag) instead of 'Flags & ITEM_FLAG_': {file_path} at line {line_number}")
check_failed = True
if 'Flags2 & ITEM_FLAG2' in line:
print(
f"Please use HasFlag2(ItemFlag2) instead of 'Flags2 & ITEM_FLAG2_': {file_path} at line {line_number}")
check_failed = True
if 'FlagsCu & ITEM_FLAGS_CU' in line:
print(
f"Please use HasFlagCu(ItemFlagsCustom) instead of 'FlagsCu & ITEM_FLAGS_CU_': {file_path} at line {line_number}")
check_failed = True
# Handle the script error and update the result output
if check_failed:
error_handler = True
results["ItemTemplateFlagHelpers check"] = "Failed"
# Codestyle patterns checking for various codestyle issues
def misc_codestyle_check(file: io, file_path: str) -> None:
global error_handler, results
file.seek(0) # Reset file pointer to the beginning
check_failed = False
# used to check for "if/else (...) {" "} else" ignores "if/else (...) {...}" "#define ... if/else (...) {"
ifelse_curlyregex = r"^[^#define].*\s+(if|else)(\s*\(.*\))?\s*{[^}]*$|}\s*else(\s*{[^}]*$)"
# used to catch double semicolons ";;" ignores "(;;)"
double_semiregex = r"(?<!\()\s*;;(?!\))"
# used to catch tabs
tab_regex = r"\t"
# Parse all the file
for line_number, line in enumerate(file, start = 1):
if 'const auto&' in line:
print(
f"Please use the 'auto const&' syntax instead of 'const auto&': {file_path} at line {line_number}")
check_failed = True
if re.search(r'\bconst\s+\w+\s*\*\b', line):
print(
f"Please use the 'Class/ObjectType const*' syntax instead of 'const Class/ObjectType*': {file_path} at line {line_number}")
check_failed = True
if [match for match in [' if(', ' if ( '] if match in line]:
print(
f"Please use the 'if (XXXX)' syntax instead of 'if(XXXX)': {file_path} at line {line_number}")
check_failed = True
if re.match(ifelse_curlyregex, line):
print(
f"Curly brackets are not allowed to be leading or trailing if/else statements. Place it on a new line: {file_path} at line {line_number}")
check_failed = True
if re.search(double_semiregex, line):
print(
f"Double semicolon (;;) found in {file_path} at line {line_number}")
check_failed = True
if re.match(tab_regex, line):
print(
f"Tab found! Replace it to 4 spaces: {file_path} at line {line_number}")
check_failed = True
# Handle the script error and update the result output
if check_failed:
error_handler = True
results["Misc codestyle check"] = "Failed"
# Main function
parsing_file(src_directory)

View File

@ -0,0 +1,330 @@
import io
import os
import sys
import re
import glob
# Get the pending directory of the project
base_dir = os.getcwd()
pattern = os.path.join(base_dir, 'data/sql/updates/pending_db_*')
src_directory = glob.glob(pattern)
# Global variables
error_handler = False
results = {
"Multiple blank lines check": "Passed",
"Trailing whitespace check": "Passed",
"SQL codestyle check": "Passed",
"INSERT & DELETE safety usage check": "Passed",
"Missing semicolon check": "Passed",
"Backtick check": "Passed"
}
# Collect all files in all directories
def collect_files_from_directories(directories: list) -> list:
all_files = []
for directory in directories:
for root, _, files in os.walk(directory):
for file in files:
if not file.endswith('.sh'): # Skip .sh files
all_files.append(os.path.join(root, file))
return all_files
# Main function to parse all the files of the project
def parsing_file(files: list) -> None:
print("Starting AzerothCore SQL Codestyle check...")
print(" ")
print("Please read the SQL Standards for AzerothCore:")
print("https://www.azerothcore.org/wiki/sql-standards")
print(" ")
# Iterate over all files
for file_path in files:
try:
with open(file_path, 'r', encoding='utf-8') as file:
multiple_blank_lines_check(file, file_path)
trailing_whitespace_check(file, file_path)
sql_check(file, file_path)
insert_delete_safety_check(file, file_path)
semicolon_check(file, file_path)
backtick_check(file, file_path)
except UnicodeDecodeError:
print(f"\n❌ Could not decode file {file_path}")
sys.exit(1)
# Output the results
print("\n ")
for check, result in results.items():
print(f"{check} : {result}")
if error_handler:
print("\n ")
print("\n❌ Please fix the codestyle issues above.")
sys.exit(1)
else:
print("\n ")
print(f"\n✅ Everything looks good")
# Codestyle patterns checking for multiple blank lines
def multiple_blank_lines_check(file: io, file_path: str) -> None:
global error_handler, results
file.seek(0) # Reset file pointer to the beginning
check_failed = False
consecutive_blank_lines = 0
# Parse all the file
for line_number, line in enumerate(file, start = 1):
if line.strip() == '':
consecutive_blank_lines += 1
if consecutive_blank_lines > 1:
print(f"❌ Multiple blank lines found in {file_path} at line {line_number - 1}")
check_failed = True
else:
consecutive_blank_lines = 0
# Additional check for the end of the file
if consecutive_blank_lines >= 1:
print(f"❌ Multiple blank lines found at the end of: {file_path}")
check_failed = True
# Handle the script error and update the result output
if check_failed:
error_handler = True
results["Multiple blank lines check"] = "Failed"
# Codestyle patterns checking for whitespace at the end of the lines
def trailing_whitespace_check(file: io, file_path: str) -> None:
global error_handler, results
file.seek(0) # Reset file pointer to the beginning
check_failed = False
# Parse all the file
for line_number, line in enumerate(file, start = 1):
if line.endswith(' \n'):
print(f"❌ Trailing whitespace found: {file_path} at line {line_number}")
check_failed = True
if check_failed:
error_handler = True
results["Trailing whitespace check"] = "Failed"
# Codestyle patterns checking for various codestyle issues
def sql_check(file: io, file_path: str) -> None:
global error_handler, results
file.seek(0) # Reset file pointer to the beginning
check_failed = False
# Parse all the file
for line_number, line in enumerate(file, start = 1):
if [match for match in ['broadcast_text'] if match in line]:
print(
f"❌ DON'T EDIT broadcast_text TABLE UNLESS YOU KNOW WHAT YOU ARE DOING!\nThis error can safely be ignored if the changes are approved to be sniffed: {file_path} at line {line_number}")
check_failed = True
if "EntryOrGuid" in line:
print(
f"❌ Please use entryorguid syntax instead of EntryOrGuid in {file_path} at line {line_number}\nWe recommend to use keira to have the right syntax in auto-query generation")
check_failed = True
if [match for match in [';;'] if match in line]:
print(
f"❌ Double semicolon (;;) found in {file_path} at line {line_number}")
check_failed = True
if re.match(r"\t", line):
print(
f"❌ Tab found! Replace it to 4 spaces: {file_path} at line {line_number}")
check_failed = True
last_line = line[-1].strip()
if last_line:
print(
f"❌ The last line is not a newline. Please add a newline: {file_path}")
check_failed = True
# Handle the script error and update the result output
if check_failed:
error_handler = True
results["SQL codestyle check"] = "Failed"
def insert_delete_safety_check(file: io, file_path: str) -> None:
global error_handler, results
file.seek(0) # Reset file pointer to the beginning
not_delete = ["creature_template", "gameobject_template", "item_template", "quest_template"]
check_failed = False
previous_line = ""
# Parse all the file
for line_number, line in enumerate(file, start = 1):
if line.startswith("--"):
continue
if "INSERT" in line and "DELETE" not in previous_line:
print(f"❌ No DELETE keyword found before the INSERT in {file_path} at line {line_number}\nIf this error is intended, please notify a maintainer")
check_failed = True
previous_line = line
match = re.match(r"DELETE FROM\s+`([^`]+)`", line, re.IGNORECASE)
if match:
table_name = match.group(1)
if table_name in not_delete:
print(
f"❌ Entries from {table_name} should not be deleted! {file_path} at line {line_number}\nIf this error is intended, please notify a maintainer")
check_failed = True
# Handle the script error and update the result output
if check_failed:
error_handler = True
results["INSERT & DELETE safety usage check"] = "Failed"
def semicolon_check(file: io, file_path: str) -> None:
global error_handler, results
file.seek(0) # Reset file pointer to the start
check_failed = False
sql_statement_regex = re.compile(r'^\s*(SELECT|INSERT|UPDATE|DELETE|REPLACE|SET)\b', re.IGNORECASE)
block_comment_start = re.compile(r'/\*')
block_comment_end = re.compile(r'\*/')
inline_comment = re.compile(r'--.*')
query_open = False
in_block_comment = False
inside_values_block = False
lines = file.readlines()
total_lines = len(lines)
def get_next_non_blank_line(start):
""" Get the next non-blank, non-comment line starting from `start` """
for idx in range(start, total_lines):
next_line = lines[idx].strip()
if next_line and not next_line.startswith('--') and not next_line.startswith('/*'):
return next_line
return None
for line_number, line in enumerate(lines, start=1):
stripped_line = line.strip()
# Skip single-line comments
if stripped_line.startswith('--'):
continue
# Handle block comments
if in_block_comment:
if '*/' in stripped_line:
in_block_comment = False
stripped_line = stripped_line.split('*/', 1)[1].strip()
else:
continue
else:
if '/*' in stripped_line:
query_open = False # Reset query state at start of block comment
in_block_comment = True
stripped_line = stripped_line.split('/*', 1)[0].strip()
# Skip empty lines (unless inside values block)
if not stripped_line and not inside_values_block:
continue
# Remove inline comments after SQL
stripped_line = stripped_line.split('--', 1)[0].strip()
if stripped_line.upper().startswith("SET") and not stripped_line.endswith(";"):
print(f"❌ Missing semicolon in {file_path} at line {line_number}")
check_failed = True
# Detect query start
if not query_open and any(keyword in stripped_line.upper() for keyword in ["SELECT", "INSERT", "UPDATE", "DELETE", "REPLACE"]):
query_open = True
# Detect start of multi-line VALUES block
if any(kw in stripped_line.upper() for kw in ["INSERT", "REPLACE"]) and "VALUES" in stripped_line.upper():
inside_values_block = True
query_open = True # Ensure query is marked open too
if inside_values_block:
if not stripped_line:
continue # Allow blank lines inside VALUES block
if stripped_line.startswith('('):
# Get next non-blank line to detect if we're at the last row
next_line = get_next_non_blank_line(line_number)
if next_line and next_line.startswith('('):
# Expect comma if another row follows
if not stripped_line.endswith(','):
print(f"❌ Missing comma in {file_path} at line {line_number}")
check_failed = True
else:
# Expect semicolon if this is the final row
if not stripped_line.endswith(';'):
print(f"❌ Missing semicolon in {file_path} at line {line_number}")
check_failed = True
inside_values_block = False
query_open = False
else:
inside_values_block = False # Close block if semicolon was found
elif query_open and not inside_values_block:
# Normal query handling (outside multi-row VALUES block)
if line_number == total_lines and not stripped_line.endswith(';'):
print(f"❌ Missing semicolon in {file_path} at the last line {line_number}")
check_failed = True
query_open = False
elif stripped_line.endswith(';'):
query_open = False
if check_failed:
error_handler = True
results["Missing semicolon check"] = "Failed"
def backtick_check(file: io, file_path: str) -> None:
global error_handler, results
file.seek(0)
check_failed = False
# Find SQL clauses
pattern = re.compile(
r'\b(SELECT|FROM|JOIN|WHERE|GROUP BY|ORDER BY|DELETE FROM|UPDATE|INSERT INTO|SET|REPLACE|REPLACE INTO)\s+(.*?)(?=;$|(?=\b(?:WHERE|SET|VALUES)\b)|$)',
re.IGNORECASE | re.DOTALL
)
# Make sure to ignore values enclosed in single- and doublequotes
quote_pattern = re.compile(r"'(?:\\'|[^'])*'|\"(?:\\\"|[^\"])*\"")
for line_number, line in enumerate(file, start=1):
# Ignore comments
if line.startswith('--'):
continue
# Sanitize single- and doublequotes to prevent false positives
sanitized_line = quote_pattern.sub('', line)
matches = pattern.findall(sanitized_line)
for clause, content in matches:
# Find all words and exclude @variables
words = re.findall(r'\b(?<!@)([a-zA-Z_][a-zA-Z0-9_]*)\b', content)
for word in words:
# Skip MySQL keywords
if word.upper() in {"SELECT", "FROM", "JOIN", "WHERE", "GROUP", "BY", "ORDER",
"DELETE", "UPDATE", "INSERT", "INTO", "SET", "VALUES", "AND",
"IN", "OR", "REPLACE", "NOT", "BETWEEN",
"DISTINCT", "HAVING", "LIMIT", "OFFSET", "AS", "ON", "INNER",
"LEFT", "RIGHT", "FULL", "OUTER", "CROSS", "NATURAL",
"EXISTS", "LIKE", "IS", "NULL", "UNION", "ALL", "ASC", "DESC",
"CASE", "WHEN", "THEN", "ELSE", "END", "CREATE", "TABLE",
"ALTER", "DROP", "DATABASE", "INDEX", "VIEW", "TRIGGER",
"PROCEDURE", "FUNCTION", "PRIMARY", "KEY", "FOREIGN", "REFERENCES",
"CONSTRAINT", "DEFAULT", "AUTO_INCREMENT", "UNIQUE", "CHECK",
"SHOW", "DESCRIBE", "EXPLAIN", "USE", "GRANT", "REVOKE",
"BEGIN", "COMMIT", "ROLLBACK", "SAVEPOINT", "LOCK", "UNLOCK",
"WITH", "RECURSIVE", "COLUMN", "ENGINE", "CHARSET", "COLLATE",
"IF", "ELSEIF", "LOOP", "WHILE", "DO", "HANDLER", "LEAVE",
"ITERATE", "DECLARE", "CURSOR", "FETCH", "OPEN", "CLOSE"}:
continue
# Make sure the word is enclosed in backticks
if not re.search(rf'`{re.escape(word)}`', content):
print(f"❌ Missing backticks around ({word}). {file_path} at line {line_number}")
check_failed = True
if check_failed:
error_handler = True
results["Backtick check"] = "Failed"
# Collect all files from matching directories
all_files = collect_files_from_directories(src_directory)
# Main function
parsing_file(all_files)

2
apps/compiler/.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
config.sh

32
apps/compiler/README.md Normal file
View File

@ -0,0 +1,32 @@
## How to compile:
First of all, if you need a custom configuration, you have to copy
/conf/dist/config.sh to /conf/config.sh and configure it.
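For example, a minimal sketch of that copy step (assuming you run it from the repository root):
```
cp conf/dist/config.sh conf/config.sh
```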
* for a "clean" compilation you must run all scripts in their order:
./1-clean.sh
./2-configure.sh
./3-build.sh
* if you add/rename/delete some sources and you need to compile it you have to run:
./2-configure.sh
./3-build.sh
* if you have modified code only, you just need to run
./3-build.sh
## compiler.sh
The compiler.sh script provides an interactive menu to clean/compile/build. You can also run an action directly from the command line by passing its option.
Ex:
./compiler.sh 3
It will start the build process (equivalent to ./3-build.sh)
## Note:
For an optimal development process and **much faster** compilation times, it is suggested to use clang instead of gcc.
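As a sketch, clang can be selected in conf/config.sh through the compiler variables consumed by the build scripts (the same names the CI install script writes); treat the exact values as a suggestion:
```
CCOMPILERC="clang"
CCOMPILERCXX="clang++"
```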

76
apps/compiler/compiler.sh Normal file
View File

@ -0,0 +1,76 @@
#!/usr/bin/env bash
set -e
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$CURRENT_PATH/includes/includes.sh"
function run_option() {
re='^[0-9]+$'
if [[ $1 =~ $re ]] && test "${comp_functions[$1-1]+'test'}"; then
${comp_functions[$1-1]}
elif [ -n "$(type -t comp_$1)" ] && [ "$(type -t comp_$1)" = function ]; then
fun="comp_$1"
$fun
else
echo "invalid option, use --help option for the commands list"
fi
}
function comp_quit() {
exit 0
}
comp_options=(
"build: Configure and compile"
"clean: Clean build files"
"configure: Run CMake"
"compile: Compile only"
"all: clean, configure and compile"
"ccacheClean: Clean ccache files, normally not needed"
"ccacheShowStats: show ccache statistics"
"quit: Close this menu")
comp_functions=(
"comp_build"
"comp_clean"
"comp_configure"
"comp_compile"
"comp_all"
"comp_ccacheClean"
"comp_ccacheShowStats"
"comp_quit")
PS3='[ Please enter your choice ]: '
runHooks "ON_AFTER_OPTIONS" #you can create your custom options
function _switch() {
_reply="$1"
_opt="$2"
case $_reply in
""|"--help")
echo "Available commands:"
printf '%s\n' "${options[@]}"
;;
*)
run_option $_reply $_opt
;;
esac
}
while true
do
# run option directly if specified in argument
[ ! -z $1 ] && _switch $@
[ ! -z $1 ] && exit 0
select opt in "${comp_options[@]}"
do
echo "==== ACORE COMPILER ===="
_switch $REPLY
break;
done
done

View File

@ -0,0 +1,7 @@
# you can choose build type from cmd argument
if [ ! -z $1 ]
then
CCTYPE=$1
CCTYPE=${CCTYPE^} # capitalize first letter if it's not yet
fi

View File

@ -0,0 +1,170 @@
function comp_clean() {
DIRTOCLEAN=${BUILDPATH:-var/build/obj}
PATTERN="$DIRTOCLEAN/*"
echo "Cleaning build files in $DIRTOCLEAN"
[ -d "$DIRTOCLEAN" ] && rm -rf $PATTERN
}
function comp_ccacheEnable() {
[ "$AC_CCACHE" != true ] && return
export CCACHE_MAXSIZE=${CCACHE_MAXSIZE:-'1000MB'}
#export CCACHE_DEPEND=true
export CCACHE_SLOPPINESS=${CCACHE_SLOPPINESS:-pch_defines,time_macros,include_file_mtime}
export CCACHE_CPP2=${CCACHE_CPP2:-true} # optimization for clang
export CCACHE_COMPRESS=${CCACHE_COMPRESS:-1}
export CCACHE_COMPRESSLEVEL=${CCACHE_COMPRESSLEVEL:-9}
export CCACHE_COMPILERCHECK=${CCACHE_COMPILERCHECK:-content}
export CCACHE_LOGFILE=${CCACHE_LOGFILE:-"$CCACHE_DIR/cache.debug"}
#export CCACHE_NODIRECT=true
export CCUSTOMOPTIONS="$CCUSTOMOPTIONS -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
}
function comp_ccacheClean() {
[ "$AC_CCACHE" != true ] && echo "ccache is disabled" && return
echo "Cleaning ccache"
ccache -C
ccache -s
}
function comp_ccacheResetStats() {
[ "$AC_CCACHE" != true ] && return
ccache -zc
}
function comp_ccacheShowStats() {
[ "$AC_CCACHE" != true ] && return
ccache -s
}
function comp_configure() {
CWD=$(pwd)
cd $BUILDPATH
echo "Build path: $BUILDPATH"
echo "DEBUG info: $CDEBUG"
echo "Compilation type: $CTYPE"
echo "CCache: $AC_CCACHE"
# -DCMAKE_BUILD_TYPE=$CCTYPE disable optimization "slow and huge amount of ram"
# -DWITH_COREDEBUG=$CDEBUG compiled with debug information
#-DSCRIPTS_COMMANDS=$CSCRIPTS -DSCRIPTS_CUSTOM=$CSCRIPTS -DSCRIPTS_EASTERNKINGDOMS=$CSCRIPTS -DSCRIPTS_EVENTS=$CSCRIPTS -DSCRIPTS_KALIMDOR=$CSCRIPTS \
#-DSCRIPTS_NORTHREND=$CSCRIPTS -DSCRIPTS_OUTDOORPVP=$CSCRIPTS -DSCRIPTS_OUTLAND=$CSCRIPTS -DSCRIPTS_PET=$CSCRIPTS -DSCRIPTS_SPELLS=$CSCRIPTS -DSCRIPTS_WORLD=$CSCRIPTS \
#-DAC_WITH_UNIT_TEST=$CAC_UNIT_TEST -DAC_WITH_PLUGINS=$CAC_PLG \
local DCONF=""
if [ ! -z "$CONFDIR" ]; then
DCONF="-DCONF_DIR=$CONFDIR"
fi
comp_ccacheEnable
OSOPTIONS=""
echo "Platform: $OSTYPE"
case "$OSTYPE" in
darwin*)
OSOPTIONS=" -DMYSQL_ADD_INCLUDE_PATH=/usr/local/include -DMYSQL_LIBRARY=/usr/local/lib/libmysqlclient.dylib -DREADLINE_INCLUDE_DIR=/usr/local/opt/readline/include -DREADLINE_LIBRARY=/usr/local/opt/readline/lib/libreadline.dylib -DOPENSSL_INCLUDE_DIR=/usr/local/opt/openssl@3/include -DOPENSSL_SSL_LIBRARIES=/usr/local/opt/openssl@3/lib/libssl.dylib -DOPENSSL_CRYPTO_LIBRARIES=/usr/local/opt/openssl@3/lib/libcrypto.dylib "
;;
msys*)
OSOPTIONS=" -DMYSQL_INCLUDE_DIR=C:\tools\mysql\current\include -DMYSQL_LIBRARY=C:\tools\mysql\current\lib\mysqlclient.lib "
;;
esac
cmake $SRCPATH -DCMAKE_INSTALL_PREFIX=$BINPATH $DCONF \
-DAPPS_BUILD=$CAPPS_BUILD \
-DTOOLS_BUILD=$CTOOLS_BUILD \
-DSCRIPTS=$CSCRIPTS \
-DMODULES=$CMODULES \
-DBUILD_TESTING=$CBUILD_TESTING \
-DUSE_SCRIPTPCH=$CSCRIPTPCH \
-DUSE_COREPCH=$CCOREPCH \
-DCMAKE_BUILD_TYPE=$CTYPE \
-DWITH_WARNINGS=$CWARNINGS \
-DCMAKE_C_COMPILER=$CCOMPILERC \
-DCMAKE_CXX_COMPILER=$CCOMPILERCXX \
$CBUILD_APPS_LIST $CBUILD_TOOLS_LIST $OSOPTIONS $CCUSTOMOPTIONS
cd $CWD
runHooks "ON_AFTER_CONFIG"
}
function comp_compile() {
[ $MTHREADS == 0 ] && MTHREADS=$(grep -c ^processor /proc/cpuinfo) && MTHREADS=$(($MTHREADS + 2))
echo "Using $MTHREADS threads"
pushd "$BUILDPATH" >> /dev/null || exit 1
comp_ccacheEnable
comp_ccacheResetStats
time cmake --build . --config $CTYPE -j $MTHREADS
comp_ccacheShowStats
echo "Platform: $OSTYPE"
case "$OSTYPE" in
msys*)
cmake --install . --config $CTYPE
popd >> /dev/null || exit 1
echo "Done"
;;
linux*|darwin*)
local confDir=${CONFDIR:-"$AC_BINPATH_FULL/../etc"}
# create the folders before installing to
# set the current user and permissions
echo "Creating $AC_BINPATH_FULL..."
mkdir -p "$AC_BINPATH_FULL"
echo "Creating $confDir..."
mkdir -p "$confDir"
echo "Cmake install..."
sudo cmake --install . --config $CTYPE
popd >> /dev/null || exit 1
# set the SUID bit on all applications
echo "Setting permissions on binary files"
find "$AC_BINPATH_FULL" -mindepth 1 -maxdepth 1 -type f -exec sudo chown root:root -- {} +
find "$AC_BINPATH_FULL" -mindepth 1 -maxdepth 1 -type f -exec sudo chmod u+s -- {} +
if [[ -n "$DOCKER" ]]; then
[[ -f "$confDir/worldserver.conf.dist" ]] && \
cp -nv "$confDir/worldserver.conf.dist" "$confDir/worldserver.conf"
[[ -f "$confDir/authserver.conf.dist" ]] && \
cp -nv "$confDir/authserver.conf.dist" "$confDir/authserver.conf"
[[ -f "$confDir/dbimport.conf.dist" ]] && \
cp -nv "$confDir/dbimport.conf.dist" "$confDir/dbimport.conf"
fi
echo "Done"
;;
esac
runHooks "ON_AFTER_BUILD"
}
function comp_build() {
comp_configure
comp_compile
}
function comp_all() {
comp_clean
comp_build
}

View File

@ -0,0 +1,23 @@
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$CURRENT_PATH/../../bash_shared/includes.sh"
AC_PATH_COMPILER="$AC_PATH_APPS/compiler"
if [ -f "$AC_PATH_COMPILER/config.sh" ]; then
source "$AC_PATH_COMPILER/config.sh" # should overwrite previous
fi
function ac_on_after_build() {
# move the run engine
cp -rvf "$AC_PATH_APPS/startup-scripts/"* "$BINPATH"
}
registerHooks "ON_AFTER_BUILD" ac_on_after_build
source "$AC_PATH_COMPILER/includes/defines.sh"
source "$AC_PATH_COMPILER/includes/functions.sh"
mkdir -p $BUILDPATH
mkdir -p $BINPATH

View File

@ -0,0 +1,22 @@
# ==== PHP merger (index.php + merge.php) ====
This is a PHP script for merging a new .dist file with your existing .conf file (worldserver.conf.dist and authserver.conf.dist).
It uses sessions, so it is multi-user safe. Any options that were removed are added to the bottom of the file, commented out, just in case it removes something it shouldn't.
If you add your custom patch configs below "# Custom", they will be copied exactly as they are.
Your new config will be found under $basedir/session_id/newconfig.conf.merge
If you do not run a PHP server on your machine, you can read this guide on ["How to execute PHP code using command line?"](https://www.geeksforgeeks.org/how-to-execute-php-code-using-command-line/) on geeksforgeeks.org.
```
php -S localhost:port -t E:\Azerothcore-wotlk\apps\config-merger\
```
Change port to an available port to use, e.g. 8000.
Then go to your browser and type:
```
localhost:8000/index.php
```

View File

@ -0,0 +1,44 @@
<?php
/*
* Project Name: Config File Merge For Mangos/Trinity/AzerothCore Server
* Date: 01.01.2010 initial version (0.0.1a)
* Author: Paradox
* Copyright: Paradox
* Email: iamparadox@netscape.net (paypal email)
* License: GNU General Public License v2(GPL)
*/
?>
<meta http-equiv="Content-Type" content="text/html; charset=windows-1251">
<FORM enctype="multipart/form-data" ACTION="merge.php" METHOD="POST">
Dist File (.conf.dist)
<br />
<INPUT name="File1" TYPE="file">
<br />
<br />
Current Conf File (.conf)
<br />
<INPUT name="File2" TYPE="file">
<br />
<br />
<INPUT TYPE=RADIO NAME="eol" VALUE="0" CHECKED >Windows -
<INPUT TYPE=RADIO NAME="eol" VALUE="1" >UNIX/Linux
<br />
<br />
<INPUT TYPE="submit" VALUE="Submit">
<br />
<br />
If you have any custom settings, such as from patches,
<br />
make sure they are at the bottom of the file following
<br />
this block (add it if it's not there)
<br />
###############################################################################
<br />
# Custom
<br />
###############################################################################
<br />
<br />
</FORM>

View File

@ -0,0 +1,179 @@
<?php
/*
* Project Name: Config File Merge For Mangos/Trinity Server
* Date: 01.01.2010 initial version (0.0.1a)
* Author: Paradox
* Copyright: Paradox
* Email: iamparadox@netscape.net (paypal email)
* License: GNU General Public License v2(GPL)
*/
error_reporting(0);
if (!empty($_FILES['File1']) && !empty($_FILES['File2']))
{
session_id();
session_start();
$basedir = "merge";
$eol = "\r\n";
if ($_POST['eol'])
$eol = "\n";
else
$eol = "\r\n";
if (!file_exists($basedir))
mkdir($basedir);
if (!file_exists($basedir."/".session_id()))
mkdir($basedir."/".session_id());
$upload1 = $basedir."/".session_id()."/".basename($_FILES['File1']['name']);
$upload2 = $basedir."/".session_id()."/".basename($_FILES['File2']['name']);
if (strpos($upload1, "worldserver") !== false)
$newconfig = $basedir."/".session_id()."/worldserver.conf.merge";
else if (strpos($upload1, "authserver") !== false)
$newconfig = $basedir."/".session_id()."/authserver.conf.merge";
else
$newconfig = $basedir."/".session_id()."/UnkownConfigFile.conf.merge";
$out_file = fopen($newconfig, "w");
// both uploaded files must be moved into place before merging
$success = move_uploaded_file($_FILES['File1']['tmp_name'], $upload1)
&& move_uploaded_file($_FILES['File2']['tmp_name'], $upload2);
if ($success)
{
$custom_found = false;
$in_file1 = fopen($upload1,"r");
$in_file2 = fopen($upload2,"r");
$array1 = array();
$array2 = array();
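// parse the uploaded .dist file into $array1 as "setting => default value" pairs (comment and blank lines are skipped)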
$line = trim(fgets($in_file1));
while (!feof($in_file1))
{
if ((substr($line,0,1) != '#' && substr($line,0,1) != ''))
{
list($key, $val) = explode("=",$line);
$key = trim($key);
$val = trim($val);
$array1[$key] = $val;
}
$line = trim(fgets($in_file1));
}
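// parse the current .conf file into $array2, stopping once the "# Custom" marker is reached so custom entries are handled separately below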
$line = trim(fgets($in_file2));
while (!feof($in_file2) && !$custom_found)
{
if (substr($line,0,1) != '#' && substr($line,0,1) != '')
{
list($key, $val) = explode("=",$line);
$key = trim($key);
$val = trim($val);
$array2[$key] = $val;
}
if (strtolower($line) == "# custom")
$custom_found = true;
else
$line = trim(fgets($in_file2));
}
fclose($in_file1);
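// keep the user's current value for every setting that still exists in the new .dist; whatever is left in $array2 was removed from the dist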
foreach($array2 as $k => $v)
{
if (array_key_exists($k, $array1))
{
$array1[$k] = $v;
unset($array2[$k]);
}
}
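// re-read the .dist file and write the merged config, preserving its comments and ordering while substituting the merged values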
$in_file1 = fopen($upload1,"r");
$line = trim(fgets($in_file1));
while (!feof($in_file1))
{
if (substr($line,0,1) != '#' && substr($line,0,1) != '')
{
$array = array();
while (substr($line,0,1) != '#' && substr($line,0,1) != '')
{
list($key, $val) = explode("=",$line);
$key = trim($key);
$val = trim($val);
$array[$key] = $val;
$line = trim(fgets($in_file1));
}
foreach($array as $k => $v)
{
if (array_key_exists($k, $array1))
fwrite($out_file, $k."=".$array1[$k].$eol);
else
continue;
}
unset($array);
if (!feof($in_file1))
fwrite($out_file, $line.$eol);
}
else
fwrite($out_file, $line.$eol);
$line = trim(fgets($in_file1));
}
if ($custom_found)
{
fwrite($out_file, $eol);
fwrite($out_file, "###############################################################################".$eol);
fwrite($out_file, "# Custom".$eol);
$line = trim(fgets($in_file2));
while (!feof($in_file2))
{
fwrite($out_file, $line.$eol);
$line = trim(fgets($in_file2));
}
}
$first = true;
foreach($array2 as $k => $v)
{
if ($first)
{
fwrite($out_file, $eol);
fwrite($out_file, "###############################################################################".$eol);
fwrite($out_file, "# The Following values were removed from the config.".$eol);
$first = false;
}
fwrite($out_file, "# ".$k."=".$v.$eol);
}
if (strpos($upload1, "worldserver") !== false)
{
file_put_contents($newconfig, str_replace("]=","]",file_get_contents($newconfig)));
}
else if (strpos($upload1, "authserver") !== false)
{
file_put_contents($newconfig, str_replace("]=","]",file_get_contents($newconfig)));
}
unset($array1);
unset($array2);
fclose($in_file1);
fclose($in_file2);
fclose($out_file);
unlink($upload1);
unlink($upload2);
echo "Process done";
echo "<br /><a href=".$newconfig.">Click here to retrieve your merged conf</a>";
}
}
else
{
echo "An error has occurred";
}
?>

1
apps/db_exporter/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
config.sh

View File

@ -0,0 +1,12 @@
This script is used by devs to export the databases into the SQL base directories.
You should only use it on clean databases.
## USAGE
NOTE: this script currently only works under Unix.
1) Create a config.sh file overriding the DB connection configuration
from /conf/config.sh.dist (a minimal sketch follows below)
2) Run the db_export.sh script and wait
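A minimal sketch of what config.sh could look like, based on the variables read by db_export.sh below; the database name and path here are assumptions, and the real connection settings come from /conf/config.sh.dist:
```
# databases to export; each name becomes the suffix of the DB_*_ variables below
DATABASES=("WORLD")

# per-database settings read via indirect expansion in db_export.sh (values are assumptions)
DB_WORLD_NAME="acore_world"
DB_WORLD_PATHS="$AC_PATH_ROOT/data/sql/base/db_world"
DB_WORLD_CONF=""
```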

View File

@ -0,0 +1,52 @@
#!/usr/bin/env bash
ROOTPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../../" && pwd )"
source "$ROOTPATH/apps/bash_shared/includes.sh"
if [ -f "./config.sh" ]; then
source "./config.sh" # should overwrite previous
fi
echo "This is a dev-only procedure to export the DB into the SQL base files. All base files will be overwritten."
read -p "Are you sure you want to continue (y/N)? " choice
case "$choice" in
y|Y ) echo "Exporting the DB into the SQL base files...";;
* ) return;;
esac
echo "===== STARTING PROCESS ====="
function export() {
echo "Working on: "$1
database=$1
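# resolve the per-database variables (DB_<NAME>_PATHS, DB_<NAME>_CONF, DB_<NAME>_NAME) via indirect expansion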
var_base_path="DB_"$database"_PATHS"
base_path=${!var_base_path%/}
base_conf="TPATH="$base_path";\
CLEANFOLDER=1; \
CHMODE=0; \
TEXTDUMPS=0; \
PARSEDUMP=1; \
FULL=0; \
DUMPOPTS='--skip-comments --skip-set-charset --routines --extended-insert --order-by-primary --single-transaction --quick'; \
"
var_base_conf="DB_"$database"_CONF"
base_conf=$base_conf${!var_base_conf}
var_base_name="DB_"$database"_NAME"
base_name=${!var_base_name}
bash "$AC_PATH_DEPS/acore/mysql-tools/mysql-tools" "dump" "" "$base_name" "" "$base_conf"
}
for db in ${DATABASES[@]}
do
export "$db"
done
echo "===== DONE ====="

259
apps/docker/Dockerfile Normal file
View File

@ -0,0 +1,259 @@
# lts
ARG UBUNTU_VERSION=22.04
ARG TZ=Etc/UTC
# This target lays out the general directory skeleton for AzerothCore.
# It isn't intended to be used directly.
FROM ubuntu:$UBUNTU_VERSION AS skeleton
ARG DOCKER=1
ARG DEBIAN_FRONTEND=noninteractive
ENV AC_FORCE_CREATE_DB=1
RUN mkdir -pv \
/azerothcore/bin \
/azerothcore/data \
/azerothcore/deps \
/azerothcore/env/dist/bin \
/azerothcore/env/dist/data/Cameras \
/azerothcore/env/dist/data/dbc \
/azerothcore/env/dist/data/maps \
/azerothcore/env/dist/data/mmaps \
/azerothcore/env/dist/data/vmaps \
/azerothcore/env/dist/logs \
/azerothcore/env/dist/temp \
/azerothcore/env/dist/etc \
/azerothcore/modules \
/azerothcore/src \
/azerothcore/build
# Configure Timezone
RUN apt-get update \
&& apt-get install -y --no-install-recommends tzdata ca-certificates \
&& ln -snf "/usr/share/zoneinfo/$TZ" /etc/localtime \
&& echo "$TZ" > /etc/timezone \
&& dpkg-reconfigure --frontend noninteractive tzdata \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /azerothcore
# This target builds the docker image.
# It can be useful for inspecting the explicit outputs from the build.
FROM skeleton AS build
ARG CTOOLS_BUILD="all"
ARG CTYPE="RelWithDebInfo"
ARG CCACHE_CPP2="true"
ARG CSCRIPTPCH="OFF"
ARG CSCRIPTS="static"
ARG CMODULES="static"
ARG CSCRIPTS_DEFAULT_LINKAGE="static"
ARG CWITH_WARNINGS="ON"
ARG CMAKE_EXTRA_OPTIONS=""
ARG GIT_DISCOVERY_ACROSS_FILESYSTEM=1
ARG CCACHE_DIR="/ccache"
ARG CCACHE_MAXSIZE="1000MB"
ARG CCACHE_SLOPPINESS="pch_defines,time_macros,include_file_mtime"
ARG CCACHE_COMPRESS=""
ARG CCACHE_COMPRESSLEVEL="9"
ARG CCACHE_COMPILERCHECK="content"
ARG CCACHE_LOGFILE=""
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
build-essential ccache libtool cmake-data make cmake clang \
git lsb-base curl unzip default-mysql-client openssl \
default-libmysqlclient-dev libboost-all-dev libssl-dev libmysql++-dev \
libreadline-dev zlib1g-dev libbz2-dev libncurses5-dev \
&& rm -rf /var/lib/apt/lists/*
COPY CMakeLists.txt /azerothcore/CMakeLists.txt
COPY conf /azerothcore/conf
COPY deps /azerothcore/deps
COPY src /azerothcore/src
COPY modules /azerothcore/modules
ARG CACHEBUST=1
WORKDIR /azerothcore/build
RUN --mount=type=cache,target=/ccache,sharing=locked \
# This may seem silly (and it is), but AzerothCore wants the git repo at
# build time. The git repo is _huge_ and it's not something that really
# makes sense to mount into the container, but this way we can let the build
# have the information it needs without including the hundreds of megabytes
# of git repo into the container.
--mount=type=bind,target=/azerothcore/.git,source=.git \
git config --global --add safe.directory /azerothcore \
&& cmake /azerothcore \
-DCMAKE_INSTALL_PREFIX="/azerothcore/env/dist" \
-DAPPS_BUILD="all" \
-DTOOLS_BUILD="$CTOOLS_BUILD" \
-DSCRIPTS="$CSCRIPTS" \
-DMODULES="$CMODULES" \
-DWITH_WARNINGS="$CWITH_WARNINGS" \
-DCMAKE_BUILD_TYPE="$CTYPE" \
-DCMAKE_CXX_COMPILER="clang++" \
-DCMAKE_C_COMPILER="clang" \
-DCMAKE_CXX_COMPILER_LAUNCHER="ccache" \
-DCMAKE_C_COMPILER_LAUNCHER="ccache" \
-DBoost_USE_STATIC_LIBS="ON" \
&& cmake --build . --config "$CTYPE" -j $(($(nproc) + 1)) \
&& cmake --install . --config "$CTYPE"
#############################
# Base runtime for services #
#############################
FROM skeleton AS runtime
ARG USER_ID=1000
ARG GROUP_ID=1000
ARG DOCKER_USER=acore
ENV ACORE_COMPONENT=undefined
# Install base dependencies for azerothcore
RUN apt-get update && \
apt-get install -y --no-install-recommends \
libmysqlclient21 libreadline8 \
gettext-base default-mysql-client && \
rm -rf /var/lib/apt/lists/*
COPY --from=build /azerothcore/env/dist/etc/ /azerothcore/env/ref/etc
VOLUME /azerothcore/env/dist/etc
ENV PATH="/azerothcore/env/dist/bin:$PATH"
RUN groupadd --gid "$GROUP_ID" "$DOCKER_USER" && \
useradd -d /azerothcore --uid "$USER_ID" --gid "$GROUP_ID" "$DOCKER_USER" && \
passwd -d "$DOCKER_USER" && \
chown -R "$DOCKER_USER:$DOCKER_USER" /azerothcore
COPY --chown=$USER_ID:$GROUP_ID \
--chmod=755 \
apps/docker/entrypoint.sh /azerothcore/entrypoint.sh
USER $DOCKER_USER
ENTRYPOINT ["/usr/bin/env", "bash", "/azerothcore/entrypoint.sh"]
###############
# Auth Server #
###############
FROM runtime AS authserver
LABEL description="AzerothCore Auth Server"
ENV ACORE_COMPONENT=authserver
# Don't run database migrations. We can leave that up to the db-import container
ENV AC_UPDATES_ENABLE_DATABASES=0
# This disables user prompts. The console is still active, however
ENV AC_DISABLE_INTERACTIVE=1
ENV AC_CLOSE_IDLE_CONNECTIONS=0
COPY --chown=$DOCKER_USER:$DOCKER_USER \
--from=build \
/azerothcore/env/dist/bin/authserver /azerothcore/env/dist/bin/authserver
CMD ["authserver"]
################
# World Server #
################
FROM runtime AS worldserver
LABEL description="AzerothCore World Server"
ENV ACORE_COMPONENT=worldserver
# Don't run database migrations. We can leave that up to the db-import container
ENV AC_UPDATES_ENABLE_DATABASES=0
# This disables user prompts. The console is still active, however
ENV AC_DISABLE_INTERACTIVE=1
ENV AC_CLOSE_IDLE_CONNECTIONS=0
COPY --chown=$DOCKER_USER:$DOCKER_USER \
--from=build \
/azerothcore/env/dist/bin/worldserver /azerothcore/env/dist/bin/worldserver
VOLUME /azerothcore/env/dist/etc
CMD ["worldserver"]
#############
# DB Import #
#############
FROM runtime AS db-import
LABEL description="AzerothCore Database Import tool"
USER $DOCKER_USER
ENV ACORE_COMPONENT=dbimport
COPY --chown=$DOCKER_USER:$DOCKER_USER \
data data
COPY --chown=$DOCKER_USER:$DOCKER_USER \
modules modules
COPY --chown=$DOCKER_USER:$DOCKER_USER\
--from=build \
/azerothcore/env/dist/bin/dbimport /azerothcore/env/dist/bin/dbimport
CMD [ "/azerothcore/env/dist/bin/dbimport" ]
###############
# Client Data #
###############
FROM skeleton AS client-data
LABEL description="AzerothCore client-data"
ENV DATAPATH=/azerothcore/env/dist/data
RUN apt-get update && \
apt-get install -y curl unzip && \
rm -rf /var/lib/apt/lists/*
COPY --chown=$DOCKER_USER:$DOCKER_USER apps apps
VOLUME /azerothcore/env/dist/data
USER $DOCKER_USER
CMD ["bash", "-c", "source /azerothcore/apps/installer/includes/functions.sh && inst_download_client_data" ]
##################
# Map Extractors #
##################
FROM runtime AS tools
LABEL description="AzerothCore Tools"
WORKDIR /azerothcore/env/dist/
RUN mkdir -pv /azerothcore/env/dist/Cameras \
/azerothcore/env/dist/dbc \
/azerothcore/env/dist/maps \
/azerothcore/env/dist/mmaps \
/azerothcore/env/dist/vmaps
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build \
/azerothcore/env/dist/bin/map_extractor /azerothcore/env/dist/bin/map_extractor
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build \
/azerothcore/env/dist/bin/mmaps_generator /azerothcore/env/dist/bin/mmaps_generator
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build \
/azerothcore/env/dist/bin/vmap4_assembler /azerothcore/env/dist/bin/vmap4_assembler
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build \
/azerothcore/env/dist/bin/vmap4_extractor /azerothcore/env/dist/bin/vmap4_extractor

View File

@ -0,0 +1,108 @@
#syntax=docker/dockerfile:1.2
#================================================================
#
# DEV: Stage used for the development environment
# and the locally built services
#
#=================================================================
FROM ubuntu:24.04 as dev
ARG USER_ID=1000
ARG GROUP_ID=1000
ARG DOCKER_USER=acore
ARG TZ=Etc/UTC
LABEL description="AC base image for dev containers"
# List of timezones: http://en.wikipedia.org/wiki/List_of_tz_database_time_zones
ENV DOCKER=1
# Ensure ac-dev-server can properly pull versions
ENV GIT_DISCOVERY_ACROSS_FILESYSTEM=1
# set timezone environment variable
ENV TZ=$TZ
# set noninteractive mode so tzdata doesn't ask to set timezone on install
ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
# Classic install
git \
clang lldb lld clang-format clang-tidy \
make cmake \
gcc g++ \
libmysqlclient-dev \
libssl-dev \
libbz2-dev \
libreadline-dev \
libncurses-dev \
mysql-server \
libboost-all-dev \
# Other
curl \
unzip \
sudo \
gdb gdbserver \
libtool \
build-essential \
cmake-data \
openssl \
google-perftools libgoogle-perftools-dev \
libmysql++-dev \
ccache \
tzdata \
# Utility for column command used by dashboard
util-linux \
# Certificates for downloading client data
ca-certificates \
&& rm -rf /var/lib/apt/lists/*
# Ensure git will work with the AzerothCore source directory
RUN git config --global --add safe.directory /azerothcore
# change timezone in container
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime \
&& echo $TZ > /etc/timezone && dpkg-reconfigure --frontend noninteractive tzdata
# Create a non-root user
RUN userdel --remove ubuntu \
&& addgroup --gid "$GROUP_ID" "$DOCKER_USER" \
&& adduser --disabled-password --gecos '' --uid "$USER_ID" --gid "$GROUP_ID" "$DOCKER_USER" \
&& passwd -d "$DOCKER_USER" \
&& echo "$DOCKER_USER ALL=(ALL:ALL) NOPASSWD: ALL" >> /etc/sudoers
# must be created to set the correct permissions on them
RUN mkdir -p \
/azerothcore/env/dist/bin \
/azerothcore/env/dist/data/Cameras \
/azerothcore/env/dist/data/dbc \
/azerothcore/env/dist/data/maps \
/azerothcore/env/dist/data/mmaps \
/azerothcore/env/dist/data/vmaps \
/azerothcore/env/dist/logs \
/azerothcore/env/dist/temp \
/azerothcore/env/dist/etc \
/azerothcore/var/build/obj
# Correct permissions for non-root operations
RUN chown -R $DOCKER_USER:$DOCKER_USER /home/acore /run /opt /azerothcore
USER $DOCKER_USER
# copy only necessary files for the acore dashboard
COPY --chown=$DOCKER_USER:$DOCKER_USER apps /azerothcore/apps
COPY --chown=$DOCKER_USER:$DOCKER_USER bin /azerothcore/bin
COPY --chown=$DOCKER_USER:$DOCKER_USER conf /azerothcore/conf
COPY --chown=$DOCKER_USER:$DOCKER_USER data /azerothcore/data
COPY --chown=$DOCKER_USER:$DOCKER_USER deps /azerothcore/deps
COPY --chown=$DOCKER_USER:$DOCKER_USER acore.json /azerothcore/acore.json
COPY --chown=$DOCKER_USER:$DOCKER_USER acore.sh /azerothcore/acore.sh
# Download deno and make sure the dashboard works
RUN bash /azerothcore/acore.sh quit
WORKDIR /azerothcore

41
apps/docker/README.md Normal file
View File

@ -0,0 +1,41 @@
# Docker
Full documentation is [on our wiki](https://www.azerothcore.org/wiki/install-with-docker#installation)
## Building
### Prerequisites
Ensure that you have docker, docker compose (v2), and the docker buildx command
installed.
It's all bundled with [Docker Desktop](https://docs.docker.com/get-docker/),
though if you're using Linux you can install them through your distribution's
package manager or by using the [documentation from docker](https://docs.docker.com/engine/install/).
### Running the Build
1. Build containers with command
```console
$ docker compose build
```
   - Note that the initial build will take a long time, though subsequent builds should be faster
2. Start containers with command
```console
$ docker compose up -d
# Skip the build step
$ docker compose up -d --build
```
   - Note that this command may take a while the first time due to the database import
3. (on first install) You'll need to attach to the worldserver and create an Admin account
```console
$ docker compose attach ac-worldserver
AC> account create admin password 3 -1
```

216
apps/docker/docker-cmd.sh Normal file
View File

@ -0,0 +1,216 @@
#!/bin/bash
# TODO(michaeldelago) decide if we need a wrapper like this around docker
# commands.
#
# Running the docker commands should be simple and familiar.
# Introducing extra steps through the dashboard can cause issues with people
# getting started, especially if they already know docker.
#
# If a new user knows docker, they will feel (pretty close to) right at home.
# If a new user doesn't know docker, it's easy to learn and the knowledge
# applies to much more than azerothcore
set -euo pipefail
COMPOSE_DOCKER_CLI_BUILD="1"
DOCKER_BUILDKIT="1"
# BUILDKIT_INLINE_CACHE="1"
function usage () {
cat <<EOF
Wrapper for shell scripts around docker
usage: $(basename $0) ACTION [ ACTION... ] [ ACTION_ARG... ]
actions:
EOF
# the `-s` will remove the "#" and properly space the action and description
cat <<EOF | column -t -l2 -s'#'
> start:app # Start the development worldserver and authserver
> start:app:d # Start the development worldserver and authserver in detached mode
> build # build the development worldserver and authserver
> pull # pull the development worldserver and authserver
> build:nocache # build the development worldserver and authserver without cache
> clean:build # clean build artifacts from the dev server
> client-data # download client data in the dev server
> dev:up # start the dev server
> dev:build # compile azerothcore using the dev server
> dev:dash # execute the dashboard in the dev server container
> dev:shell [ ARGS... ] # open a bash shell in the dev server
> prod:build # Build the service containers used by acore-docker
> prod:pull # Pull the containers used by acore-docker
> prod:up # Start the services used by acore-docker
> prod:up:d # start the services used by acore-docker in the background
> attach SERVICE # attach to a service currently running in docker compose
EOF
}
# If no args, just spit usage and exit
[[ $# -eq 0 ]] && usage && exit
# loop through commands passed
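# For illustration (hypothetical invocation; this script is normally reached via
# './acore.sh docker'): actions can be chained and are consumed left to right, e.g.
#   ./docker-cmd.sh build start:app:d
# builds the images and then starts the stack in detached mode.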
while [[ $# -gt 0 ]]; do
case "$1" in
start:app)
set -x
docker compose up
set +x
# pop the head off of the queue of args
# After this, the value of $1 is the value of $2
shift
;;
start:app:d)
set -x
docker compose up -d
set +x
shift
;;
build)
set -x
docker compose build
set +x
shift
;;
pull)
set -x
docker compose pull
set +x
shift
;;
build:nocache)
set -x
docker compose build --no-cache
set +x
shift
;;
clean:build)
set -x
# Don't run 'docker buildx prune' since it may "escape" our bubble
# and affect other projects on the user's workstation/server
cat <<EOF
This command has been deprecated, and at the moment does not do anything.
If you'd like to build without cache, use './acore.sh docker build:nocache' or look into the 'docker buildx prune' command:
> https://docs.docker.com/engine/reference/commandline/buildx_prune/
EOF
set +x
shift
;;
client-data)
set -x
docker compose up ac-client-data-init
set +x
shift
;;
dev:up)
set -x
docker compose --profile dev up ac-dev-server -d
set +x
shift
;;
dev:build)
set -x
docker compose --profile dev run --rm ac-dev-server bash /azerothcore/acore.sh compiler build
set +x
shift
;;
dev:dash)
set -x
docker compose --profile dev run --rm ac-dev-server bash /azerothcore/acore.sh ${@:2}
set +x
shift
;;
dev:shell)
set -x
docker compose --profile dev up -d ac-dev-server
docker compose --profile dev exec ac-dev-server bash ${@:2}
set +x
shift
;;
build:prod|prod:build)
cat <<EOF
This command is deprecated and is scheduled to be removed. Please update any scripts or automation to use the following command instead:
./acore.sh docker build
The build will continue in 3 seconds
EOF
sleep 3
set -x
docker compose build
set +x
shift
;;
pull:prod|prod:pull)
cat <<EOF
This command is deprecated and is scheduled to be removed. Please update any scripts or automation to use the following command instead:
./acore.sh docker pull
The image pull will continue in 3 seconds
EOF
sleep 3
set -x
docker compose pull
set +x
shift
;;
prod:up|start:prod)
cat <<EOF
This command is deprecated and is scheduled to be removed. Please update any scripts or automation to use the following command instead:
./acore.sh docker start:app
The containers will start in 3 seconds
EOF
sleep 3
set -x
docker compose up
set +x
shift
;;
prod:up:d|start:prod:d)
cat <<EOF
This command is deprecated and is scheduled to be removed. Please update any scripts or automation to use the following command instead:
./acore.sh docker start:app:d
The containers will start in 3 seconds
EOF
sleep 3
set -x
docker compose up -d
set +x
shift
;;
attach)
SERVICE="$2"
set -x
docker compose attach "$SERVICE"
set +x
shift
shift # Second to pass the argument
;;
*)
echo "Unknown or empty arg"
usage
exit 1
esac
done

54
apps/docker/entrypoint.sh Normal file
View File

@ -0,0 +1,54 @@
#!/usr/bin/env bash
set -euo pipefail
CONF_DIR="${CONF_DIR:-/azerothcore/env/dist/etc}"
LOGS_DIR="${LOGS_DIR:-/azerothcore/env/dist/logs}"
if ! touch "$CONF_DIR/.write-test" || ! touch "$LOGS_DIR/.write-test"; then
cat <<EOF
===== WARNING =====
The current user doesn't have write permissions for
the configuration dir ($CONF_DIR) or logs dir ($LOGS_DIR).
It's likely that services will fail due to this.
This is usually caused by cloning the repository as root,
so the files are owned by root (uid 0).
To resolve this, you can change the ownership of these directories
with the following command on the host machine.
Note that if the files are owned by root, the ownership must
be changed as root (hence the sudo).
$ sudo chown -R $(id -u):$(id -g) /path/to$CONF_DIR /path/to$LOGS_DIR
Alternatively, you can set the DOCKER_USER environment
variable (on the host machine) to "root", though this
isn't recommended.
$ DOCKER_USER=root docker-compose up -d
====================
EOF
fi
[[ -f "$CONF_DIR/.write-test" ]] && rm -f "$CONF_DIR/.write-test"
[[ -f "$LOGS_DIR/.write-test" ]] && rm -f "$LOGS_DIR/.write-test"
# Copy all default config files to env/dist/etc if they don't already exist
# -r == recursive
# -n == no clobber (don't overwrite)
# -v == be verbose
cp -rnv /azerothcore/env/ref/etc/* "$CONF_DIR"
CONF="$CONF_DIR/$ACORE_COMPONENT.conf"
CONF_DIST="$CONF_DIR/$ACORE_COMPONENT.conf.dist"
# Copy the "dist" file to the "conf" if the conf doesn't already exist
if [[ -f "$CONF_DIST" ]]; then
cp -vn "$CONF_DIST" "$CONF"
else
touch "$CONF"
fi
echo "Starting $ACORE_COMPONENT..."
exec "$@"
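# Example (hypothetical values): with ACORE_COMPONENT=worldserver this script copies
# worldserver.conf.dist to worldserver.conf on first run and then execs the command
# passed to it, e.g.
#   ACORE_COMPONENT=worldserver ./entrypoint.sh /azerothcore/env/dist/bin/worldserver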

View File

@ -0,0 +1,83 @@
@ECHO OFF
CLS
:MENU
ECHO.
ECHO ...............................................
ECHO AzerothCore dbc, maps, vmaps, mmaps extractor
ECHO ...............................................
ECHO PRESS 1, 2, 3 OR 4 to select your task, or 5 to EXIT.
ECHO ...............................................
ECHO.
ECHO WARNING! When extracting vmaps, the extractor will
ECHO output the text below; this is intended and not an error:
ECHO ..........................................
ECHO Extracting World\Wmo\Band\Final_Stage.wmo
ECHO No such file.
ECHO Couldn't open RootWmo!!!
ECHO Done!
ECHO ..........................................
ECHO.
ECHO Press 1, 2, 3 or 4 to start extracting or 5 to exit.
ECHO 1 - Extract base files (NEEDED) and cameras.
ECHO 2 - Extract vmaps (needs maps to be extracted before you run this) (OPTIONAL, highly recommended)
ECHO 3 - Extract mmaps (needs vmaps to be extracted before you run this, may take hours) (OPTIONAL, highly recommended)
ECHO 4 - Extract all (may take hours)
ECHO 5 - EXIT
ECHO.
SET /P M=Type 1, 2, 3, 4 or 5 then press ENTER:
IF %M%==1 GOTO MAPS
IF %M%==2 GOTO VMAPS
IF %M%==3 GOTO MMAPS
IF %M%==4 GOTO ALL
IF %M%==5 GOTO :EOF
:MAPS
start /b /w map_extractor.exe
GOTO MENU
:VMAPS
start /b /w vmap4_extractor.exe
if exist vmaps\ (
echo folder found.
) else (
echo creating folder "vmaps".
mkdir "vmaps"
)
start /b /w vmap4_assembler.exe Buildings vmaps
rmdir Buildings /s /q
GOTO MENU
:MMAPS
ECHO This may take a few hours to complete. Please be patient.
PAUSE
if exist mmaps\ (
echo folder found.
) else (
echo creating folder "mmaps".
mkdir "mmaps"
)
start /b /w mmaps_generator.exe
GOTO MENU
:ALL
ECHO This may take a few hours to complete. Please be patient.
PAUSE
if exist vmaps\ (
echo folder found.
) else (
echo creating folder "vmaps".
mkdir "vmaps"
)
if exist mmaps\ (
echo folder found.
) else (
echo creating folder "mmaps".
mkdir "mmaps"
)
start /b /w map_extractor.exe
start /b /w vmap4_extractor.exe
start /b /w vmap4_assembler.exe Buildings vmaps
rmdir Buildings /s /q
start /b /w mmaps_generator.exe
GOTO MENU

View File

@ -0,0 +1,83 @@
@ECHO OFF
CLS
:MENU
ECHO.
ECHO ...............................................
ECHO AzerothCore dbc, maps, vmaps, mmaps extractor
ECHO ...............................................
ECHO PRESIONE 1, 2, 3 O 4 para seleccionar su tarea, o 5 para SALIR.
ECHO ...............................................
ECHO.
ECHO ADVERTENCIA: al extraer los vmaps, el extractor
ECHO mostrara el texto de abajo; es intencional y no un error:
ECHO ..........................................
ECHO Extracting World\Wmo\Band\Final_Stage.wmo
ECHO No such file.
ECHO Couldn't open RootWmo!!!
ECHO Done!
ECHO ..........................................
ECHO.
ECHO Pulse 1, 2, 3 o 4 para iniciar la extraccion o 5 para salir.
ECHO 1 - Extraer los archivos base (NECESARIOS) y las cámaras.
ECHO 2 - Extraer vmaps (necesita que los mapas se extraigan antes de ejecutar esto) (OPCIONAL, muy recomendable)
ECHO 3 - Extraer mmaps (necesita que los vmaps se extraigan antes de ejecutar esto, puede llevar horas) (OPCIONAL, muy recomendable)
ECHO 4 - Extraer todo (puede llevar varias horas)
ECHO 5 - SALIR
ECHO.
SET /P M=Escriba 1, 2, 3, 4 o 5 y pulse ENTER:
IF %M%==1 GOTO MAPS
IF %M%==2 GOTO VMAPS
IF %M%==3 GOTO MMAPS
IF %M%==4 GOTO ALL
IF %M%==5 GOTO :EOF
:MAPS
start /b /w map_extractor.exe
GOTO MENU
:VMAPS
start /b /w vmap4_extractor.exe
if exist vmaps\ (
echo folder found.
) else (
echo creating folder "vmaps".
mkdir "vmaps"
)
start /b /w vmap4_assembler.exe Buildings vmaps
rmdir Buildings /s /q
GOTO MENU
:MMAPS
ECHO Esto puede tardar unas horas en completarse. Por favor, tenga paciencia.
PAUSE
if exist mmaps\ (
echo folder found.
) else (
echo creating folder "mmaps".
mkdir "mmaps"
)
start /b /w mmaps_generator.exe
GOTO MENU
:ALL
ECHO Esto puede tardar unas horas en completarse. Por favor, tenga paciencia.
PAUSE
if exist vmaps\ (
echo folder found.
) else (
echo creating folder "vmaps".
mkdir "vmaps"
)
if exist mmaps\ (
echo folder found.
) else (
echo creating folder "mmaps".
mkdir "mmaps"
)
start /b /w map_extractor.exe
start /b /w vmap4_extractor.exe
start /b /w vmap4_assembler.exe Buildings vmaps
rmdir Buildings /s /q
start /b /w mmaps_generator.exe
GOTO MENU

View File

@ -0,0 +1,5 @@
#!/usr/bin/env bash
## Set a local git commit template
git config --local commit.template ".git_commit_template.txt" ;
echo "--- Successfully set the default commit template for this repository only. Verify with: git config -e"

View File

@ -0,0 +1,34 @@
#!/usr/bin/env bash
#######################
#
# README
#
# This script is used to automatically update
# submodules and subrepos included in this project
# Subrepos are updated in a bidirectional way (pull + push)
# because they are intended to be developed by this organization
#
# NOTE: only maintainers and CI should run this script and
# keep it updated
#
#######################
set -e
ROOT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../"
# update all submodules
git submodule update --init --recursive
git submodule foreach git pull origin master
# include libraries for git subrepo
source "$ROOT_PATH/deps/git-subrepo/.rc"
source "$ROOT_PATH/deps/acore/bash-lib/src/git-utils/subrepo.sh"
echo "> Pulling and update all subrepos"
subrepoUpdate https://github.com/azerothcore/bash-lib master deps/acore/bash-lib
subrepoUpdate https://github.com/azerothcore/cmake-utils master deps/acore/cmake-utils
subrepoUpdate https://github.com/azerothcore/mysql-tools master deps/acore/mysql-tools
subrepoUpdate https://github.com/azerothcore/joiner master deps/acore/joiner

1319
apps/grafana/1_General.json Normal file

File diff suppressed because it is too large

691
apps/grafana/2_Maps.json Normal file
View File

@ -0,0 +1,691 @@
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": "-- Grafana --",
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
},
{
"datasource": "Influx",
"enable": true,
"iconColor": "#C0C6BE",
"iconSize": 13,
"lineColor": "rgba(255, 96, 96, 0.592157)",
"name": "Global Events",
"query": "select title, text from events where $timeFilter and realm =~ /$realm$/",
"showLine": true,
"textColumn": "text",
"titleColumn": "title"
}
]
},
"editable": true,
"gnetId": null,
"graphTooltip": 0,
"id": 6,
"iteration": 1595939001794,
"links": [],
"panels": [
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": "Influx",
"editable": true,
"error": false,
"fieldConfig": {
"defaults": {
"custom": {}
},
"overrides": []
},
"fill": 1,
"fillGradient": 0,
"grid": {},
"gridPos": {
"h": 7,
"w": 24,
"x": 0,
"y": 0
},
"hiddenSeries": false,
"id": 2,
"isNew": true,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 2,
"links": [],
"nullPointMode": "connected",
"options": {
"dataLinks": []
},
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [
{
"alias": "Unload tile",
"transform": "negative-Y"
}
],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "Load tile",
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"0"
],
"type": "fill"
}
],
"query": "SELECT count(\"title\") FROM \"map_events\" WHERE \"realm\" =~ /$realm$/ AND \"title\" = 'LoadMapTile' AND $timeFilter GROUP BY time($interval) fill(0)",
"rawQuery": true,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": []
},
{
"alias": "Unload tile",
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"query": "SELECT count(\"title\") FROM \"map_events\" WHERE \"realm\" =~ /$realm$/ AND \"title\" = 'UnloadMapTile' AND $timeFilter GROUP BY time($interval) fill(0)",
"rawQuery": true,
"refId": "B",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": []
}
],
"thresholds": [],
"timeFrom": null,
"timeRegions": [],
"timeShift": null,
"title": "Map",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": "Influx",
"editable": true,
"error": false,
"fieldConfig": {
"defaults": {
"custom": {}
},
"overrides": []
},
"fill": 1,
"fillGradient": 0,
"grid": {},
"gridPos": {
"h": 7,
"w": 24,
"x": 0,
"y": 7
},
"hiddenSeries": false,
"id": 1,
"isNew": true,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 2,
"links": [],
"nullPointMode": "connected",
"options": {
"dataLinks": []
},
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "Pathfinding queries",
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"null"
],
"type": "fill"
}
],
"query": "SELECT count(\"title\") FROM \"mmap_events\" WHERE \"realm\" =~ /$realm$/ AND \"title\" = 'CalculatePath' AND $timeFilter GROUP BY time($interval) fill(0)",
"rawQuery": true,
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": []
}
],
"thresholds": [],
"timeFrom": null,
"timeRegions": [],
"timeShift": null,
"title": "MMap",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": null,
"fieldConfig": {
"defaults": {
"custom": {}
},
"overrides": []
},
"fill": 5,
"fillGradient": 0,
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 14
},
"hiddenSeries": false,
"id": 4,
"legend": {
"avg": false,
"current": false,
"hideEmpty": false,
"hideZero": true,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"percentage": false,
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "Map $tag_map_id Instance $tag_map_instanceid",
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"map_id"
],
"type": "tag"
},
{
"params": [
"map_instanceid"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
],
"measurement": "map_creatures",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "realm",
"operator": "=~",
"value": "/^$realm$/"
}
]
}
],
"thresholds": [],
"timeFrom": null,
"timeRegions": [],
"timeShift": null,
"title": "Creatures",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
},
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": null,
"fieldConfig": {
"defaults": {
"custom": {}
},
"overrides": []
},
"fill": 5,
"fillGradient": 0,
"gridPos": {
"h": 8,
"w": 24,
"x": 0,
"y": 22
},
"hiddenSeries": false,
"id": 5,
"legend": {
"avg": false,
"current": false,
"hideEmpty": false,
"hideZero": true,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 1,
"nullPointMode": "null",
"options": {
"dataLinks": []
},
"percentage": false,
"pointradius": 2,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "Map $tag_map_id Instance $tag_map_instanceid",
"groupBy": [
{
"params": [
"$__interval"
],
"type": "time"
},
{
"params": [
"map_id"
],
"type": "tag"
},
{
"params": [
"map_instanceid"
],
"type": "tag"
},
{
"params": [
"none"
],
"type": "fill"
}
],
"measurement": "map_gameobjects",
"orderByTime": "ASC",
"policy": "default",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "realm",
"operator": "=~",
"value": "/^$realm$/"
}
]
}
],
"thresholds": [],
"timeFrom": null,
"timeRegions": [],
"timeShift": null,
"title": "Gameobjects",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "individual"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"label": null,
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
}
],
"refresh": "1m",
"schemaVersion": 25,
"style": "dark",
"tags": [],
"templating": {
"list": [
{
"allFormat": "regex values",
"allValue": null,
"current": {
"text": "Acore",
"value": "Acore"
},
"datasource": "Influx",
"definition": "",
"hide": 0,
"includeAll": false,
"label": null,
"multi": false,
"multiFormat": "regex values",
"name": "realm",
"options": [],
"query": "show tag values from events with key = realm",
"refresh": 1,
"regex": "",
"skipUrlSync": false,
"sort": 0,
"tagValuesQuery": "",
"tags": [],
"tagsQuery": "",
"type": "query",
"useTags": false
}
]
},
"time": {
"from": "now-15m",
"to": "now"
},
"timepicker": {
"now": true,
"refresh_intervals": [
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
],
"time_options": [
"5m",
"15m",
"1h",
"6h",
"12h",
"24h",
"2d",
"7d",
"30d"
]
},
"timezone": "browser",
"title": "Maps, vmaps and mmaps",
"uid": "6IhqWiWGz",
"version": 2
}

280
apps/grafana/3_Network.json Normal file
View File

@ -0,0 +1,280 @@
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": "-- Grafana --",
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
},
{
"datasource": "Influx",
"enable": true,
"iconColor": "#C0C6BE",
"iconSize": 13,
"lineColor": "rgba(255, 96, 96, 0.592157)",
"name": "Global Events",
"query": "select title, text from events where $timeFilter and realm =~ /$realm$/",
"showLine": true,
"textColumn": "text",
"titleColumn": "title"
}
]
},
"editable": true,
"gnetId": null,
"graphTooltip": 0,
"id": 7,
"iteration": 1595939048589,
"links": [],
"panels": [
{
"aliasColors": {},
"bars": false,
"dashLength": 10,
"dashes": false,
"datasource": "Influx",
"editable": true,
"error": false,
"fieldConfig": {
"defaults": {
"custom": {}
},
"overrides": []
},
"fill": 1,
"fillGradient": 0,
"grid": {},
"gridPos": {
"h": 7,
"w": 24,
"x": 0,
"y": 0
},
"hiddenSeries": false,
"id": 1,
"isNew": true,
"legend": {
"avg": false,
"current": false,
"max": false,
"min": false,
"show": true,
"total": false,
"values": false
},
"lines": true,
"linewidth": 2,
"links": [],
"nullPointMode": "connected",
"options": {
"dataLinks": []
},
"percentage": false,
"pointradius": 5,
"points": false,
"renderer": "flot",
"seriesOverrides": [],
"spaceLength": 10,
"stack": false,
"steppedLine": false,
"targets": [
{
"alias": "Processed packets",
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"0"
],
"type": "fill"
}
],
"measurement": "processed_packets",
"query": "SELECT sum(\"value\") FROM \"processed_packets\" WHERE \"realm\" =~ /$realm$/ AND $timeFilter GROUP BY time($interval) fill(0)",
"refId": "A",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "sum"
}
]
],
"tags": [
{
"key": "realm",
"operator": "=~",
"value": "/$realm$/"
}
]
},
{
"alias": "Processed packets / mean per session",
"dsType": "influxdb",
"groupBy": [
{
"params": [
"$interval"
],
"type": "time"
},
{
"params": [
"0"
],
"type": "fill"
}
],
"measurement": "processed_packets",
"query": "SELECT mean(\"value\") FROM \"processed_packets\" WHERE \"realm\" =~ /$realm$/ AND $timeFilter GROUP BY time($interval) fill(0)",
"refId": "B",
"resultFormat": "time_series",
"select": [
[
{
"params": [
"value"
],
"type": "field"
},
{
"params": [],
"type": "mean"
}
]
],
"tags": [
{
"key": "realm",
"operator": "=~",
"value": "/$realm$/"
}
]
}
],
"thresholds": [],
"timeFrom": null,
"timeRegions": [],
"timeShift": null,
"title": "Processed packets",
"tooltip": {
"shared": true,
"sort": 0,
"value_type": "cumulative"
},
"type": "graph",
"xaxis": {
"buckets": null,
"mode": "time",
"name": null,
"show": true,
"values": []
},
"yaxes": [
{
"format": "short",
"logBase": 1,
"max": null,
"min": null,
"show": true
},
{
"format": "short",
"logBase": 1,
"max": null,
"min": null,
"show": true
}
],
"yaxis": {
"align": false,
"alignLevel": null
}
}
],
"refresh": "1m",
"schemaVersion": 25,
"style": "dark",
"tags": [],
"templating": {
"list": [
{
"allFormat": "regex values",
"allValue": null,
"current": {
"text": "Acore",
"value": "Acore"
},
"datasource": "Influx",
"definition": "",
"hide": 0,
"includeAll": false,
"label": null,
"multi": false,
"multiFormat": "regex values",
"name": "realm",
"options": [],
"query": "show tag values from events with key = realm",
"refresh": 1,
"regex": "",
"skipUrlSync": false,
"sort": 0,
"tagValuesQuery": "",
"tags": [],
"tagsQuery": "",
"type": "query",
"useTags": false
}
]
},
"time": {
"from": "now-15m",
"to": "now"
},
"timepicker": {
"now": true,
"refresh_intervals": [
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
],
"time_options": [
"5m",
"15m",
"1h",
"6h",
"12h",
"24h",
"2d",
"7d",
"30d"
]
},
"timezone": "browser",
"title": "Network",
"uid": "_QtkMmWMk",
"version": 2
}

File diff suppressed because it is too large

View File

@ -0,0 +1,253 @@
function inst_configureOS() {
echo "Platform: $OSTYPE"
case "$OSTYPE" in
solaris*) echo "Solaris is not supported yet" ;;
darwin*) source "$AC_PATH_INSTALLER/includes/os_configs/osx.sh" ;;
linux*)
# If $OSDISTRO is set, use this value (from config.sh)
if [ ! -z "$OSDISTRO" ]; then
DISTRO=$OSDISTRO
# If available, use LSB to identify distribution
elif command -v lsb_release >/dev/null 2>&1 ; then
DISTRO=$(lsb_release -is)
# Otherwise, use release info file
else
DISTRO=$(ls -d /etc/[A-Za-z]*[_-][rv]e[lr]* | grep -v "lsb" | cut -d'/' -f3 | cut -d'-' -f1 | cut -d'_' -f1)
fi
case $DISTRO in
# add here distros that are debian- or ubuntu-based
# TODO: find a better way, maybe checking the existence
# of a package manager
"neon" | "ubuntu" | "Ubuntu")
DISTRO="ubuntu"
;;
"debian" | "Debian")
DISTRO="debian"
;;
*)
echo "Distro: $DISTRO, is not supported. If your distribution is based on debian or ubuntu,
please set the 'OSDISTRO' environment variable to one of these distro (you can use config.sh file)"
;;
esac
DISTRO=${DISTRO,,}
echo "Distro: $DISTRO"
# TODO: implement different configurations by distro
source "$AC_PATH_INSTALLER/includes/os_configs/$DISTRO.sh"
;;
*bsd*) echo "BSD is not supported yet" ;;
msys*) source "$AC_PATH_INSTALLER/includes/os_configs/windows.sh" ;;
*) echo "This platform is not supported" ;;
esac
}
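# Example (hypothetical value): a Debian/Ubuntu derivative that is not auto-detected
# can be forced with OSDISTRO=ubuntu, set in config.sh or in the environment, before
# running the installer.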
function inst_updateRepo() {
cd "$AC_PATH_ROOT"
if [ ! -z $INSTALLER_PULL_FROM ]; then
git pull "$ORIGIN_REMOTE" "$INSTALLER_PULL_FROM"
else
git pull "$ORIGIN_REMOTE" $(git rev-parse --abbrev-ref HEAD)
fi
}
function inst_resetRepo() {
cd "$AC_PATH_ROOT"
git reset --hard $(git rev-parse --abbrev-ref HEAD)
git clean -f
}
function inst_compile() {
comp_configure
comp_build
}
function inst_cleanCompile() {
comp_clean
inst_compile
}
function inst_allInOne() {
inst_configureOS
inst_compile
dbasm_import true true true
}
function inst_getVersionBranch() {
local res="master"
local v="not-defined"
local MODULE_MAJOR=0
local MODULE_MINOR=0
local MODULE_PATCH=0
local MODULE_SPECIAL=0;
local ACV_MAJOR=0
local ACV_MINOR=0
local ACV_PATCH=0
local ACV_SPECIAL=0;
local curldata=$(curl -f --silent -H 'Cache-Control: no-cache' "$1" || echo "{}")
local parsed=$(echo "$curldata" | "$AC_PATH_DEPS/jsonpath/JSONPath.sh" -b '$.compatibility.*.[version,branch]')
semverParseInto "$ACORE_VERSION" ACV_MAJOR ACV_MINOR ACV_PATCH ACV_SPECIAL
if [[ ! -z "$parsed" ]]; then
readarray -t vers < <(echo "$parsed")
local idx
res="none"
# since we have the version,branch pairs alternating in a flat, non-associative,
# one-dimensional array, we simulate the association with the length/2 trick
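# e.g. a parsed result of ( "1.0.0" "master" "2.0.0" "v2-branch" ) encodes the pairs
# (1.0.0 -> master) and (2.0.0 -> v2-branch); the values here are illustrative only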
for idx in `seq 0 $((${#vers[*]}/2-1))`; do
semverParseInto "${vers[idx*2]}" MODULE_MAJOR MODULE_MINOR MODULE_PATCH MODULE_SPECIAL
if [[ $MODULE_MAJOR -eq $ACV_MAJOR && $MODULE_MINOR -le $ACV_MINOR ]]; then
res="${vers[idx*2+1]}"
v="${vers[idx*2]}"
fi
done
fi
echo "$v" "$res"
}
function inst_module_search {
local res="$1"
local idx=0;
if [ -z "$1" ]; then
echo "Type what to search or leave blank for full list"
read -p "Insert name: " res
fi
local search="+$res"
echo "Searching $res..."
echo "";
readarray -t MODS < <(curl --silent "https://api.github.com/search/repositories?q=org%3Aazerothcore${search}+fork%3Atrue+topic%3Acore-module+sort%3Astars&type=" \
| "$AC_PATH_DEPS/jsonpath/JSONPath.sh" -b '$.items.*.name')
while (( ${#MODS[@]} > idx )); do
mod="${MODS[idx++]}"
read v b < <(inst_getVersionBranch "https://raw.githubusercontent.com/azerothcore/$mod/master/acore-module.json")
if [[ "$b" != "none" ]]; then
echo "-> $mod (tested with AC version: $v)"
else
echo "-> $mod (no revision available for AC v$AC_VERSION, it could not work!)"
fi
done
echo "";
echo "";
}
function inst_module_install {
local res
if [ -z "$1" ]; then
echo "Type the name of the module to install"
read -p "Insert name: " res
else
res="$1"
fi
read v b < <(inst_getVersionBranch "https://raw.githubusercontent.com/azerothcore/$res/master/acore-module.json")
if [[ "$b" != "none" ]]; then
Joiner:add_repo "https://github.com/azerothcore/$res" "$res" "$b" && echo "Done, please re-run compiling and db assembly. Read instruction on module repository for more information"
else
echo "Cannot install $res module: it doesn't exists or no version compatible with AC v$ACORE_VERSION are available"
fi
echo "";
echo "";
}
function inst_module_update {
local res;
local _tmp;
local branch;
local p;
if [ -z "$1" ]; then
echo "Type the name of the module to update"
read -p "Insert name: " res
else
res="$1"
fi
_tmp=$PWD
if [ -d "$J_PATH_MODULES/$res/" ]; then
read v b < <(inst_getVersionBranch "https://raw.githubusercontent.com/azerothcore/$res/master/acore-module.json")
cd "$J_PATH_MODULES/$res/"
# use current branch if something wrong with json
if [[ "$v" == "none" || "$v" == "not-defined" ]]; then
b=`git rev-parse --abbrev-ref HEAD`
fi
Joiner:upd_repo "https://github.com/azerothcore/$res" "$res" "$b" && echo "Done, please re-run compiling and db assembly" || echo "Cannot update"
cd $_tmp
else
echo "Cannot update! Path doesn't exist"
fi;
echo "";
echo "";
}
function inst_module_remove {
if [ -z "$1" ]; then
echo "Type the name of the module to remove"
read -p "Insert name: " res
else
res="$1"
fi
Joiner:remove "$res" && echo "Done, please re-run compiling" || echo "Cannot remove"
echo "";
echo "";
}
function inst_simple_restarter {
echo "Running $1 ..."
bash "$AC_PATH_APPS/startup-scripts/simple-restarter" "$AC_BINPATH_FULL" "$1"
echo
#disown -a
#jobs -l
}
function inst_download_client_data {
# change the following version when needed
local VERSION=v16
echo "#######################"
echo "Client data downloader"
echo "#######################"
# first check if it's defined in env, otherwise use the default
local path="${DATAPATH:-$AC_BINPATH_FULL}"
local zipPath="${DATAPATH_ZIP:-"$path/data.zip"}"
dataVersionFile="$path/data-version"
[ -f "$dataVersionFile" ] && source "$dataVersionFile"
# create the path if it doesn't exist
mkdir -p "$path"
if [ "$VERSION" == "$INSTALLED_VERSION" ]; then
echo "Data $VERSION already installed. If you want to force the download remove the following file: $dataVersionFile"
return
fi
echo "Downloading client data in: $zipPath ..."
curl -L https://github.com/wowgaming/client-data/releases/download/$VERSION/data.zip > "$zipPath" \
&& echo "unzip downloaded file in $path..." && unzip -q -o "$zipPath" -d "$path/" \
&& echo "Remove downloaded file" && rm "$zipPath" \
&& echo "INSTALLED_VERSION=$VERSION" > "$dataVersionFile"
}
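# Example (hypothetical path): the destination can be overridden before calling, e.g.
#   DATAPATH="/azerothcore/env/dist/data" inst_download_client_data
# which downloads and unpacks the client data into that directory instead of the default.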

View File

@ -0,0 +1,22 @@
[[ ${INSTALLER_GUARDYVAR:-} -eq 1 ]] && return || readonly INSTALLER_GUARDYVAR=1 # include it once
CURRENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd )
source "$CURRENT_PATH/../../bash_shared/includes.sh"
AC_PATH_INSTALLER="$AC_PATH_APPS/installer"
J_PATH="$AC_PATH_DEPS/acore/joiner"
J_PATH_MODULES="$AC_PATH_MODULES"
source "$J_PATH/joiner.sh"
if [ -f "$AC_PATH_INSTALLER/config.sh" ]; then
source "$AC_PATH_INSTALLER/config.sh" # should overwrite previous
fi
source "$AC_PATH_APPS/compiler/includes/includes.sh"
source "$AC_PATH_DEPS/semver_bash/semver.sh"
source "$AC_PATH_INSTALLER/includes/functions.sh"

Some files were not shown because too many files have changed in this diff