diff --git a/.gitignore b/.gitignore index 5774f9e..2fefbea 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,4 @@ -# Cloned repositories (managed by devtools.sh) +# Cloned repositories (managed via repos.conf) teiserver/ bar-lobby/ spads_config_bar/ @@ -8,11 +8,16 @@ BYAR-Chobby bar-db/ bar-live-services/ RecoilEngine +lua-doc-extractor SPADS/ SpringLobbyInterface/ # Personal config overrides repos.local.conf +.env + +# Build artifacts +bar-lua-codemod/target/ # Runtime / editor state tasks/ diff --git a/Justfile b/Justfile new file mode 100644 index 0000000..1472365 --- /dev/null +++ b/Justfile @@ -0,0 +1,18 @@ +set dotenv-load + +mod services 'just/services.just' +mod repos 'just/repos.just' +mod engine 'just/engine.just' +mod setup 'just/setup.just' +mod link 'just/link.just' +mod lua 'just/lua.just' +mod docs 'just/docs.just' +mod bar 'just/bar.just' +mod tei 'just/tei.just' + +default: + @just --list --list-submodules + +reset: + just lua::reset + just docs::reset diff --git a/README.md b/README.md index 47a2864..b323b74 100644 --- a/README.md +++ b/README.md @@ -9,13 +9,13 @@ Everything server-side runs in Docker. The game client runs natively. ```bash git clone https://github.com/thvl3/BAR-Devtools.git cd BAR-Devtools -./devtools.sh init -./devtools.sh up +just setup::init +just services::up ``` -`init` walks you through installing dependencies, cloning repositories, and building Docker images. You only need to run it once. +`setup::init` walks you through installing dependencies, cloning repositories, and building Docker images. You only need to run it once. -`up` starts PostgreSQL and Teiserver. On first run it seeds the database with test data and creates default accounts (~2-3 minutes). Subsequent starts are fast. +`services::up` starts PostgreSQL and Teiserver. On first run it seeds the database with test data and creates default accounts (~2-3 minutes). Subsequent starts are fast. 
Once running: @@ -30,41 +30,113 @@ Once running: ## Requirements -- **Linux** (Arch, Debian/Ubuntu, or Fedora) +- **Linux** (Arch, Debian/Ubuntu, or Fedora) or **macOS** - **Docker** with Compose V2 - **Git** +- **Bash 4+** (Linux ships this; macOS needs `brew install bash`) +- **[just](https://github.com/casey/just)** -- command runner + +```bash +# Install just +pacman -S just # Arch +dnf install just # Fedora +apt install just # Debian/Ubuntu +brew install just # Homebrew + +# macOS only: install modern bash (macOS ships bash 3.2 which is too old) +brew install bash +``` + +Optional: + - **Node.js** (only needed if running bar-lobby) -`./devtools.sh install-deps` will detect your distro and install what's missing. +`just setup::deps` will detect your distro and install what's missing (except `just` itself). ## Commands -### Getting Started +Run `just` with no arguments to list everything: + +``` +$ just +Available recipes: + ... +``` + +### Setup -| Command | Description | -|---------|-------------| -| `init` | Full first-time setup: install deps, clone repos, build images | -| `install-deps` | Install system packages (docker, git, nodejs) | +| Recipe | Description | +|--------|-------------| +| `just setup::init` | Full first-time setup: install deps, clone repos, build images | +| `just setup::deps` | Install system packages (docker, git, nodejs) | +| `just setup::check` | Check prerequisites and build Docker images | ### Services -| Command | Description | -|---------|-------------| -| `up [lobby] [spads]` | Start services (options are additive) | -| `down` | Stop all services | -| `status` | Show running containers | -| `logs [service]` | Tail logs (postgres, teiserver, spads, or all) | -| `lobby` | Start bar-lobby dev server standalone | -| `shell [service]` | Shell into a container (default: teiserver) | -| `reset` | Destroy all data and rebuild from scratch | +| Recipe | Description | +|--------|-------------| +| `just services::up [lobby] [spads]` | 
Start services (options are additive) | +| `just services::down` | Stop all services | +| `just services::status` | Show running containers | +| `just services::logs [service]` | Tail logs (postgres, teiserver, spads, or all) | +| `just services::lobby` | Start bar-lobby dev server standalone | +| `just services::shell [service]` | Shell into a container (default: teiserver) | +| `just services::build` | Build Docker images | +| `just services::reset` | Destroy all data and rebuild from scratch | ### Repositories -| Command | Description | -|---------|-------------| -| `clone [group]` | Clone/update repos. Groups: `core`, `extra`, `all` | -| `repos` | Show status of all configured repositories | -| `update` | Pull latest on all cloned repos (fast-forward only) | +| Recipe | Description | +|--------|-------------| +| `just repos::clone [group]` | Clone/update repos. Groups: `core`, `extra`, `all` | +| `just repos::status` | Show status of all configured repositories | +| `just repos::update` | Pull latest on all cloned repos (fast-forward only) | + +### Engine + +| Recipe | Description | +|--------|-------------| +| `just engine::build [cmake-args]` | Build Recoil engine via docker-build-v2 | + +### Game Directory + +| Recipe | Description | +|--------|-------------| +| `just link::status` | Show symlink status | +| `just link::create ` | Symlink a repo into the game directory (engine, chobby, bar) | + +### Lua Tooling + +| Recipe | Description | +|--------|-------------| +| `just lua::build-lde` | Build lua-doc-extractor from local checkout | +| `just lua::library` | Extract Lua docs from RecoilEngine, copy into BAR submodule | +| `just lua::library-reload` | Generate library then restart LuaLS | + +### Documentation + +| Recipe | Description | +|--------|-------------| +| `just docs::generate` | Generate Lua API doc pages | +| `just docs::server` | Generate + start Hugo dev server | +| `just docs::server-only` | Start Hugo dev server without regenerating | + +### 
BAR (Beyond All Reason) + +| Recipe | Description | +|--------|-------------| +| `just bar::lint` | Lint BAR Lua code (luacheck via lux) | +| `just bar::fmt` | Format BAR Lua code (stylua via lux) | +| `just bar::test` | Run busted unit tests in the BAR container | +| `just bar::integrations` | Run headless integration tests (x86-64 only) | +| `just bar::all` | Run all BAR tests (units + integrations) | +| `just bar::setup-hooks` | Install git pre-commit hook in the BAR repo | + +### Teiserver + +| Recipe | Description | +|--------|-------------| +| `just tei::mix` | Run teiserver mix tests | ## Using Your Own Forks @@ -84,17 +156,27 @@ bar-lobby https://github.com/yourname/bar-lobby.git your-branch core Then clone or re-clone: ```bash -./devtools.sh clone core +just repos::clone core ``` `repos.local.conf` is gitignored so it won't affect anyone else. +### Local paths + +You can also point a repo entry at a local directory instead of cloning. Add a fifth column with the path: + +``` +lua-doc-extractor https://github.com/rhys-vdw/lua-doc-extractor.git main extra ~/code/lua-doc-extractor +``` + +This creates a symlink instead of cloning. 
+ ## Repository Config Format `repos.conf` uses a simple whitespace-delimited format: ``` -# directory url branch group +# directory url branch group [local_path] teiserver https://github.com/beyond-all-reason/teiserver.git master core ``` @@ -102,24 +184,41 @@ teiserver https://github.com/beyond-all-reason/teiserver.git master c - **url** -- git clone URL - **branch** -- branch to checkout - **group** -- `core` (required for the dev stack) or `extra` (optional) +- **local_path** -- (optional) absolute or `~`-relative path to symlink instead of cloning ## Architecture ``` BAR-Devtools/ -├── devtools.sh # Main CLI script -├── repos.conf # Repository sources & branches -├── docker-compose.dev.yml # Service definitions +├── Justfile # Root command runner (lists all modules) +├── just/ +│ ├── services.just # Docker Compose service management +│ ├── repos.just # Git repository operations +│ ├── engine.just # RecoilEngine build +│ ├── setup.just # First-time setup & dependency install +│ ├── link.just # Game directory symlinking +│ ├── lua.just # lua-doc-extractor & Lua library generation +│ ├── docs.just # Hugo documentation server +│ └── test.just # Unit & integration tests +├── scripts/ +│ ├── common.sh # Shared color/logging helpers +│ ├── repos.sh # repos.conf parsing & git operations +│ └── setup.sh # Distro detection, deps, prerequisite checks +├── repos.conf # Repository sources & branches +├── docker-compose.dev.yml # Service definitions ├── docker/ -│ ├── teiserver.dev.Dockerfile # Teiserver dev image (Elixir + Phoenix) -│ ├── teiserver-entrypoint.sh # DB init, seeding, migrations -│ ├── teiserver.dockerignore # Build context optimization -│ ├── setup-spads-bot.exs # Creates SPADS bot account in Teiserver -│ ├── spads-dev-entrypoint.sh # SPADS startup + game data download -│ └── spads_dev.conf # Simplified SPADS config for dev -├── teiserver/ # ← cloned by devtools.sh (gitignored) -├── bar-lobby/ # ← cloned by devtools.sh (gitignored) -└── spads_config_bar/ # ← 
cloned by devtools.sh (gitignored) +│ ├── teiserver.dev.Dockerfile # Teiserver dev image (Elixir + Phoenix) +│ ├── teiserver-entrypoint.sh # DB init, seeding, migrations +│ ├── teiserver.dockerignore # Build context optimization +│ ├── bar.Dockerfile # BAR test environment (Lua 5.1 + lux) +│ ├── setup-spads-bot.exs # Creates SPADS bot account in Teiserver +│ ├── spads-dev-entrypoint.sh # SPADS startup + game data download +│ └── spads_dev.conf # Simplified SPADS config for dev +├── teiserver/ # ← cloned by just repos::clone (gitignored) +├── bar-lobby/ # ← cloned (gitignored) +├── Beyond-All-Reason/ # ← cloned (gitignored) +├── RecoilEngine/ # ← cloned (gitignored) +└── spads_config_bar/ # ← cloned (gitignored) ``` ### What the Docker stack does @@ -130,8 +229,9 @@ BAR-Devtools/ - Seeds fake data (test users, matchmaking data) - Sets up Tachyon OAuth - Creates a `spadsbot` account with Bot/Moderator roles -- **SPADS** (optional, `up spads`) -- Perl autohost using `badosu/spads:latest`. Downloads game data via `pr-downloader` on first run. Connects to Teiserver via Spring protocol on port 8200. +- **SPADS** (optional, `services::up spads`) -- Perl autohost using `badosu/spads:latest`. Downloads game data via `pr-downloader` on first run. Connects to Teiserver via Spring protocol on port 8200. - **bar-lobby** -- Electron/Vue.js game client, runs natively on the host (not in Docker) +- **BAR test runner** (`test` profile) -- Ubuntu container with Lua 5.1 and [lux](https://github.com/lumen-oss/lux) for running busted unit tests against the Beyond-All-Reason codebase ### Ports @@ -148,8 +248,8 @@ BAR-Devtools/ SPADS is optional and started separately because it requires downloading ~300MB of game data on first run. The download depends on external rapid repositories that can be unreliable. 
```bash -./devtools.sh up spads # Start with SPADS -./devtools.sh logs spads # Check SPADS status +just services::up spads # Start with SPADS +just services::logs spads # Check SPADS status ``` The SPADS bot account (`spadsbot` / `password`) is created automatically during Teiserver initialization. @@ -159,21 +259,21 @@ The SPADS bot account (`spadsbot` / `password`) is created automatically during **Port 5432/5433 conflict with host PostgreSQL:** Either stop your local PostgreSQL (`sudo systemctl stop postgresql`) or change the port: ```bash -BAR_POSTGRES_PORT=5434 ./devtools.sh up +BAR_POSTGRES_PORT=5434 just services::up ``` **Teiserver takes forever on first run:** The initial database seeding includes generating fake data. Follow progress with: ```bash -./devtools.sh logs teiserver +just services::logs teiserver ``` **SPADS fails with "No Spring map/mod found":** Game data download may have failed. Check logs and retry: ```bash -./devtools.sh logs spads -./devtools.sh down -./devtools.sh up spads +just services::logs spads +just services::down +just services::up spads ``` **Docker permission denied:** @@ -184,6 +284,6 @@ sudo usermod -aG docker $USER **Nuclear option -- start completely fresh:** ```bash -./devtools.sh reset -./devtools.sh up +just services::reset +just services::up ``` diff --git a/bar-lua-codemod/Cargo.lock b/bar-lua-codemod/Cargo.lock new file mode 100644 index 0000000..696feaa --- /dev/null +++ b/bar-lua-codemod/Cargo.lock @@ -0,0 +1,361 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 4 + +[[package]] +name = "anstream" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "824a212faf96e9acacdbd09febd34438f8f711fb84e09a8916013cd7815ca28d" +dependencies = [ + "anstyle", + "anstyle-parse", + "anstyle-query", + "anstyle-wincon", + "colorchoice", + "is_terminal_polyfill", + "utf8parse", +] + +[[package]] +name = "anstyle" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "940b3a0ca603d1eade50a4846a2afffd5ef57a9feac2c0e2ec2e14f9ead76000" + +[[package]] +name = "anstyle-parse" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52ce7f38b242319f7cabaa6813055467063ecdc9d355bbb4ce0c68908cd8130e" +dependencies = [ + "utf8parse", +] + +[[package]] +name = "anstyle-query" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "anstyle-wincon" +version = "3.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d" +dependencies = [ + "anstyle", + "once_cell_polyfill", + "windows-sys", +] + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "bar-lua-codemod" +version = "0.1.0" +dependencies = [ + "clap", + "full_moon", + "glob", +] + +[[package]] +name = "borsh" +version = "1.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfd1e3f8955a5d7de9fab72fc8373fade9fb8a703968cb200ae3dc6cf08e185a" +dependencies = [ + "bytes", + "cfg_aliases", +] + +[[package]] +name = "bytecount" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "175812e0be2bccb6abe50bb8d566126198344f707e304f45c648fd8f2cc0365e" + +[[package]] +name = "bytes" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" + +[[package]] +name = "cfg-if" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801" + +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "clap" +version = "4.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b193af5b67834b676abd72466a96c1024e6a6ad978a1f484bd90b85c94041351" +dependencies = [ + "clap_builder", + "clap_derive", +] + +[[package]] +name = "clap_builder" +version = "4.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "714a53001bf66416adb0e2ef5ac857140e7dc3a0c48fb28b2f10762fc4b5069f" +dependencies = [ + "anstream", + "anstyle", + "clap_lex", + "strsim", +] + +[[package]] +name = "clap_derive" +version = "4.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1110bd8a634a1ab8cb04345d8d878267d57c3cf1b38d91b71af6686408bbca6a" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "clap_lex" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c8d4a3bb8b1e0c1050499d1815f5ab16d04f0959b233085fb31653fbfc9d98f9" + +[[package]] +name = "colorchoice" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d07550c9036bf2ae0c684c4297d503f838287c83c53686d05370d0e139ae570" + +[[package]] +name = "derive_more" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05" +dependencies = [ + "derive_more-impl", +] + +[[package]] +name = "derive_more-impl" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", + "unicode-xid", +] + +[[package]] +name = "full_moon" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "40373a6bf84c41c6124c01cbedf5ab53d0d468adf1c0d7efd4c3273531fbb609" +dependencies = [ + "bytecount", + "cfg-if", + "derive_more", + "full_moon_derive", + "paste", + "serde", + "smol_str", +] + +[[package]] +name = "full_moon_derive" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "99b4bd12ce56927d1dc5478d21528ea8c4b93ca85ff8f8043b6a5351a2a3c6f7" +dependencies = [ + "indexmap", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "glob" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695" + +[[package]] +name = "once_cell_polyfill" +version = "1.70.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" + +[[package]] +name = "paste" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" + +[[package]] +name = "proc-macro2" +version = "1.0.106" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "serde" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.228" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "smol_str" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4aaa7368fcf4852a4c2dd92df0cace6a71f2091ca0a23391ce7f3a31833f1523" +dependencies = [ + "borsh", + "serde_core", +] + +[[package]] +name = "strsim" +version 
= "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.117" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "unicode-ident" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75" + +[[package]] +name = "unicode-xid" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853" + +[[package]] +name = "utf8parse" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" + +[[package]] +name = "windows-link" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" + +[[package]] +name = "windows-sys" +version = "0.61.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" +dependencies = [ + "windows-link", +] diff --git a/bar-lua-codemod/Cargo.toml b/bar-lua-codemod/Cargo.toml new file mode 100644 index 0000000..21443d5 --- /dev/null +++ b/bar-lua-codemod/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "bar-lua-codemod" +version = "0.1.0" +edition = "2021" + +[dependencies] 
+full_moon = "2.1"
+glob = "0.3"
+clap = { version = "4", features = ["derive"] }
diff --git a/bar-lua-codemod/src/bracket_to_dot.rs b/bar-lua-codemod/src/bracket_to_dot.rs
new file mode 100644
index 0000000..2ef0d24
--- /dev/null
+++ b/bar-lua-codemod/src/bracket_to_dot.rs
@@ -0,0 +1,236 @@
+use full_moon::ast::*;
+use full_moon::tokenizer::*;
+use full_moon::visitors::VisitorMut;
+
+const LUA_RESERVED: &[&str] = &[
+    "and", "break", "do", "else", "elseif", "end", "false", "for", "function", "if", "in",
+    "local", "nil", "not", "or", "repeat", "return", "then", "true", "until", "while",
+];
+
+fn is_convertible_identifier(s: &str) -> bool {
+    let mut chars = s.chars();
+    match chars.next() {
+        Some(c) if c.is_ascii_alphabetic() || c == '_' => {}
+        _ => return false,
+    }
+    chars.all(|c| c.is_ascii_alphanumeric() || c == '_') && !LUA_RESERVED.contains(&s)
+}
+
+fn string_content(expr: &Expression) -> Option<String> {
+    if let Expression::String(token_ref) = expr {
+        let s = token_ref.token().to_string();
+        if (s.starts_with('"') && s.ends_with('"')) || (s.starts_with('\'') && s.ends_with('\'')) {
+            return Some(s[1..s.len() - 1].to_string());
+        }
+    }
+    None
+}
+
+pub struct BracketToDot {
+    pub index_conversions: usize,
+    pub field_conversions: usize,
+    pub skipped_reserved: usize,
+}
+
+impl BracketToDot {
+    pub fn new() -> Self {
+        Self {
+            index_conversions: 0,
+            field_conversions: 0,
+            skipped_reserved: 0,
+        }
+    }
+}
+
+impl VisitorMut for BracketToDot {
+    fn visit_index(&mut self, index: Index) -> Index {
+        if let Index::Brackets {
+            ref brackets,
+            ref expression,
+        } = index
+        {
+            if let Some(name) = string_content(expression) {
+                if is_convertible_identifier(&name) {
+                    self.index_conversions += 1;
+                    let (open, close) = brackets.tokens();
+                    let leading: Vec<Token> = open.leading_trivia().cloned().collect();
+                    let mut trailing: Vec<Token> = close.trailing_trivia().cloned().collect();
+                    // ]then is fine (] is non-alpha), but .guishaderthen merges.
+                    // Inject a space when the bracket had no trailing whitespace.
+                    if trailing.is_empty() {
+                        trailing.push(Token::new(TokenType::Whitespace {
+                            characters: " ".into(),
+                        }));
+                    }
+                    return Index::Dot {
+                        dot: TokenReference::new(
+                            leading,
+                            Token::new(TokenType::Symbol {
+                                symbol: Symbol::Dot,
+                            }),
+                            vec![],
+                        ),
+                        name: TokenReference::new(
+                            vec![],
+                            Token::new(TokenType::Identifier {
+                                identifier: name.into(),
+                            }),
+                            trailing,
+                        ),
+                    };
+                } else if LUA_RESERVED.contains(&name.as_str()) {
+                    self.skipped_reserved += 1;
+                }
+            }
+        }
+        index
+    }
+
+    fn visit_field(&mut self, field: Field) -> Field {
+        if let Field::ExpressionKey {
+            ref brackets,
+            ref key,
+            ref equal,
+            ref value,
+            ..
+        } = field
+        {
+            if let Some(name) = string_content(key) {
+                if is_convertible_identifier(&name) {
+                    self.field_conversions += 1;
+                    let (open, close) = brackets.tokens();
+                    let leading: Vec<Token> = open.leading_trivia().cloned().collect();
+                    let trailing: Vec<Token> = close.trailing_trivia().cloned().collect();
+                    return Field::NameKey {
+                        key: TokenReference::new(
+                            leading,
+                            Token::new(TokenType::Identifier {
+                                identifier: name.into(),
+                            }),
+                            trailing,
+                        ),
+                        equal: equal.clone(),
+                        value: value.clone(),
+                    };
+                } else if LUA_RESERVED.contains(&name.as_str()) {
+                    self.skipped_reserved += 1;
+                }
+            }
+        }
+        field
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use full_moon::{parse, visitors::VisitorMut};
+
+    fn transform(input: &str) -> (String, usize, usize) {
+        let ast = parse(input).expect("parse failed");
+        let mut visitor = BracketToDot::new();
+        let ast = visitor.visit_ast(ast);
+        (
+            ast.to_string(),
+            visitor.index_conversions,
+            visitor.field_conversions,
+        )
+    }
+
+    // Index conversions may inject a trailing space to prevent merging with
+    // the following token (e.g. ]then -> .keythen). Stylua normalizes this.
+ + #[test] + fn index_simple() { + let (out, idx, fld) = transform(r#"local x = t["foo"]"#); + assert_eq!(out, "local x = t.foo "); + assert_eq!(idx, 1); + assert_eq!(fld, 0); + } + + #[test] + fn index_single_quotes() { + let (out, idx, _) = transform("local x = t['bar']"); + assert_eq!(out, "local x = t.bar "); + assert_eq!(idx, 1); + } + + #[test] + fn index_chained() { + let (out, idx, _) = transform(r#"local x = t["a"]["b"]"#); + assert_eq!(out, "local x = t.a .b "); + assert_eq!(idx, 2); + } + + #[test] + fn index_reserved_word_skipped() { + let (out, _, _) = transform(r#"local x = t["end"]"#); + assert_eq!(out, r#"local x = t["end"]"#); + } + + #[test] + fn index_numeric_key_skipped() { + let (out, idx, _) = transform(r#"local x = t["123"]"#); + assert_eq!(out, r#"local x = t["123"]"#); + assert_eq!(idx, 0); + } + + #[test] + fn index_special_chars_skipped() { + let (out, idx, _) = transform(r#"local x = t["foo-bar"]"#); + assert_eq!(out, r#"local x = t["foo-bar"]"#); + assert_eq!(idx, 0); + } + + #[test] + fn field_simple() { + let (out, idx, fld) = transform(r#"local t = { ["foo"] = 1 }"#); + assert_eq!(out, "local t = { foo = 1 }"); + assert_eq!(idx, 0); + assert_eq!(fld, 1); + } + + #[test] + fn field_reserved_word_skipped() { + let (out, _, fld) = transform(r#"local t = { ["end"] = 1 }"#); + assert_eq!(out, r#"local t = { ["end"] = 1 }"#); + assert_eq!(fld, 0); + } + + #[test] + fn mixed_conversions() { + let (out, idx, fld) = transform(r#"t["x"] = { ["y"] = 1 }"#); + assert_eq!(out, "t.x = { y = 1 }"); + assert_eq!(idx, 1); + assert_eq!(fld, 1); + } + + #[test] + fn underscore_identifier() { + let (out, idx, _) = transform(r#"local x = t["_private"]"#); + assert_eq!(out, "local x = t._private "); + assert_eq!(idx, 1); + } + + #[test] + fn no_changes() { + let (out, idx, fld) = transform("local x = t[42]"); + assert_eq!(out, "local x = t[42]"); + assert_eq!(idx, 0); + assert_eq!(fld, 0); + } + + #[test] + fn no_merge_with_following_keyword() { + let 
(out, idx, _) = transform("if force and WG['guishader']then end");
+        assert!(out.contains("WG.guishader then"), "got: {out}");
+        assert_eq!(idx, 1);
+    }
+
+    #[test]
+    fn no_merge_with_following_identifier() {
+        let (out, idx, _) = transform("local x = t['key']or false");
+        assert!(out.contains("t.key or"), "got: {out}");
+        assert_eq!(idx, 1);
+    }
+}
diff --git a/bar-lua-codemod/src/detach_bar_modules.rs b/bar-lua-codemod/src/detach_bar_modules.rs
new file mode 100644
index 0000000..a6d523b
--- /dev/null
+++ b/bar-lua-codemod/src/detach_bar_modules.rs
@@ -0,0 +1,155 @@
+use full_moon::ast::*;
+use full_moon::tokenizer::*;
+use full_moon::visitors::VisitorMut;
+use std::collections::HashSet;
+
+pub struct DetachBarModules {
+    modules: HashSet<String>,
+    pub conversions: usize,
+}
+
+impl DetachBarModules {
+    pub fn new(modules: &[&str]) -> Self {
+        Self {
+            modules: modules.iter().map(|s| s.to_string()).collect(),
+            conversions: 0,
+        }
+    }
+
+    /// If prefix is "Spring" and first suffix is `.ModuleName` where ModuleName
+    /// is in our set, strip the "Spring." prefix by promoting the module name
+    /// to be the new prefix and dropping the dot suffix.
+    fn try_rewrite(
+        &mut self,
+        prefix: &Prefix,
+        suffixes: &[Suffix],
+    ) -> Option<(Prefix, Vec<Suffix>)> {
+        let Prefix::Name(token_ref) = prefix else {
+            return None;
+        };
+        if token_ref.token().to_string() != "Spring" {
+            return None;
+        }
+        let Some(Suffix::Index(Index::Dot { name, .. })) = suffixes.first() else {
+            return None;
+        };
+        let module_name = name.token().to_string();
+        if !self.modules.contains(&module_name) {
+            return None;
+        }
+        self.conversions += 1;
+        let new_prefix = Prefix::Name(TokenReference::new(
+            token_ref.leading_trivia().cloned().collect(),
+            Token::new(TokenType::Identifier {
+                identifier: module_name.as_str().into(),
+            }),
+            name.trailing_trivia().cloned().collect(),
+        ));
+        let remaining = suffixes[1..].to_vec();
+        Some((new_prefix, remaining))
+    }
+}
+
+impl VisitorMut for DetachBarModules {
+    fn visit_function_call(&mut self, call: FunctionCall) -> FunctionCall {
+        let suffixes: Vec<Suffix> = call.suffixes().cloned().collect();
+        if let Some((new_prefix, remaining)) = self.try_rewrite(call.prefix(), &suffixes) {
+            call.with_prefix(new_prefix)
+                .with_suffixes(remaining)
+        } else {
+            call
+        }
+    }
+
+    fn visit_var(&mut self, var: Var) -> Var {
+        match var {
+            Var::Expression(var_expr) => {
+                let suffixes: Vec<Suffix> = var_expr.suffixes().cloned().collect();
+                if let Some((new_prefix, remaining)) =
+                    self.try_rewrite(var_expr.prefix(), &suffixes)
+                {
+                    Var::Expression(Box::new(
+                        var_expr
+                            .with_prefix(new_prefix)
+                            .with_suffixes(remaining),
+                    ))
+                } else {
+                    Var::Expression(var_expr)
+                }
+            }
+            other => other,
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use full_moon::{parse, visitors::VisitorMut};
+
+    const MODULES: &[&str] = &["I18N", "Utilities", "Debug", "Lava"];
+
+    fn transform(input: &str) -> (String, usize) {
+        let ast = parse(input).expect("parse failed");
+        let mut visitor = DetachBarModules::new(MODULES);
+        let ast = visitor.visit_ast(ast);
+        (ast.to_string(), visitor.conversions)
+    }
+
+    #[test]
+    fn simple_call() {
+        let (out, n) = transform("Spring.I18N.translate(key)");
+        assert_eq!(out, "I18N.translate(key)");
+        assert_eq!(n, 1);
+    }
+
+    #[test]
+    fn method_access() {
+        let (out, n) = transform("local x = Spring.Utilities.Round(1.5)");
+        assert_eq!(out, "local x = Utilities.Round(1.5)");
assert_eq!(n, 1); + } + + #[test] + fn var_reference() { + let (out, n) = transform("local u = Spring.Utilities"); + assert_eq!(out, "local u = Utilities"); + assert_eq!(n, 1); + } + + #[test] + fn non_module_unchanged() { + let (out, n) = transform("Spring.GetGameFrame()"); + assert_eq!(out, "Spring.GetGameFrame()"); + assert_eq!(n, 0); + } + + #[test] + fn non_spring_unchanged() { + let (out, n) = transform("Other.I18N.translate(key)"); + assert_eq!(out, "Other.I18N.translate(key)"); + assert_eq!(n, 0); + } + + #[test] + fn preserves_trivia() { + let (out, n) = transform(" Spring.Debug.log(msg) -- log it"); + assert_eq!(out, " Debug.log(msg) -- log it"); + assert_eq!(n, 1); + } + + #[test] + fn assignment_declaration() { + let (out, n) = transform("Spring.I18N = Spring.I18N or VFS.Include('i18n.lua')"); + assert_eq!(out, "I18N = I18N or VFS.Include('i18n.lua')"); + assert_eq!(n, 2); + } + + #[test] + fn multiple_in_one_file() { + let (out, n) = transform("Spring.I18N.t('x')\nSpring.Lava.isActive()"); + assert!(out.contains("I18N.t('x')")); + assert!(out.contains("Lava.isActive()")); + assert_eq!(n, 2); + } +} diff --git a/bar-lua-codemod/src/main.rs b/bar-lua-codemod/src/main.rs new file mode 100644 index 0000000..fcc4390 --- /dev/null +++ b/bar-lua-codemod/src/main.rs @@ -0,0 +1,496 @@ +use clap::{Parser, Subcommand}; +use full_moon::visitors::VisitorMut; +use std::path::PathBuf; +use std::{fs, process}; + +mod bracket_to_dot; +mod detach_bar_modules; +mod rename_aliases; +mod spring_split; + +#[derive(Parser)] +#[command(name = "bar-lua-codemod")] +#[command(about = "AST-based Lua codemod tool for Beyond All Reason")] +struct Cli { + #[command(subcommand)] + command: Commands, +} + +#[derive(Subcommand)] +enum Commands { + /// Convert bracket string access to dot notation (x["y"] -> x.y, ["y"] = -> y =) + BracketToDot { + /// Root directory to process + #[arg(long, default_value = ".")] + path: PathBuf, + + /// Directories to exclude (relative to path, may be 
repeated) + #[arg(long)] + exclude: Vec, + + /// Report changes without writing files + #[arg(long)] + dry_run: bool, + }, + + /// Rename deprecated Spring method aliases to canonical names + RenameAliases { + /// Root directory to process + #[arg(long, default_value = ".")] + path: PathBuf, + + /// Directories to exclude (relative to path, may be repeated) + #[arg(long)] + exclude: Vec, + + /// Report changes without writing files + #[arg(long)] + dry_run: bool, + }, + + /// Detach BAR modules from the Spring table (Spring.I18N -> I18N, etc.) + DetachBarModules { + /// Root directory to process + #[arg(long, default_value = ".")] + path: PathBuf, + + /// Directories to exclude (relative to path, may be repeated) + #[arg(long)] + exclude: Vec, + + /// Report changes without writing files + #[arg(long)] + dry_run: bool, + }, + + /// Replace Spring.X with SpringSynced.X or SpringShared.X based on API stubs + SpringSplit { + /// Root directory to process + #[arg(long, default_value = ".")] + path: PathBuf, + + /// Path to recoil-lua-library/library (contains generated stubs) + #[arg(long)] + library: PathBuf, + + /// Directories to exclude (relative to path, may be repeated) + #[arg(long)] + exclude: Vec, + + /// Report changes without writing files + #[arg(long)] + dry_run: bool, + }, +} + +fn collect_lua_files(root: &PathBuf, excludes: &[String]) -> Vec { + let pattern = format!("{}/**/*.lua", root.display()); + let mut files = Vec::new(); + for entry in glob::glob(&pattern).expect("invalid glob pattern") { + if let Ok(path) = entry { + let rel = path.strip_prefix(root).unwrap_or(&path); + let excluded = excludes + .iter() + .any(|ex| rel.starts_with(ex)); + if !excluded { + files.push(path); + } + } + } + files.sort(); + files +} + +fn format_num(n: usize) -> String { + let s = n.to_string(); + let bytes = s.as_bytes(); + let len = bytes.len(); + let mut result = String::new(); + for (i, &b) in bytes.iter().enumerate() { + if i > 0 && (len - i) % 3 == 0 { + 
result.push(','); + } + result.push(b as char); + } + result +} + +fn run_bracket_to_dot(root: &PathBuf, excludes: &[String], dry_run: bool) { + let files = collect_lua_files(root, excludes); + let total_files = files.len(); + + if total_files == 0 { + eprintln!("No .lua files found under {}", root.display()); + process::exit(1); + } + + let mut files_changed: usize = 0; + let mut total_index: usize = 0; + let mut total_field: usize = 0; + let mut total_skipped: usize = 0; + let mut errors: usize = 0; + let mut per_file: Vec<(PathBuf, usize, usize)> = Vec::new(); + + for file_path in &files { + let code = match fs::read_to_string(file_path) { + Ok(c) => c, + Err(e) => { + eprintln!(" error reading {}: {}", file_path.display(), e); + errors += 1; + continue; + } + }; + + let ast = match full_moon::parse(&code) { + Ok(a) => a, + Err(e) => { + eprintln!(" parse error in {}: {:?}", file_path.display(), e); + errors += 1; + continue; + } + }; + + let mut visitor = bracket_to_dot::BracketToDot::new(); + let new_ast = visitor.visit_ast(ast); + + if visitor.index_conversions > 0 || visitor.field_conversions > 0 { + if !dry_run { + if let Err(e) = fs::write(file_path, new_ast.to_string()) { + eprintln!(" error writing {}: {}", file_path.display(), e); + errors += 1; + continue; + } + } + files_changed += 1; + total_index += visitor.index_conversions; + total_field += visitor.field_conversions; + total_skipped += visitor.skipped_reserved; + per_file.push(( + file_path.clone(), + visitor.index_conversions, + visitor.field_conversions, + )); + } + } + + let total_conversions = total_index + total_field; + + if dry_run { + println!("bar-lua-codemod bracket-to-dot (DRY RUN):"); + } else { + println!("bar-lua-codemod bracket-to-dot results:"); + } + println!(" Files scanned: {:>30}", format_num(total_files)); + println!(" Files changed: {:>30}", format_num(files_changed)); + println!( + " Index conversions (x[\"y\"] -> x.y): {:>8}", + format_num(total_index) + ); + println!( + " 
Field conversions ([\"y\"] = -> y =): {:>8}", + format_num(total_field) + ); + println!( + " Total conversions: {:>8}", + format_num(total_conversions) + ); + println!( + " Skipped (reserved words): {:>8}", + format_num(total_skipped) + ); + println!( + " Errors (parse failures): {:>8}", + format_num(errors) + ); + + if !per_file.is_empty() { + per_file.sort_by(|a, b| (b.1 + b.2).cmp(&(a.1 + a.2))); + println!(); + println!("Top files by conversion count:"); + for (path, idx, fld) in per_file.iter().take(20) { + let rel = path.strip_prefix(root).unwrap_or(path); + println!(" {:<60} {:>5}", rel.display(), idx + fld); + } + } +} + +const BAR_ALIASES: &[(&str, &str)] = &[ + ("GetMyTeamID", "GetLocalTeamID"), + ("GetMyAllyTeamID", "GetLocalAllyTeamID"), + ("GetMyPlayerID", "GetLocalPlayerID"), +]; + +fn run_rename_aliases(root: &PathBuf, excludes: &[String], dry_run: bool) { + let files = collect_lua_files(root, excludes); + let total_files = files.len(); + + if total_files == 0 { + eprintln!("No .lua files found under {}", root.display()); + process::exit(1); + } + + let mut files_changed: usize = 0; + let mut total_conversions: usize = 0; + let mut errors: usize = 0; + let mut per_file: Vec<(PathBuf, usize)> = Vec::new(); + + for file_path in &files { + let code = match fs::read_to_string(file_path) { + Ok(c) => c, + Err(e) => { + eprintln!(" error reading {}: {}", file_path.display(), e); + errors += 1; + continue; + } + }; + + let ast = match full_moon::parse(&code) { + Ok(a) => a, + Err(e) => { + eprintln!(" parse error in {}: {:?}", file_path.display(), e); + errors += 1; + continue; + } + }; + + let mut visitor = rename_aliases::RenameAliases::new(BAR_ALIASES); + let new_ast = visitor.visit_ast(ast); + + if visitor.conversions > 0 { + if !dry_run { + if let Err(e) = fs::write(file_path, new_ast.to_string()) { + eprintln!(" error writing {}: {}", file_path.display(), e); + errors += 1; + continue; + } + } + files_changed += 1; + total_conversions += 
visitor.conversions; + per_file.push((file_path.clone(), visitor.conversions)); + } + } + + if dry_run { + println!("bar-lua-codemod rename-aliases (DRY RUN):"); + } else { + println!("bar-lua-codemod rename-aliases results:"); + } + println!(" Files scanned: {:>7}", format_num(total_files)); + println!(" Files changed: {:>7}", format_num(files_changed)); + println!(" Conversions: {:>7}", format_num(total_conversions)); + println!(" Errors: {:>7}", format_num(errors)); + + if !per_file.is_empty() { + per_file.sort_by(|a, b| b.1.cmp(&a.1)); + println!(); + println!("Top files by conversion count:"); + for (path, count) in per_file.iter().take(20) { + let rel = path.strip_prefix(root).unwrap_or(path); + println!(" {:<60} {:>5}", rel.display(), count); + } + } +} + +const BAR_MODULES: &[&str] = &["I18N", "Utilities", "Debug", "Lava", "GetModOptionsCopy"]; + +fn run_detach_bar_modules(root: &PathBuf, excludes: &[String], dry_run: bool) { + let files = collect_lua_files(root, excludes); + let total_files = files.len(); + + if total_files == 0 { + eprintln!("No .lua files found under {}", root.display()); + process::exit(1); + } + + let mut files_changed: usize = 0; + let mut total_conversions: usize = 0; + let mut errors: usize = 0; + let mut per_file: Vec<(PathBuf, usize)> = Vec::new(); + + for file_path in &files { + let code = match fs::read_to_string(file_path) { + Ok(c) => c, + Err(e) => { + eprintln!(" error reading {}: {}", file_path.display(), e); + errors += 1; + continue; + } + }; + + let ast = match full_moon::parse(&code) { + Ok(a) => a, + Err(e) => { + eprintln!(" parse error in {}: {:?}", file_path.display(), e); + errors += 1; + continue; + } + }; + + let mut visitor = detach_bar_modules::DetachBarModules::new(BAR_MODULES); + let new_ast = visitor.visit_ast(ast); + + if visitor.conversions > 0 { + if !dry_run { + if let Err(e) = fs::write(file_path, new_ast.to_string()) { + eprintln!(" error writing {}: {}", file_path.display(), e); + errors += 1; + 
continue; + } + } + files_changed += 1; + total_conversions += visitor.conversions; + per_file.push((file_path.clone(), visitor.conversions)); + } + } + + if dry_run { + println!("bar-lua-codemod detach-bar-modules (DRY RUN):"); + } else { + println!("bar-lua-codemod detach-bar-modules results:"); + } + println!(" Modules detached: {:>7}", BAR_MODULES.join(", ")); + println!(" Files scanned: {:>7}", format_num(total_files)); + println!(" Files changed: {:>7}", format_num(files_changed)); + println!(" Conversions: {:>7}", format_num(total_conversions)); + println!(" Errors: {:>7}", format_num(errors)); + + if !per_file.is_empty() { + per_file.sort_by(|a, b| b.1.cmp(&a.1)); + println!(); + println!("Top files by conversion count:"); + for (path, count) in per_file.iter().take(20) { + let rel = path.strip_prefix(root).unwrap_or(path); + println!(" {:<60} {:>5}", rel.display(), count); + } + } +} + +fn run_spring_split(root: &PathBuf, library: &PathBuf, excludes: &[String], dry_run: bool) { + let mapping = spring_split::build_mapping(library); + let mapping_size = mapping.len(); + eprintln!( + " Loaded {} method mappings from {}", + mapping_size, + library.display() + ); + + if mapping_size == 0 { + eprintln!("No method mappings found -- check --library path"); + process::exit(1); + } + + let files = collect_lua_files(root, excludes); + let total_files = files.len(); + + if total_files == 0 { + eprintln!("No .lua files found under {}", root.display()); + process::exit(1); + } + + let mut files_changed: usize = 0; + let mut total_conversions: usize = 0; + let mut total_unmapped: usize = 0; + let mut errors: usize = 0; + let mut per_file: Vec<(PathBuf, usize)> = Vec::new(); + let mut all_unmapped: std::collections::HashMap = std::collections::HashMap::new(); + + for file_path in &files { + let code = match fs::read_to_string(file_path) { + Ok(c) => c, + Err(e) => { + eprintln!(" error reading {}: {}", file_path.display(), e); + errors += 1; + continue; + } + }; + + let 
ast = match full_moon::parse(&code) { + Ok(a) => a, + Err(e) => { + eprintln!(" parse error in {}: {:?}", file_path.display(), e); + errors += 1; + continue; + } + }; + + let mut visitor = spring_split::SpringSplit::new(mapping.clone()); + let new_ast = visitor.visit_ast(ast); + + total_unmapped += visitor.unmapped; + for (name, count) in &visitor.unmapped_names { + *all_unmapped.entry(name.clone()).or_insert(0) += count; + } + + if visitor.conversions > 0 { + if !dry_run { + if let Err(e) = fs::write(file_path, new_ast.to_string()) { + eprintln!(" error writing {}: {}", file_path.display(), e); + errors += 1; + continue; + } + } + files_changed += 1; + total_conversions += visitor.conversions; + per_file.push((file_path.clone(), visitor.conversions)); + } + } + + if dry_run { + println!("bar-lua-codemod spring-split (DRY RUN):"); + } else { + println!("bar-lua-codemod spring-split results:"); + } + println!(" Method mappings loaded: {:>8}", format_num(mapping_size)); + println!(" Files scanned: {:>8}", format_num(total_files)); + println!(" Files changed: {:>8}", format_num(files_changed)); + println!(" Spring.X -> Specific.X conversions: {:>8}", format_num(total_conversions)); + println!(" Unmapped Spring.X references: {:>8}", format_num(total_unmapped)); + println!(" Errors (parse failures): {:>8}", format_num(errors)); + + if !per_file.is_empty() { + per_file.sort_by(|a, b| b.1.cmp(&a.1)); + println!(); + println!("Top files by conversion count:"); + for (path, count) in per_file.iter().take(20) { + let rel = path.strip_prefix(root).unwrap_or(path); + println!(" {:<60} {:>5}", rel.display(), count); + } + } + + if !all_unmapped.is_empty() { + let mut unmapped_sorted: Vec<_> = all_unmapped.into_iter().collect(); + unmapped_sorted.sort_by(|a, b| b.1.cmp(&a.1)); + println!(); + println!("Unmapped Spring.X methods ({} unique):", unmapped_sorted.len()); + for (name, count) in &unmapped_sorted { + println!(" {:<50} {:>5}", name, count); + } + } +} + +fn main() { + 
let cli = Cli::parse(); + match cli.command { + Commands::BracketToDot { + path, + exclude, + dry_run, + } => run_bracket_to_dot(&path, &exclude, dry_run), + Commands::RenameAliases { + path, + exclude, + dry_run, + } => run_rename_aliases(&path, &exclude, dry_run), + Commands::DetachBarModules { + path, + exclude, + dry_run, + } => run_detach_bar_modules(&path, &exclude, dry_run), + Commands::SpringSplit { + path, + library, + exclude, + dry_run, + } => run_spring_split(&path, &library, &exclude, dry_run), + } +} diff --git a/bar-lua-codemod/src/rename_aliases.rs b/bar-lua-codemod/src/rename_aliases.rs new file mode 100644 index 0000000..d324f61 --- /dev/null +++ b/bar-lua-codemod/src/rename_aliases.rs @@ -0,0 +1,139 @@ +use full_moon::ast::*; +use full_moon::tokenizer::*; +use full_moon::visitors::VisitorMut; +use std::collections::HashMap; + +pub struct RenameAliases { + aliases: HashMap, + pub conversions: usize, +} + +impl RenameAliases { + pub fn new(aliases: &[(&str, &str)]) -> Self { + Self { + aliases: aliases + .iter() + .map(|(old, new)| (old.to_string(), new.to_string())) + .collect(), + conversions: 0, + } + } + + /// If prefix is "Spring" and first suffix is `.OldName` where OldName is + /// in our alias map, rewrite the suffix to use the canonical name. 
+ fn try_rewrite(&mut self, prefix: &Prefix, suffixes: &[Suffix]) -> Option> { + let Prefix::Name(token_ref) = prefix else { + return None; + }; + if token_ref.token().to_string() != "Spring" { + return None; + } + let Some(Suffix::Index(Index::Dot { dot, name })) = suffixes.first() else { + return None; + }; + let method_name = name.token().to_string(); + let canonical = self.aliases.get(&method_name)?; + self.conversions += 1; + let new_name = TokenReference::new( + name.leading_trivia().cloned().collect(), + Token::new(TokenType::Identifier { + identifier: canonical.as_str().into(), + }), + name.trailing_trivia().cloned().collect(), + ); + let mut new_suffixes = vec![Suffix::Index(Index::Dot { + dot: dot.clone(), + name: new_name, + })]; + new_suffixes.extend(suffixes[1..].iter().cloned()); + Some(new_suffixes) + } +} + +impl VisitorMut for RenameAliases { + fn visit_function_call(&mut self, call: FunctionCall) -> FunctionCall { + let suffixes: Vec = call.suffixes().cloned().collect(); + if let Some(new_suffixes) = self.try_rewrite(call.prefix(), &suffixes) { + call.with_suffixes(new_suffixes) + } else { + call + } + } + + fn visit_var(&mut self, var: Var) -> Var { + match var { + Var::Expression(var_expr) => { + let suffixes: Vec = var_expr.suffixes().cloned().collect(); + if let Some(new_suffixes) = self.try_rewrite(var_expr.prefix(), &suffixes) { + Var::Expression(Box::new(var_expr.with_suffixes(new_suffixes))) + } else { + Var::Expression(var_expr) + } + } + other => other, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use full_moon::{parse, visitors::VisitorMut}; + + const ALIASES: &[(&str, &str)] = &[ + ("GetMyTeamID", "GetLocalTeamID"), + ("GetMyAllyTeamID", "GetLocalAllyTeamID"), + ("GetMyPlayerID", "GetLocalPlayerID"), + ]; + + fn transform(input: &str) -> (String, usize) { + let ast = parse(input).expect("parse failed"); + let mut visitor = RenameAliases::new(ALIASES); + let ast = visitor.visit_ast(ast); + (ast.to_string(), 
visitor.conversions) + } + + #[test] + fn renames_call() { + let (out, n) = transform("local t = Spring.GetMyTeamID()"); + assert_eq!(out, "local t = Spring.GetLocalTeamID()"); + assert_eq!(n, 1); + } + + #[test] + fn renames_var_reference() { + let (out, n) = transform("local fn = Spring.GetMyAllyTeamID"); + assert_eq!(out, "local fn = Spring.GetLocalAllyTeamID"); + assert_eq!(n, 1); + } + + #[test] + fn non_alias_unchanged() { + let (out, n) = transform("Spring.GetGameFrame()"); + assert_eq!(out, "Spring.GetGameFrame()"); + assert_eq!(n, 0); + } + + #[test] + fn non_spring_unchanged() { + let (out, n) = transform("Other.GetMyTeamID()"); + assert_eq!(out, "Other.GetMyTeamID()"); + assert_eq!(n, 0); + } + + #[test] + fn preserves_trivia() { + let (out, n) = transform(" local id = Spring.GetMyPlayerID() -- get player"); + assert_eq!(out, " local id = Spring.GetLocalPlayerID() -- get player"); + assert_eq!(n, 1); + } + + #[test] + fn multiple_in_one_file() { + let input = "local a = Spring.GetMyTeamID()\nlocal b = Spring.GetMyAllyTeamID()"; + let (out, n) = transform(input); + assert!(out.contains("Spring.GetLocalTeamID()")); + assert!(out.contains("Spring.GetLocalAllyTeamID()")); + assert_eq!(n, 2); + } +} diff --git a/bar-lua-codemod/src/spring_split.rs b/bar-lua-codemod/src/spring_split.rs new file mode 100644 index 0000000..8a6322c --- /dev/null +++ b/bar-lua-codemod/src/spring_split.rs @@ -0,0 +1,237 @@ +use full_moon::ast::*; +use full_moon::tokenizer::*; +use full_moon::visitors::VisitorMut; +use std::collections::HashMap; +use std::path::Path; + +/// Scan all .lua files under `library_dir` for method declarations and build +/// a mapping of method name -> target class (SpringSynced, SpringUnsynced, +/// or SpringShared). First declaration wins if the same name appears in +/// multiple classes. 
+pub fn build_mapping(library_dir: &Path) -> HashMap { + let mut mapping = HashMap::new(); + let pattern = format!("{}/**/*.lua", library_dir.display()); + for entry in glob::glob(&pattern).expect("invalid glob pattern") { + let path = match entry { + Ok(p) => p, + Err(_) => continue, + }; + let content = match std::fs::read_to_string(&path) { + Ok(c) => c, + Err(_) => continue, + }; + const CLASSES: &[&str] = &["SpringSynced", "SpringUnsynced", "SpringShared"]; + for line in content.lines() { + let trimmed = line.trim(); + for &class in CLASSES { + let fn_prefix = format!("function {}.", class); + if let Some(rest) = trimmed.strip_prefix(&fn_prefix) { + if let Some(name) = rest.split('(').next() { + let name = name.trim(); + if !name.is_empty() { + mapping.entry(name.to_string()).or_insert_with(|| class.to_string()); + } + } + break; + } + let assign_prefix = format!("{}.", class); + if let Some(rest) = trimmed.strip_prefix(&assign_prefix) { + if rest.contains(" = ") { + if let Some(name) = rest.split_whitespace().next() { + mapping.entry(name.to_string()).or_insert_with(|| class.to_string()); + } + } + break; + } + } + } + } + mapping +} + +pub struct SpringSplit { + mapping: HashMap, + pub conversions: usize, + pub unmapped: usize, + pub unmapped_names: HashMap, +} + +impl SpringSplit { + pub fn new(mapping: HashMap) -> Self { + Self { + mapping, + conversions: 0, + unmapped: 0, + unmapped_names: HashMap::new(), + } + } + + fn try_rewrite(&mut self, prefix: &Prefix, first_suffix: Option<&Suffix>) -> Option { + let Prefix::Name(token_ref) = prefix else { + return None; + }; + if token_ref.token().to_string() != "Spring" { + return None; + } + let Some(Suffix::Index(Index::Dot { name, .. 
})) = first_suffix else { + return None; + }; + let method_name = name.token().to_string(); + if let Some(class_name) = self.mapping.get(&method_name) { + self.conversions += 1; + let new_token = TokenReference::new( + token_ref.leading_trivia().cloned().collect(), + Token::new(TokenType::Identifier { + identifier: class_name.as_str().into(), + }), + token_ref.trailing_trivia().cloned().collect(), + ); + Some(Prefix::Name(new_token)) + } else { + self.unmapped += 1; + *self.unmapped_names.entry(method_name).or_insert(0) += 1; + None + } + } +} + +impl VisitorMut for SpringSplit { + fn visit_function_call(&mut self, call: FunctionCall) -> FunctionCall { + let first = { + let mut iter = call.suffixes(); + iter.next().cloned() + }; + if let Some(new_prefix) = self.try_rewrite(call.prefix(), first.as_ref()) { + call.with_prefix(new_prefix) + } else { + call + } + } + + fn visit_var(&mut self, var: Var) -> Var { + match var { + Var::Expression(var_expr) => { + let first = { + let mut iter = var_expr.suffixes(); + iter.next().cloned() + }; + if let Some(new_prefix) = self.try_rewrite(var_expr.prefix(), first.as_ref()) { + Var::Expression(Box::new(var_expr.with_prefix(new_prefix))) + } else { + Var::Expression(var_expr) + } + } + other => other, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use full_moon::{parse, visitors::VisitorMut}; + + fn transform(input: &str, mapping: HashMap) -> (String, usize) { + let ast = parse(input).expect("parse failed"); + let mut visitor = SpringSplit::new(mapping); + let ast = visitor.visit_ast(ast); + (ast.to_string(), visitor.conversions) + } + + fn shared(methods: &[&str]) -> HashMap { + methods + .iter() + .map(|m| (m.to_string(), "SpringShared".to_string())) + .collect() + } + + fn synced(methods: &[&str]) -> HashMap { + methods + .iter() + .map(|m| (m.to_string(), "SpringSynced".to_string())) + .collect() + } + + #[test] + fn shared_call() { + let (out, n) = transform("local x = Spring.GetGameFrame()", 
shared(&["GetGameFrame"])); + assert_eq!(out, "local x = SpringShared.GetGameFrame()"); + assert_eq!(n, 1); + } + + #[test] + fn synced_call() { + let (out, n) = transform( + r#"Spring.CreateUnit("armcom", 0, 0, 0, 0, 0)"#, + synced(&["CreateUnit"]), + ); + assert_eq!(out, r#"SpringSynced.CreateUnit("armcom", 0, 0, 0, 0, 0)"#); + assert_eq!(n, 1); + } + + #[test] + fn var_reference() { + let (out, n) = transform("local fn = Spring.Echo", shared(&["Echo"])); + assert_eq!(out, "local fn = SpringShared.Echo"); + assert_eq!(n, 1); + } + + #[test] + fn unmapped_unchanged() { + let (out, n) = transform("Spring.UnknownMethod()", HashMap::new()); + assert_eq!(out, "Spring.UnknownMethod()"); + assert_eq!(n, 0); + } + + #[test] + fn not_spring_unchanged() { + let (out, n) = transform("Other.GetGameFrame()", shared(&["GetGameFrame"])); + assert_eq!(out, "Other.GetGameFrame()"); + assert_eq!(n, 0); + } + + #[test] + fn chained_access() { + let (out, n) = transform( + "Spring.MoveCtrl.SetLimits(unitID, 0, 0)", + synced(&["MoveCtrl"]), + ); + assert_eq!(out, "SpringSynced.MoveCtrl.SetLimits(unitID, 0, 0)"); + assert_eq!(n, 1); + } + + #[test] + fn multiple_in_one_file() { + let mut mapping = HashMap::new(); + mapping.insert("Echo".to_string(), "SpringShared".to_string()); + mapping.insert("CreateUnit".to_string(), "SpringSynced".to_string()); + let (out, n) = transform( + "Spring.Echo(\"hi\")\nSpring.CreateUnit(\"a\", 0, 0, 0, 0, 0)", + mapping, + ); + assert!(out.contains("SpringShared.Echo")); + assert!(out.contains("SpringSynced.CreateUnit")); + assert_eq!(n, 2); + } + + fn unsynced(methods: &[&str]) -> HashMap { + methods + .iter() + .map(|m| (m.to_string(), "SpringUnsynced".to_string())) + .collect() + } + + #[test] + fn unsynced_call() { + let (out, n) = transform("Spring.SendCommands(cmd)", unsynced(&["SendCommands"])); + assert_eq!(out, "SpringUnsynced.SendCommands(cmd)"); + assert_eq!(n, 1); + } + + #[test] + fn preserves_trivia() { + let (out, n) = transform(" 
Spring.GetGameFrame() -- get frame", shared(&["GetGameFrame"])); + assert_eq!(out, " SpringShared.GetGameFrame() -- get frame"); + assert_eq!(n, 1); + } +} diff --git a/devtools.sh b/devtools.sh index b0c933d..43e32ae 100755 --- a/devtools.sh +++ b/devtools.sh @@ -1,996 +1,32 @@ #!/usr/bin/env bash set -euo pipefail -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -COMPOSE_FILE="$SCRIPT_DIR/docker-compose.dev.yml" -COMPOSE="docker compose -f $COMPOSE_FILE" -LOBBY_DIR="$SCRIPT_DIR/bar-lobby" -REPOS_CONF="$SCRIPT_DIR/repos.conf" -REPOS_LOCAL="$SCRIPT_DIR/repos.local.conf" - -detect_game_dir() { - if [ -n "${BAR_GAME_DIR:-}" ]; then - echo "$BAR_GAME_DIR" - return 0 - fi - local xdg_state="${XDG_STATE_HOME:-$HOME/.local/state}" - local candidate="$xdg_state/Beyond All Reason" - if [ -d "$candidate" ]; then - echo "$candidate" - return 0 - fi - return 1 -} - -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -BLUE='\033[0;34m' -CYAN='\033[0;36m' -BOLD='\033[1m' -DIM='\033[2m' -NC='\033[0m' - -info() { echo -e "${BLUE}[info]${NC} $*"; } -ok() { echo -e "${GREEN}[ok]${NC} $*"; } -warn() { echo -e "${YELLOW}[warn]${NC} $*"; } -err() { echo -e "${RED}[error]${NC} $*"; } -step() { echo -e "${CYAN}[step]${NC} $*"; } - -# =========================================================================== -# Distro detection -# =========================================================================== - -detect_distro() { - if command -v pacman &>/dev/null; then - echo "arch" - elif command -v apt-get &>/dev/null; then - echo "debian" - elif command -v dnf &>/dev/null; then - echo "fedora" - else - echo "unknown" - fi -} - -pkg_install_cmd() { - case "$(detect_distro)" in - arch) echo "sudo pacman -S --needed" ;; - debian) echo "sudo apt install -y" ;; - fedora) echo "sudo dnf install -y" ;; - *) echo "" ;; - esac -} - -# Map generic package names to distro-specific ones -pkg_name() { - local generic="$1" - local distro - distro="$(detect_distro)" - case 
"${distro}:${generic}" in - arch:docker) echo "docker" ;; - arch:docker-compose) echo "docker-compose" ;; - arch:git) echo "git" ;; - arch:nodejs) echo "nodejs npm" ;; - debian:docker) echo "docker.io" ;; - debian:docker-compose) echo "docker-compose-plugin" ;; - debian:git) echo "git" ;; - debian:nodejs) echo "nodejs npm" ;; - fedora:docker) echo "docker-ce docker-ce-cli containerd.io" ;; - fedora:docker-compose) echo "docker-compose-plugin" ;; - fedora:git) echo "git" ;; - fedora:nodejs) echo "nodejs npm" ;; - *) echo "$generic" ;; - esac -} - -# =========================================================================== -# Prerequisite checks -# =========================================================================== - -check_git() { - if ! command -v git &>/dev/null; then - err "git is not installed." - return 1 - fi - ok "git $(git --version | awk '{print $3}') detected" -} - -check_docker() { - if ! command -v docker &>/dev/null; then - err "Docker is not installed." - return 1 - fi - if ! docker info &>/dev/null; then - err "Docker daemon is not running or current user lacks permissions." - echo "" - echo " Start the daemon: sudo systemctl start docker" - echo " Enable on boot: sudo systemctl enable docker" - echo " Add yourself: sudo usermod -aG docker \$USER (then re-login)" - echo "" - return 1 - fi - if ! docker compose version &>/dev/null; then - err "Docker Compose V2 plugin is not installed." - return 1 - fi - ok "Docker $(docker --version | awk '{print $3}' | tr -d ',') + Compose V2 detected" -} - -check_node() { - if ! command -v node &>/dev/null; then - warn "Node.js not found (needed for bar-lobby only)." 
- return 1 - fi - ok "Node.js $(node --version) detected" -} - -check_ports() { - local pg_port="${BAR_POSTGRES_PORT:-5433}" - local ports=(4000 "$pg_port" 8200 8201 8888) - local conflict=0 - for port in "${ports[@]}"; do - if ss -tlnp 2>/dev/null | grep -q ":${port} "; then - warn "Port ${port} is already in use" - conflict=1 - fi - done - if [ "$conflict" -eq 1 ]; then - warn "Some ports are in use. Services binding to those ports may fail to start." - else - ok "Required ports available (4000, ${pg_port}, 8200, 8201, 8888)" - fi -} - -check_prerequisites() { - echo -e "${BOLD}Checking prerequisites...${NC}" - echo "" - local failed=0 - check_git || failed=1 - check_docker || failed=1 - check_node || true - check_ports - echo "" - if [ "$failed" -ne 0 ]; then - err "Missing required prerequisites. Run './devtools.sh install-deps' or fix manually." - return 1 - fi -} - -# =========================================================================== -# Repository management -# =========================================================================== - -# Parse repos.conf (with repos.local.conf overrides) into parallel arrays. 
-# Populates: REPO_DIRS[], REPO_URLS[], REPO_BRANCHES[], REPO_GROUPS[], REPO_LOCAL_PATHS[] -declare -a REPO_DIRS=() REPO_URLS=() REPO_BRANCHES=() REPO_GROUPS=() REPO_LOCAL_PATHS=() - -load_repos_conf() { - REPO_DIRS=(); REPO_URLS=(); REPO_BRANCHES=(); REPO_GROUPS=(); REPO_LOCAL_PATHS=() - local -A seen=() - - _parse_conf() { - local file="$1" - [ -f "$file" ] || return 0 - while IFS= read -r line || [ -n "$line" ]; do - line="${line%%#*}" # strip comments - line="$(echo "$line" | xargs 2>/dev/null || true)" # trim whitespace - [ -z "$line" ] && continue - local dir url branch group local_path - read -r dir url branch group local_path <<< "$line" - [ -z "$dir" ] || [ -z "$url" ] && continue - branch="${branch:-master}" - group="${group:-extra}" - # Expand ~ in local_path - local_path="${local_path/#\~/$HOME}" - seen[$dir]="$url $branch $group $local_path" - done < "$file" - } - - _parse_conf "$REPOS_CONF" - _parse_conf "$REPOS_LOCAL" # local overrides win - - local dir - for dir in "${!seen[@]}"; do - local url branch group local_path - read -r url branch group local_path <<< "${seen[$dir]}" - REPO_DIRS+=("$dir") - REPO_URLS+=("$url") - REPO_BRANCHES+=("$branch") - REPO_GROUPS+=("$group") - REPO_LOCAL_PATHS+=("$local_path") - done -} - -clone_or_update_repo() { - local dir="$1" url="$2" branch="$3" local_path="${4:-}" target="$SCRIPT_DIR/$dir" - - if [ -n "$local_path" ]; then - if [ ! -d "$local_path" ]; then - warn " ${dir}: local path does not exist: ${local_path}" - return 1 - fi - if [ -L "$target" ]; then - local current_link - current_link="$(readlink "$target")" - if [ "$current_link" = "$local_path" ]; then - ok " ${dir}: linked -> ${local_path}" - else - warn " ${dir}: symlink points to ${current_link}, config says ${local_path}" - info " ${dir}: updating symlink..." 
- rm "$target" - ln -s "$local_path" "$target" - ok " ${dir}: linked -> ${local_path}" - fi - elif [ -d "$target" ]; then - warn " ${dir}: exists as a real directory but config says link to ${local_path}" - warn " ${dir}: remove it manually to use the local path" - else - ln -s "$local_path" "$target" - ok " ${dir}: linked -> ${local_path}" - fi - return 0 - fi - - if [ -d "$target/.git" ]; then - local current_url - current_url="$(git -C "$target" remote get-url origin 2>/dev/null || true)" - if [ "$current_url" != "$url" ] && [ -n "$current_url" ]; then - warn " ${dir}: origin is ${current_url}" - warn " ${dir}: config says ${url}" - warn " ${dir}: add to repos.local.conf to set your preferred remote" - fi - info " ${dir}: fetching latest..." - git -C "$target" fetch origin --quiet 2>/dev/null || warn " ${dir}: fetch failed (offline?)" - local current_branch - current_branch="$(git -C "$target" branch --show-current 2>/dev/null)" - if [ -n "$current_branch" ] && [ "$current_branch" != "$branch" ]; then - info " ${dir}: on branch '${current_branch}' (config says '${branch}')" - fi - else - info " ${dir}: cloning ${url} (branch: ${branch})..." 
- git clone --branch "$branch" "$url" "$target" 2>&1 | sed 's/^/ /' - fi -} - -cmd_clone() { - local group_filter="${1:-all}" - - load_repos_conf - - if [ "${#REPO_DIRS[@]}" -eq 0 ]; then - err "No repositories found in repos.conf" - exit 1 - fi - - echo -e "${BOLD}=== Cloning / Updating Repositories ===${NC}" - echo "" - - if [ -f "$REPOS_LOCAL" ]; then - info "Using overrides from repos.local.conf" - echo "" - fi - - local i cloned=0 updated=0 skipped=0 linked=0 - for i in "${!REPO_DIRS[@]}"; do - local dir="${REPO_DIRS[$i]}" - local url="${REPO_URLS[$i]}" - local branch="${REPO_BRANCHES[$i]}" - local group="${REPO_GROUPS[$i]}" - local local_path="${REPO_LOCAL_PATHS[$i]}" - - if [ "$group_filter" != "all" ] && [ "$group" != "$group_filter" ]; then - skipped=$((skipped + 1)) - continue - fi - - if [ -n "$local_path" ]; then - clone_or_update_repo "$dir" "$url" "$branch" "$local_path" - linked=$((linked + 1)) - elif [ -d "$SCRIPT_DIR/$dir/.git" ]; then - clone_or_update_repo "$dir" "$url" "$branch" - updated=$((updated + 1)) - else - clone_or_update_repo "$dir" "$url" "$branch" - cloned=$((cloned + 1)) - fi - done - - echo "" - local summary="${cloned} cloned, ${updated} updated, ${skipped} skipped" - [ "$linked" -gt 0 ] && summary+=", ${linked} linked" - ok "Repos: ${summary}" -} - -cmd_repos() { - load_repos_conf - - echo -e "${BOLD}=== Repository Status ===${NC}" - echo "" - printf " ${DIM}%-24s %-8s %-18s %s${NC}\n" "DIRECTORY" "GROUP" "BRANCH" "STATUS" - echo " $(printf '%.0s-' {1..80})" - - local i - for i in "${!REPO_DIRS[@]}"; do - local dir="${REPO_DIRS[$i]}" - local url="${REPO_URLS[$i]}" - local branch="${REPO_BRANCHES[$i]}" - local group="${REPO_GROUPS[$i]}" - local local_path="${REPO_LOCAL_PATHS[$i]}" - local target="$SCRIPT_DIR/$dir" - - local status current_branch - if [ -L "$target" ]; then - local link_dest - link_dest="$(readlink "$target")" - if [ -d "$target/.git" ]; then - current_branch="$(git -C "$target" branch --show-current 2>/dev/null || 
echo "detached")" - local dirty="" - if ! git -C "$target" diff --quiet 2>/dev/null || ! git -C "$target" diff --cached --quiet 2>/dev/null; then - dirty=" ${YELLOW}*dirty*${NC}" - fi - status="${CYAN}local${NC}${dirty} -> ${link_dest}" - else - status="${RED}broken link${NC} -> ${link_dest}" - current_branch="-" - fi - elif [ -d "$target/.git" ]; then - current_branch="$(git -C "$target" branch --show-current 2>/dev/null || echo "detached")" - local dirty="" - if ! git -C "$target" diff --quiet 2>/dev/null || ! git -C "$target" diff --cached --quiet 2>/dev/null; then - dirty=" ${YELLOW}*dirty*${NC}" - fi - if [ "$current_branch" = "$branch" ]; then - status="${GREEN}ok${NC}${dirty}" - else - status="${YELLOW}branch: ${current_branch}${NC}${dirty}" - fi - else - status="${RED}missing${NC}" - current_branch="-" - fi - - printf " %-24s %-8s %-18s %b\n" "$dir" "$group" "$current_branch" "$status" - done - echo "" -} - -# =========================================================================== -# Dependency installation -# =========================================================================== - -cmd_install_deps() { - echo -e "${BOLD}=== Install System Dependencies ===${NC}" - echo "" - - local distro - distro="$(detect_distro)" - local install_cmd - install_cmd="$(pkg_install_cmd)" - - if [ "$distro" = "unknown" ] || [ -z "$install_cmd" ]; then - err "Unsupported distro. Install these manually: git, docker, docker-compose, nodejs, npm" - exit 1 - fi - - info "Detected distro: ${BOLD}${distro}${NC}" - echo "" - - local missing=() - - if ! command -v git &>/dev/null; then - missing+=("git") - fi - if ! command -v docker &>/dev/null; then - missing+=("docker") - fi - if ! docker compose version &>/dev/null 2>&1; then - missing+=("docker-compose") - fi - if ! command -v node &>/dev/null; then - missing+=("nodejs") - fi - - if [ "${#missing[@]}" -eq 0 ]; then - ok "All dependencies already installed." - echo "" - - if ! 
docker info &>/dev/null; then - warn "Docker is installed but the daemon isn't running or you lack permissions." - echo "" - echo " sudo systemctl start docker" - echo " sudo systemctl enable docker" - echo " sudo usermod -aG docker \$USER # then re-login" - echo "" - fi - return 0 - fi - - local packages="" - for dep in "${missing[@]}"; do - packages+=" $(pkg_name "$dep")" - done - - info "Missing: ${missing[*]}" - info "Will run: ${install_cmd}${packages}" - echo "" - - read -rp "Install now? [Y/n] " confirm - if [[ "$confirm" =~ ^[Nn]$ ]]; then - echo "Skipped. Install manually and retry." - return 1 - fi - - $install_cmd $packages - - echo "" - - if [[ " ${missing[*]} " == *" docker "* ]]; then - info "Enabling and starting Docker daemon..." - sudo systemctl enable --now docker 2>/dev/null || true - - if ! groups | grep -qw docker; then - info "Adding $USER to the docker group (re-login required)..." - sudo usermod -aG docker "$USER" - warn "You need to log out and back in for Docker group membership to take effect." - warn "After re-login, run: ./devtools.sh init" - return 0 - fi - fi - - ok "Dependencies installed successfully." -} - -# =========================================================================== -# Docker helpers -# =========================================================================== - -install_dockerignore() { - local target="$SCRIPT_DIR/teiserver/.dockerignore" - local source="$SCRIPT_DIR/docker/teiserver.dockerignore" - if [ -f "$source" ] && [ ! -f "$target" ]; then - cp "$source" "$target" - info "Installed .dockerignore for teiserver build context" - fi -} - -cmd_build() { - install_dockerignore - - info "Building Docker images..." - info " - Teiserver: compiling Elixir deps + generating TLS certs" - info " - SPADS: pulling pre-built image (badosu/spads:latest)" - echo "" - $COMPOSE build teiserver - $COMPOSE --profile spads pull spads - echo "" - ok "Images built successfully." 
-} - -# =========================================================================== -# Engine -# =========================================================================== - -cmd_engine() { - local subcmd="${1:-}" - case "$subcmd" in - build) - shift - local build_script="$SCRIPT_DIR/RecoilEngine/docker-build-v2/build.sh" - if [ ! -f "$build_script" ]; then - err "RecoilEngine not found. Clone it first: ./devtools.sh clone extra" - exit 1 - fi - exec "$build_script" "$@" - ;; - *) - err "Usage: ./devtools.sh engine build [args...]" - echo "" - echo " Wraps RecoilEngine/docker-build-v2/build.sh with full flag pass-through." - echo "" - echo " Examples:" - echo " ./devtools.sh engine build linux" - echo " ./devtools.sh engine build linux -DCMAKE_BUILD_TYPE=Release" - echo " ./devtools.sh engine build linux -DCMAKE_BUILD_TYPE=Release -DTRACY_ENABLE=ON" - echo " ./devtools.sh engine build --help" - exit 1 - ;; - esac -} - -# =========================================================================== -# Game directory linking -# =========================================================================== - -cmd_link() { - local target="${1:-}" - local game_dir - game_dir="$(detect_game_dir 2>/dev/null)" || true - - if [ -z "$target" ]; then - echo -e "${BOLD}=== Symlink Status ===${NC}" - echo "" - if [ -z "$game_dir" ]; then - warn "Game directory not found. Set BAR_GAME_DIR env var or install BAR to the default location." 
- echo "" - return 0 - fi - info "Game directory: ${game_dir}" - echo "" - - local -A link_map=( - [engine]="$game_dir/engine/local-build" - [chobby]="$game_dir/games/BYAR-Chobby" - [bar]="$game_dir/games/Beyond-All-Reason" - ) - for name in engine chobby bar; do - local link_path="${link_map[$name]}" - if [ -L "$link_path" ]; then - local link_target - link_target="$(readlink -f "$link_path" 2>/dev/null || echo "?")" - printf " %-10s ${GREEN}linked${NC} -> %s\n" "$name" "$link_target" - elif [ -e "$link_path" ]; then - printf " %-10s ${YELLOW}exists (not a symlink)${NC} at %s\n" "$name" "$link_path" - else - printf " %-10s ${DIM}not linked${NC}\n" "$name" - fi - done - echo "" - return 0 - fi - - if [ -z "$game_dir" ]; then - err "Game directory not found. Set BAR_GAME_DIR env var or install BAR to the default location." - exit 1 - fi - - local source_path link_path - case "$target" in - engine) - source_path="$SCRIPT_DIR/RecoilEngine/build-linux/install" - link_path="$game_dir/engine/local-build" - ;; - chobby) - source_path="$SCRIPT_DIR/BYAR-Chobby" - link_path="$game_dir/games/BYAR-Chobby" - ;; - bar) - source_path="$SCRIPT_DIR/Beyond-All-Reason" - link_path="$game_dir/games/Beyond-All-Reason" - ;; - *) - err "Unknown link target: $target" - echo " Valid targets: engine, chobby, bar" - exit 1 - ;; - esac - - if [ ! -e "$source_path" ] && [ ! -L "$source_path" ]; then - err "Source not found: $source_path" - if [ "$target" = "engine" ]; then - echo " Build the engine first: ./devtools.sh engine build linux" - else - echo " Clone the repo first: ./devtools.sh clone extra" - fi - exit 1 - fi - - if [ -L "$link_path" ]; then - info "Replacing existing symlink at $link_path" - rm "$link_path" - elif [ -e "$link_path" ]; then - warn "$link_path already exists and is not a symlink. Skipping." - warn "Remove it manually if you want to replace it." 
- return 1 - fi - - mkdir -p "$(dirname "$link_path")" - ln -s "$source_path" "$link_path" - ok "Linked $target: $link_path -> $source_path" -} - -# =========================================================================== -# Main commands -# =========================================================================== - -cmd_init() { - echo -e "${BOLD}==========================================${NC}" - echo -e "${BOLD} BAR Dev Environment - First Time Setup${NC}" - echo -e "${BOLD}==========================================${NC}" - echo "" - - step "1/5 Checking & installing dependencies" - echo "" - local deps_ok=0 - if check_git &>/dev/null && check_docker &>/dev/null; then - deps_ok=1 - ok "Core dependencies (git, docker) already installed." - check_node || true - else - cmd_install_deps || { err "Dependency installation failed. Fix and retry."; exit 1; } - deps_ok=1 - fi - echo "" - - step "2/5 Cloning repositories" - echo "" - if [ ! -f "$REPOS_CONF" ]; then - err "repos.conf not found at: $REPOS_CONF" - exit 1 - fi - cmd_clone core - echo "" - - read -rp "Also clone extra repositories (game engine, SPADS source, infra)? [y/N] " extras - if [[ "$extras" =~ ^[Yy]$ ]]; then - cmd_clone extra - echo "" - fi - - step "3/5 Building Docker images" - echo "" - cmd_build - echo "" - - local do_build_engine=0 - if [ -d "$SCRIPT_DIR/RecoilEngine/docker-build-v2" ]; then - step "4/5 Engine build" - echo "" - read -rp "Build engine from source? [y/N] " build_engine - if [[ "$build_engine" =~ ^[Yy]$ ]]; then - do_build_engine=1 - info "Building Recoil engine (this may take a while)..." - "$SCRIPT_DIR/RecoilEngine/docker-build-v2/build.sh" linux - fi - echo "" - else - step "4/5 Engine build" - echo "" - info "RecoilEngine not cloned -- skipping. 
Clone with: ./devtools.sh clone extra" - echo "" - fi - - step "5/5 Symlinks to game directory" - echo "" - local game_dir - game_dir="$(detect_game_dir 2>/dev/null)" || true - if [ -z "$game_dir" ]; then - info "No game directory detected. Set BAR_GAME_DIR to enable linking." - echo "" - else - local available=() - if [ -d "$SCRIPT_DIR/RecoilEngine" ]; then - available+=("engine") - fi - if [ -d "$SCRIPT_DIR/BYAR-Chobby" ]; then - available+=("chobby") - fi - if [ -d "$SCRIPT_DIR/Beyond-All-Reason" ]; then - available+=("bar") - fi - - if [ "${#available[@]}" -gt 0 ]; then - echo " Available repos to symlink into $game_dir:" - for name in "${available[@]}"; do - case "$name" in - engine) echo -e " ${BOLD}engine${NC} -> $game_dir/engine/local-build/" ;; - chobby) echo -e " ${BOLD}chobby${NC} -> $game_dir/games/BYAR-Chobby/" ;; - bar) echo -e " ${BOLD}bar${NC} -> $game_dir/games/Beyond-All-Reason/" ;; - esac - done - echo "" - warn "This will replace any existing directories at these paths with symlinks." - read -rp "Symlink all? [y/N] " do_link - if [[ "$do_link" =~ ^[Yy]$ ]]; then - BAR_GAME_DIR="$game_dir" - for name in "${available[@]}"; do - cmd_link "$name" - done - fi - else - info "No linkable repos cloned yet." - fi - fi - echo "" - - echo -e "${BOLD}=== Setup Complete ===${NC}" - echo "" - echo " Your workspace is ready. Next steps:" - echo "" - echo -e " ${BOLD}./devtools.sh up${NC} Start Teiserver + PostgreSQL" - echo -e " ${BOLD}./devtools.sh up lobby${NC} ...and launch bar-lobby" - echo -e " ${BOLD}./devtools.sh up spads${NC} ...and start SPADS autohost" - echo -e " ${BOLD}./devtools.sh engine build${NC} Build the Recoil engine" - echo -e " ${BOLD}./devtools.sh link${NC} Show symlink status" - echo -e " ${BOLD}./devtools.sh repos${NC} Show repository status" - echo "" - echo " To use your own forks, copy repos.conf to repos.local.conf" - echo " and edit the URLs/branches. 
Then run: ./devtools.sh clone" - echo "" -} - -cmd_setup() { - echo -e "${BOLD}=== BAR Dev Environment Setup ===${NC}" - echo "" - check_prerequisites || exit 1 - - local missing_core=0 - load_repos_conf - for i in "${!REPO_DIRS[@]}"; do - if [ "${REPO_GROUPS[$i]}" = "core" ] && [ ! -d "$SCRIPT_DIR/${REPO_DIRS[$i]}/.git" ]; then - missing_core=1 - break - fi - done - - if [ "$missing_core" -eq 1 ]; then - warn "Core repositories are missing. Cloning them now..." - echo "" - cmd_clone core - echo "" - fi - - cmd_build - - echo "" - echo -e " Next steps:" - echo -e " ${BOLD}./devtools.sh up${NC} Start all services" - echo -e " ${BOLD}./devtools.sh up lobby${NC} Start all services + bar-lobby" - echo "" -} - -cmd_up() { - local start_lobby=0 - local with_spads=0 - for arg in "$@"; do - case "$arg" in - lobby|--lobby) start_lobby=1 ;; - spads|--spads) with_spads=1 ;; - esac - done - - install_dockerignore - - if [ "$with_spads" -eq 1 ]; then - info "Starting PostgreSQL, Teiserver, and SPADS..." - $COMPOSE --profile spads up -d --build - else - info "Starting PostgreSQL and Teiserver..." - $COMPOSE up -d --build - fi - - echo "" - info "Waiting for Teiserver to become healthy (first run takes several minutes)..." - echo " Follow progress: ./devtools.sh logs teiserver" - echo "" - - local attempts=0 - local max_attempts=120 - while [ $attempts -lt $max_attempts ]; do - local health - health=$($COMPOSE ps teiserver --format '{{.Health}}' 2>/dev/null || echo "unknown") - case "$health" in - healthy) - ok "Teiserver is healthy!" - break - ;; - unhealthy) - err "Teiserver failed to start. Check logs: ./devtools.sh logs teiserver" - exit 1 - ;; - *) - sleep 5 - attempts=$((attempts + 1)) - if [ $((attempts % 6)) -eq 0 ]; then - info "Still waiting... (${attempts}/${max_attempts}) - health: ${health}" - fi - ;; - esac - done - - if [ $attempts -ge $max_attempts ]; then - err "Timed out waiting for Teiserver. 
Check logs: ./devtools.sh logs teiserver" - exit 1 - fi - - echo "" - echo -e "${BOLD}=== Services Running ===${NC}" - echo "" - echo -e " ${GREEN}Teiserver Web UI${NC} http://localhost:4000" - echo -e " ${GREEN}Teiserver HTTPS${NC} https://localhost:8888" - echo -e " ${GREEN}Spring Protocol${NC} localhost:8200 (TCP) / :8201 (TLS)" - echo -e " ${GREEN}PostgreSQL${NC} localhost:${BAR_POSTGRES_PORT:-5433}" - echo "" - echo -e " ${BOLD}Login:${NC} root@localhost / password" - echo -e " ${BOLD}SPADS bot:${NC} spadsbot / password" - if [ "$with_spads" -eq 1 ]; then - echo "" - echo -e " SPADS is starting (check: ./devtools.sh logs spads)" - fi - echo "" - - if [ "$start_lobby" -eq 1 ]; then - cmd_lobby - fi -} - -cmd_down() { - info "Stopping all services..." - $COMPOSE --profile spads down - ok "All services stopped." -} - -cmd_status() { - echo -e "${BOLD}=== Service Status ===${NC}" - echo "" - $COMPOSE --profile spads ps -a -} - -cmd_logs() { - local service="${1:-}" - if [ -z "$service" ]; then - $COMPOSE --profile spads logs -f --tail=100 - else - $COMPOSE --profile spads logs -f --tail=100 "$service" - fi -} - -cmd_lobby() { - if [ ! -d "$LOBBY_DIR" ]; then - err "bar-lobby directory not found at: $LOBBY_DIR" - err "Run './devtools.sh clone' to clone repositories first." - exit 1 - fi - - if ! command -v node &>/dev/null; then - err "Node.js is required for bar-lobby. Run './devtools.sh install-deps'." - exit 1 - fi - - info "Installing bar-lobby dependencies..." - cd "$LOBBY_DIR" - npm install - - info "Starting bar-lobby dev server..." - echo " (Ctrl+C to stop the lobby; Docker services keep running)" - echo "" - - __NV_PRIME_RENDER_OFFLOAD=1 \ - __GLX_VENDOR_LIBRARY_NAME=nvidia \ - LC_CTYPE=C \ - npm start -- -- --no-sandbox -} - -cmd_reset() { - echo -e "${YELLOW}${BOLD}This will destroy all data (database, SPADS state, engine cache).${NC}" - read -rp "Are you sure? [y/N] " confirm - if [[ ! "$confirm" =~ ^[Yy]$ ]]; then - echo "Aborted." 
- exit 0 - fi - - info "Stopping services and removing volumes..." - $COMPOSE --profile spads down -v - - info "Rebuilding images from scratch..." - $COMPOSE build --no-cache teiserver - $COMPOSE --profile spads pull spads - - ok "Reset complete. Run './devtools.sh up' to start fresh." -} - -cmd_shell() { - local service="${1:-teiserver}" - info "Opening shell in ${service}..." - $COMPOSE --profile spads exec "$service" bash -} - -cmd_update() { - echo -e "${BOLD}=== Updating All Repositories ===${NC}" - echo "" - load_repos_conf - - local i - for i in "${!REPO_DIRS[@]}"; do - local dir="${REPO_DIRS[$i]}" - local target="$SCRIPT_DIR/$dir" - if [ -d "$target/.git" ]; then - local branch - branch="$(git -C "$target" branch --show-current 2>/dev/null)" - info "${dir}: pulling ${branch}..." - git -C "$target" pull --ff-only 2>&1 | sed 's/^/ /' || warn " ${dir}: pull failed (conflicts?)" - fi - done - echo "" - ok "Update complete." -} - -# =========================================================================== -# Help -# =========================================================================== - -show_help() { - echo -e "${BOLD}BAR Development Environment${NC}" - echo "" - echo "Usage: ./devtools.sh [args]" - echo "" - echo -e "${BOLD}Getting Started (new developer):${NC}" - echo " init Full first-time setup: install deps, clone repos, build images" - echo " install-deps Install system packages (docker, git, nodejs)" - echo "" - echo -e "${BOLD}Services:${NC}" - echo " setup Check prerequisites and build Docker images" - echo " up [options] Start services. 
Options: lobby, spads" - echo " down Stop all services" - echo " status Show running services" - echo " logs [service] Tail logs (postgres, teiserver, spads, or all)" - echo " lobby Start bar-lobby dev server" - echo " reset Destroy all data and rebuild from scratch" - echo " shell [svc] Open a shell in a container (default: teiserver)" - echo "" - echo -e "${BOLD}Engine:${NC}" - echo " engine build [args] Build Recoil engine via docker-build-v2" - echo "" - echo -e "${BOLD}Game Directory:${NC}" - echo " link [target] Symlink repos into game directory (engine, chobby, bar)" - echo " With no target, shows status of all links" - echo "" - echo -e "${BOLD}Repositories:${NC}" - echo " clone [group] Clone/update repos (group: core, extra, or all)" - echo " repos Show status of all configured repositories" - echo " update Pull latest on all cloned repositories" - echo "" - echo -e "${BOLD}Examples:${NC}" - echo " ./devtools.sh init # New developer? Start here" - echo " ./devtools.sh up # Start postgres + teiserver" - echo " ./devtools.sh up lobby # Start stack + bar-lobby" - echo " ./devtools.sh up spads lobby # Start everything" - echo " ./devtools.sh engine build linux # Build engine for linux" - echo " ./devtools.sh link # Show symlink status" - echo " ./devtools.sh link engine # Symlink engine to game dir" - echo " ./devtools.sh repos # Check repo status" - echo " ./devtools.sh clone extra # Clone optional repos" - echo " ./devtools.sh logs teiserver # Follow Teiserver logs" - echo "" - echo -e "${BOLD}Configuration:${NC}" - echo " repos.conf Default repository URLs and branches" - echo " repos.local.conf Personal overrides (forks, branches) -- gitignored" - echo " BAR_GAME_DIR Env var: path to BAR game data directory (auto-detected if unset)" - echo "" - echo " To use your own fork of teiserver:" - echo " cp repos.conf repos.local.conf" - echo " # Edit repos.local.conf: change teiserver URL to your fork" - echo " ./devtools.sh clone core" - echo "" - echo -e 
"${BOLD}Docker Services:${NC}" - echo " postgres PostgreSQL 16 database" - echo " teiserver Elixir lobby server (HTTP :4000, Spring :8200/:8201)" - echo " spads Perl autohost (optional, needs game data)" - echo " bar-lobby Electron game client (runs natively, not in Docker)" - echo "" -} - -# =========================================================================== -# Dispatch -# =========================================================================== - -case "${1:-help}" in - init) cmd_init ;; - install-deps) cmd_install_deps ;; - setup) cmd_setup ;; - up) shift; cmd_up "$@" ;; - down) cmd_down ;; - status) cmd_status ;; - logs) cmd_logs "${2:-}" ;; - lobby) cmd_lobby ;; - reset) cmd_reset ;; - shell) cmd_shell "${2:-teiserver}" ;; - clone) cmd_clone "${2:-all}" ;; - repos) cmd_repos ;; - update) cmd_update ;; - build) cmd_build ;; - engine) shift; cmd_engine "$@" ;; - link) cmd_link "${2:-}" ;; - help|--help|-h) show_help ;; - *) err "Unknown command: $1"; echo ""; show_help; exit 1 ;; -esac +cat <<'EOF' +devtools.sh has been replaced by just recipes. + +Install just: + Arch: pacman -S just + Fedora: dnf install just + Debian/Ubuntu: apt install just + +Then run `just` to see all available commands. 
+
+Command mapping:
+  ./devtools.sh init             -> just setup::init
+  ./devtools.sh install-deps     -> just setup::deps
+  ./devtools.sh up [lobby|spads] -> just services::up [lobby|spads]
+  ./devtools.sh down             -> just services::down
+  ./devtools.sh status           -> just services::status
+  ./devtools.sh logs [service]   -> just services::logs [service]
+  ./devtools.sh lobby            -> just services::lobby
+  ./devtools.sh shell [service]  -> just services::shell [service]
+  ./devtools.sh build            -> just services::build
+  ./devtools.sh reset            -> just services::reset
+  ./devtools.sh clone [group]    -> just repos::clone [group]
+  ./devtools.sh repos            -> just repos::status
+  ./devtools.sh update           -> just repos::update
+  ./devtools.sh engine build     -> just engine::build
+  ./devtools.sh link             -> just link::status
+  ./devtools.sh link <target>    -> just link::create <target>
+EOF
+exit 1
diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml
index 94603dc..1a8cb62 100644
--- a/docker-compose.dev.yml
+++ b/docker-compose.dev.yml
@@ -78,6 +78,27 @@ services:
       - spads_cache:/opt/spads/var
       - spring_engines:/spring-engines
 
+  bar:
+    build:
+      context: . 
+ dockerfile: docker/bar.Dockerfile + profiles: ["test"] + volumes: + - ./Beyond-All-Reason:/bar:z + - /bar/.lux + working_dir: /bar + + recoil-docs: + build: + context: ./RecoilEngine/doc/site + platform: linux/amd64 + profiles: ["docs"] + volumes: + - ./RecoilEngine:/recoil:z + working_dir: /recoil/doc/site + ports: + - "1313:1313" + volumes: pgdata: devtools_state: diff --git a/docker/bar.Dockerfile b/docker/bar.Dockerfile new file mode 100644 index 0000000..26cb8f8 --- /dev/null +++ b/docker/bar.Dockerfile @@ -0,0 +1,16 @@ +FROM ubuntu:24.04 +ARG LUX_VERSION=latest +RUN apt-get update && apt-get install -y --no-install-recommends \ + lua5.1 liblua5.1-dev libreadline-dev \ + build-essential git ca-certificates curl libgpgme11t64 jq \ + && DEB_ARCH=$(dpkg --print-architecture) \ + && DEB_URL=$(curl -fsSL "https://api.github.com/repos/lumen-oss/lux/releases/${LUX_VERSION}" \ + | jq -r --arg arch "$DEB_ARCH" '.assets[] | select(.name | test("_" + $arch + "\\.deb$")) | .browser_download_url') \ + && curl -fsSL "$DEB_URL" -o /tmp/lux.deb \ + && dpkg -i /tmp/lux.deb \ + && rm /tmp/lux.deb \ + && apt-get purge -y jq && apt-get autoremove -y \ + && rm -rf /var/lib/apt/lists/* +RUN lx install-lua \ + && ln -sf /usr/bin/lua5.1 /root/.local/share/lux/tree/5.1/.lua/bin/lua +WORKDIR /bar diff --git a/just/bar.just b/just/bar.just new file mode 100644 index 0000000..69b5dea --- /dev/null +++ b/just/bar.just @@ -0,0 +1,180 @@ +set export + +DEVTOOLS_DIR := justfile_directory() +COMPOSE_FILE := DEVTOOLS_DIR / "docker-compose.dev.yml" +COMPOSE := "docker compose -f " + COMPOSE_FILE +BAR_DIR := DEVTOOLS_DIR / "Beyond-All-Reason" +INTEGRATION_COMPOSE := "docker compose -f " + BAR_DIR / "tools" / "headless_testing" / "docker-compose.yml" +CODEMOD_DIR := DEVTOOLS_DIR / "bar-lua-codemod" +CODEMOD_BIN := CODEMOD_DIR / "target" / "release" / "bar-lua-codemod" +LUALS_VERSION := "3.17.1" +LUALS_DIR := DEVTOOLS_DIR / ".devtools" / "lua-language-server" +LUALS_BIN := LUALS_DIR / "bin" / 
"lua-language-server" + +[private] +require-bar: + #!/usr/bin/env bash + if [ ! -d "{{BAR_DIR}}" ]; then + echo "Error: Beyond-All-Reason is not cloned." >&2 + echo "Run: just repos::clone extra" >&2 + exit 1 + fi + +[private] +require-luals: + #!/usr/bin/env bash + [ -x "{{LUALS_BIN}}" ] && exit 0 + set -euo pipefail + source "$DEVTOOLS_DIR/scripts/common.sh" + step "lua-language-server not found, downloading v{{LUALS_VERSION}}..." + mkdir -p "{{LUALS_DIR}}" + ARCH="$(uname -m)" + case "$ARCH" in + x86_64) PLAT="linux-x64" ;; + aarch64) PLAT="linux-arm64" ;; + *) err "Unsupported arch: $ARCH"; exit 1 ;; + esac + curl -fsSL "https://github.com/LuaLS/lua-language-server/releases/download/${LUALS_VERSION}/lua-language-server-${LUALS_VERSION}-${PLAT}.tar.gz" \ + | tar xz -C "{{LUALS_DIR}}" + ok "Downloaded lua-language-server v{{LUALS_VERSION}}" + +# Type-check BAR Lua code (LuaLS diagnostics) +check *args: require-bar require-luals + #!/usr/bin/env bash + set -euo pipefail + source "$DEVTOOLS_DIR/scripts/common.sh" + step "Running LuaLS type check..." + cd "$BAR_DIR" + "{{LUALS_BIN}}" --check=. --checklevel=Warning --logpath=/tmp/luals-check {{args}} + ok "Type check complete" + +# Lint BAR Lua code (luacheck via lux) +lint *args: require-bar + #!/usr/bin/env bash + set -euo pipefail + source "$DEVTOOLS_DIR/scripts/common.sh" + enter_distrobox + (cd "$BAR_DIR" && lx --lua-version 5.1 lint {{args}}) + +# Format BAR Lua code (stylua via lux) +fmt *args: require-bar + #!/usr/bin/env bash + set -euo pipefail + source "$DEVTOOLS_DIR/scripts/common.sh" + enter_distrobox + (cd "$BAR_DIR" && lx --lua-version 5.1 exec stylua . 
{{args}})
+
+# Run busted unit tests
+units *args: require-bar
+    #!/usr/bin/env bash
+    set -euo pipefail
+    source "$DEVTOOLS_DIR/scripts/common.sh"
+    enter_distrobox
+    (cd "$BAR_DIR" && lx --lua-version 5.1 test {{args}})
+
+# Drop into an interactive test shell (busted on PATH)
+test-shell: require-bar
+    #!/usr/bin/env bash
+    set -e
+    source "$DEVTOOLS_DIR/scripts/common.sh"
+    cd "$BAR_DIR"
+    echo -e "${GREEN}[ok]${NC} Entering lx test shell (busted is available)."
+    echo -e "${GREEN}[ok]${NC} Type 'exit' to return."
+    if [ -n "${DEVTOOLS_DISTROBOX:-}" ] && [ -z "${_DEVTOOLS_IN_DISTROBOX:-}" ] && [ ! -f /run/.containerenv ]; then
+        exec script -qec "distrobox enter '${DEVTOOLS_DISTROBOX}' -- lx --lua-version 5.1 shell --test --no-loader" /dev/null
+    fi
+    exec lx --lua-version 5.1 shell --test --no-loader
+
+# Run headless integration tests (x86-64 only)
+integrations *args: require-bar
+    #!/usr/bin/env bash
+    if [[ "$(uname -m)" =~ ^(arm64|aarch64)$ ]]; then
+        echo "Error: integration tests require an x86-64 host." >&2
+        echo "The Spring engine used by headless tests has no arm64 build." >&2
+        exit 1
+    fi
+    {{INTEGRATION_COMPOSE}} up --build --abort-on-container-exit {{args}}
+
+# Run all BAR tests (unit + integrations)
+test: units integrations
+
+# Build bar-lua-codemod binary (requires cargo or rust-dev distrobox)
+codemod-build:
+    #!/usr/bin/env bash
+    set -euo pipefail
+    source "$DEVTOOLS_DIR/scripts/common.sh"
+    step "Building bar-lua-codemod..."
+    bash "$DEVTOOLS_DIR/scripts/codemod-cargo.sh" build --release
+    ok "Built: {{CODEMOD_BIN}}"
+
+# Run bar-lua-codemod unit tests
+codemod-test:
+    #!/usr/bin/env bash
+    set -euo pipefail
+    source "$DEVTOOLS_DIR/scripts/common.sh"
+    step "Testing bar-lua-codemod..." 
+ bash "$DEVTOOLS_DIR/scripts/codemod-cargo.sh" test + ok "All tests passed" + +[private] +require-codemod: + #!/usr/bin/env bash + [ -x "{{CODEMOD_BIN}}" ] && exit 0 + set -euo pipefail + source "$DEVTOOLS_DIR/scripts/common.sh" + step "bar-lua-codemod not found, building..." + bash "$DEVTOOLS_DIR/scripts/codemod-cargo.sh" build --release + ok "Built bar-lua-codemod" + +# Run codemod transforms + stylua (contributors run after rebasing) +fmt-mig: require-bar require-codemod + #!/usr/bin/env bash + set -euo pipefail + source "$DEVTOOLS_DIR/scripts/common.sh" + enter_distrobox + step "bracket-to-dot transform..." + "{{CODEMOD_BIN}}" bracket-to-dot --path "$BAR_DIR" --exclude common/luaUtilities + step "rename-aliases transform..." + "{{CODEMOD_BIN}}" rename-aliases --path "$BAR_DIR" --exclude common/luaUtilities + step "detach-bar-modules transform..." + "{{CODEMOD_BIN}}" detach-bar-modules --path "$BAR_DIR" --exclude common/luaUtilities + step "spring-split transform..." + "{{CODEMOD_BIN}}" spring-split --path "$BAR_DIR" --library "$BAR_DIR/recoil-lua-library/library" --exclude common/luaUtilities + step "stylua..." + (cd "$BAR_DIR" && lx --lua-version 5.1 exec stylua .) + ok "fmt-mig complete" + +[private] +require-library: + just lua::library + +# Regenerate fmt, mig-*, and mig branches from origin/master +fmt-mig-generate *args: require-bar require-codemod require-library + bash "$DEVTOOLS_DIR/scripts/generate-branches.sh" {{args}} + +# Install git pre-commit hook in the BAR repo +setup-hooks: + #!/usr/bin/env bash + set -euo pipefail + source "$DEVTOOLS_DIR/scripts/common.sh" + hook="$BAR_DIR/.git/hooks/pre-commit" + if [ ! -d "$BAR_DIR/.git" ]; then + err "BAR repo not found at $BAR_DIR" + info "Clone it first: just repos::clone extra" + exit 1 + fi + mkdir -p "$(dirname "$hook")" + printf '%s\n' \ + '#!/usr/bin/env bash' \ + 'set -e' \ + 'echo "[pre-commit] Running stylua..."' \ + 'lx --lua-version 5.1 exec stylua .' 
\ + 'echo "[pre-commit] Running lx lint..."' \ + 'lx --lua-version 5.1 lint' \ + > "$hook" + chmod +x "$hook" + ok "Installed pre-commit hook at $hook" + if [ -f "$BAR_DIR/.git-blame-ignore-revs" ]; then + git -C "$BAR_DIR" config blame.ignoreRevsFile .git-blame-ignore-revs + ok "Configured git blame to ignore formatting commits" + fi diff --git a/just/docs.just b/just/docs.just new file mode 100644 index 0000000..a4c7632 --- /dev/null +++ b/just/docs.just @@ -0,0 +1,47 @@ +set export + +DEVTOOLS_DIR := justfile_directory() +RECOIL_DIR := DEVTOOLS_DIR / "RecoilEngine" +COMPOSE_FILE := DEVTOOLS_DIR / "docker-compose.dev.yml" +COMPOSE := "docker compose -f " + COMPOSE_FILE + +# Generate lua library locally, then build docs and start Hugo dev server +server: + just lua::library + {{COMPOSE}} run --rm --service-ports recoil-docs server_dev -- --bind 0.0.0.0 + +# Generate lua library locally, then build Lua API pages only +generate: + just lua::library + {{COMPOSE}} run --rm recoil-docs lua_pages_only + +# Start Hugo dev server without regenerating +server-only: + {{COMPOSE}} run --rm --service-ports recoil-docs server -- --bind 0.0.0.0 + +# Full pipeline inside Docker (uses npm lua-doc-extractor, for CI-like builds) +generate-full: + {{COMPOSE}} run --rm recoil-docs lua_pages + +# Full pipeline inside Docker then serve (uses npm lua-doc-extractor) +server-full: + {{COMPOSE}} run --rm --service-ports recoil-docs server_full -- --bind 0.0.0.0 + +# Rebuild the docs container image +build: + {{COMPOSE}} build recoil-docs + +# TODO: Same workaround as lua::reset. doc/site/data/* are pipeline outputs tracked in git, so +# local docs generation dirties the tree. Prefer these as build-only artifacts (CI publishes +# them) rather than tracked outputs developers must constantly revert. + +# Reset generated doc data files +reset: + #!/usr/bin/env bash + set -euo pipefail + source "$DEVTOOLS_DIR/scripts/common.sh" + info "Resetting doc site data in RecoilEngine..." 
+ cd "$RECOIL_DIR" + git checkout -- doc/site/data/ + git clean -fd doc/site/data/ + ok "Doc data reset." diff --git a/just/engine.just b/just/engine.just new file mode 100644 index 0000000..1cabdfd --- /dev/null +++ b/just/engine.just @@ -0,0 +1,32 @@ +set export + +DEVTOOLS_DIR := justfile_directory() + +# Build Recoil engine via docker-build-v2 +build *args: + #!/usr/bin/env bash + set -euo pipefail + source "$DEVTOOLS_DIR/scripts/common.sh" + build_script="$DEVTOOLS_DIR/RecoilEngine/docker-build-v2/build.sh" + if [ ! -f "$build_script" ]; then + err "RecoilEngine not found. Clone it first: just repos::clone extra" + exit 1 + fi + if [ -z "{{args}}" ]; then + err "Usage: just engine::build [cmake-args...]" + echo "" + echo " Examples:" + echo " just engine::build linux" + echo " just engine::build linux -DCMAKE_BUILD_TYPE=Release" + echo " just engine::build linux -DTRACY_ENABLE=ON" + echo " just engine::build --help" + exit 1 + fi + arch_args="" + if [[ ! " {{args}} " =~ " --arch " ]]; then + case "$(uname -m)" in + x86_64) arch_args="--arch amd64" ;; + aarch64|arm64) arch_args="--arch arm64" ;; + esac + fi + exec bash "$build_script" $arch_args {{args}} diff --git a/just/link.just b/just/link.just new file mode 100644 index 0000000..71a58c1 --- /dev/null +++ b/just/link.just @@ -0,0 +1,19 @@ +set export + +DEVTOOLS_DIR := justfile_directory() + +# Show symlink status for game directory +status: + #!/usr/bin/env bash + set -euo pipefail + source "$DEVTOOLS_DIR/scripts/common.sh" + source "$DEVTOOLS_DIR/scripts/setup.sh" + cmd_link + +# Symlink a repo into the game directory (engine, chobby, or bar) +create target: + #!/usr/bin/env bash + set -euo pipefail + source "$DEVTOOLS_DIR/scripts/common.sh" + source "$DEVTOOLS_DIR/scripts/setup.sh" + cmd_link "{{target}}" diff --git a/just/lua.just b/just/lua.just new file mode 100644 index 0000000..6c3c0e8 --- /dev/null +++ b/just/lua.just @@ -0,0 +1,80 @@ +set export + +DEVTOOLS_DIR := justfile_directory() +RECOIL_DIR 
:= DEVTOOLS_DIR / "RecoilEngine"
BAR_DIR := DEVTOOLS_DIR / "Beyond-All-Reason"
LDE_DIR := DEVTOOLS_DIR / "lua-doc-extractor"

# Build lua-doc-extractor from local checkout
build-lde:
    #!/usr/bin/env bash
    set -euo pipefail
    source "$DEVTOOLS_DIR/scripts/common.sh"
    enter_distrobox
    if [ ! -d "$LDE_DIR" ]; then
        err "lua-doc-extractor not found at $LDE_DIR"
        info "Clone it first: just repos::clone extra"
        exit 1
    fi
    cd "$LDE_DIR"
    npm ci && npm run build
    ok "lua-doc-extractor built"

# Generate Lua library from RecoilEngine sources, copy into BAR submodule
library *flags: build-lde
    #!/usr/bin/env bash
    set -euo pipefail
    source "$DEVTOOLS_DIR/scripts/common.sh"
    LIB="$RECOIL_DIR/rts/Lua/library"
    DEST="$LIB/generated"

    info "Cleaning stale generated files..."
    clean_dir "$DEST"
    clean_dir "$LIB/RecoilEngine"

    enter_distrobox
    LDE="node $LDE_DIR/dist/src/cli.js"

    info "Extracting Lua docs..."
    $LDE \
        --src "$RECOIL_DIR/rts/{Lua,Rml/SolLua}/**/*.cpp" \
        --dest "$DEST" \
        --repo "https://github.com/beyond-all-reason/RecoilEngine/blob/master" \
        {{flags}}

    if [ -d "$BAR_DIR/recoil-lua-library/library" ]; then
        info "Copying into BAR submodule..."
        clean_dir "$BAR_DIR/recoil-lua-library/library"
        mkdir -p "$BAR_DIR/recoil-lua-library/library"
        cp -r "$RECOIL_DIR/rts/Lua/library/"* \
            "$BAR_DIR/recoil-lua-library/library/"
        ok "Updated $BAR_DIR/recoil-lua-library/library/"
        echo "  Run 'just lua::reset' to reset Recoil library and BAR recoil-lua-library."
    fi

# Generate library then restart LuaLS so the editor picks up changes
library-reload *flags: (library flags)
    -pkill -f lua-language-server
    @echo "LuaLS restarting (editor extension will respawn it)"

# TODO: This is a workaround for the fact that these CI artifacts are in source control and
# generally developers don't want generated output dirtying PRs that only change inputs.
# Prefer recoil-lua-library NOT be a submodule but a local build artifact produced by our
# scripting layer, with CI building and publishing the package without committing it back.

# Reset generated Lua library files
reset:
    #!/usr/bin/env bash
    set -euo pipefail
    source "$DEVTOOLS_DIR/scripts/common.sh"
    info "Resetting Lua library in RecoilEngine..."
    cd "$RECOIL_DIR"
    git checkout -- rts/Lua/library/
    git clean -fd rts/Lua/library/
    if [ -d "$BAR_DIR/recoil-lua-library" ]; then
        info "Resetting recoil-lua-library submodule in BAR..."
        cd "$BAR_DIR"
        git submodule update --init --force recoil-lua-library
        git -C recoil-lua-library clean -fd
    fi
    ok "Lua library reset."

# ==== just/repos.just (new file) ====
set export

DEVTOOLS_DIR := justfile_directory()
REPOS_CONF := DEVTOOLS_DIR / "repos.conf"
REPOS_LOCAL := DEVTOOLS_DIR / "repos.local.conf"

# Clone or update repos (group: core, extra, or all)
clone group="all":
    #!/usr/bin/env bash
    set -euo pipefail
    source "$DEVTOOLS_DIR/scripts/common.sh"
    source "$DEVTOOLS_DIR/scripts/repos.sh"
    cmd_clone "{{group}}"

# Show status of all configured repositories
status:
    #!/usr/bin/env bash
    set -euo pipefail
    source "$DEVTOOLS_DIR/scripts/common.sh"
    source "$DEVTOOLS_DIR/scripts/repos.sh"
    cmd_repos

# Pull latest on all cloned repositories
update:
    #!/usr/bin/env bash
    set -euo pipefail
    source "$DEVTOOLS_DIR/scripts/common.sh"
    source "$DEVTOOLS_DIR/scripts/repos.sh"
    cmd_update

# ==== just/services.just (new file) ====
set export

DEVTOOLS_DIR := justfile_directory()
COMPOSE_FILE := DEVTOOLS_DIR / "docker-compose.dev.yml"
COMPOSE := "docker compose -f " + COMPOSE_FILE
LOBBY_DIR := DEVTOOLS_DIR / "bar-lobby"

# Start dev services.
# Options: lobby, spads
up *args:
    #!/usr/bin/env bash
    set -euo pipefail
    source "$DEVTOOLS_DIR/scripts/common.sh"
    source "$DEVTOOLS_DIR/scripts/setup.sh"
    install_dockerignore

    # Optional add-ons are additive: `just services::up lobby spads`.
    start_lobby=0
    with_spads=0
    for arg in {{args}}; do
        case "$arg" in
            lobby|--lobby) start_lobby=1 ;;
            spads|--spads) with_spads=1 ;;
        esac
    done

    if [ "$with_spads" -eq 1 ]; then
        info "Starting PostgreSQL, Teiserver, and SPADS..."
        $COMPOSE --profile spads up -d --build
    else
        info "Starting PostgreSQL and Teiserver..."
        $COMPOSE up -d --build
    fi

    echo ""
    info "Waiting for Teiserver to become healthy (first run takes several minutes)..."
    echo "  Follow progress: just services::logs teiserver"
    echo ""

    # Poll the compose health status: 120 attempts x 5s = 10 minute ceiling.
    attempts=0
    max_attempts=120
    while [ $attempts -lt $max_attempts ]; do
        health=$($COMPOSE ps teiserver --format '{{{{.Health}}}}' 2>/dev/null || echo "unknown")
        case "$health" in
            healthy)
                ok "Teiserver is healthy!"
                break
                ;;
            unhealthy)
                err "Teiserver failed to start. Check logs: just services::logs teiserver"
                exit 1
                ;;
            *)
                sleep 5
                attempts=$((attempts + 1))
                # Progress line every 30 seconds.
                if [ $((attempts % 6)) -eq 0 ]; then
                    info "Still waiting... (${attempts}/${max_attempts}) - health: ${health}"
                fi
                ;;
        esac
    done

    if [ $attempts -ge $max_attempts ]; then
        err "Timed out waiting for Teiserver. Check logs: just services::logs teiserver"
        exit 1
    fi

    echo ""
    echo -e "${BOLD}=== Services Running ===${NC}"
    echo ""
    echo -e "  ${GREEN}Teiserver Web UI${NC}   http://localhost:4000"
    echo -e "  ${GREEN}Teiserver HTTPS${NC}    https://localhost:8888"
    echo -e "  ${GREEN}Spring Protocol${NC}    localhost:8200 (TCP) / :8201 (TLS)"
    echo -e "  ${GREEN}PostgreSQL${NC}         localhost:${BAR_POSTGRES_PORT:-5433}"
    echo ""
    echo -e "  ${BOLD}Login:${NC}     root@localhost / password"
    echo -e "  ${BOLD}SPADS bot:${NC} spadsbot / password"
    if [ "$with_spads" -eq 1 ]; then
        echo ""
        echo -e "  SPADS is starting (check: just services::logs spads)"
    fi
    echo ""

    if [ "$start_lobby" -eq 1 ]; then
        just services::lobby
    fi

# Stop all services
down:
    $COMPOSE --profile spads down

# Show running services
status:
    #!/usr/bin/env bash
    echo -e "\033[1m=== Service Status ===\033[0m"
    echo ""
    $COMPOSE --profile spads ps -a

# Tail service logs
logs service="":
    #!/usr/bin/env bash
    if [ -z "{{service}}" ]; then
        $COMPOSE --profile spads logs -f --tail=100
    else
        $COMPOSE --profile spads logs -f --tail=100 "{{service}}"
    fi

# Start bar-lobby dev server
lobby:
    #!/usr/bin/env bash
    set -euo pipefail
    source "$DEVTOOLS_DIR/scripts/common.sh"
    enter_distrobox
    if [ ! -d "$LOBBY_DIR" ]; then
        err "bar-lobby directory not found at: $LOBBY_DIR"
        err "Run 'just repos::clone' to clone repositories first."
        exit 1
    fi
    if ! command -v node &>/dev/null; then
        err "Node.js is required for bar-lobby. Run 'just setup::deps'."
        exit 1
    fi
    info "Installing bar-lobby dependencies..."
    cd "$LOBBY_DIR"
    npm install
    info "Starting bar-lobby dev server..."
    echo "  (Ctrl+C to stop the lobby; Docker services keep running)"
    echo ""
    # NVIDIA PRIME offload hints; harmless on non-NVIDIA systems.
    __NV_PRIME_RENDER_OFFLOAD=1 \
    __GLX_VENDOR_LIBRARY_NAME=nvidia \
    LC_CTYPE=C \
    npm start -- -- --no-sandbox

# Open a shell in a container
shell service="teiserver":
    $COMPOSE --profile spads exec {{service}} bash

# Build Docker images
build:
    #!/usr/bin/env bash
    set -euo pipefail
    source "$DEVTOOLS_DIR/scripts/common.sh"
    source "$DEVTOOLS_DIR/scripts/setup.sh"
    install_dockerignore
    info "Building Docker images..."
    info "  - Teiserver: compiling Elixir deps + generating TLS certs"
    info "  - SPADS:     pulling pre-built image (badosu/spads:latest)"
    echo ""
    $COMPOSE build teiserver
    $COMPOSE --profile spads pull spads
    echo ""
    ok "Images built successfully."

# Destroy all data and rebuild from scratch
[confirm("This will destroy all data (database, SPADS state, engine cache). Continue?")]
reset:
    #!/usr/bin/env bash
    set -euo pipefail
    source "$DEVTOOLS_DIR/scripts/common.sh"
    info "Stopping services and removing volumes..."
    $COMPOSE --profile spads down -v
    info "Rebuilding images from scratch..."
    $COMPOSE build --no-cache teiserver
    $COMPOSE --profile spads pull spads
    ok "Reset complete. Run 'just services::up' to start fresh."
# ==== just/setup.just (new file) ====
set export

DEVTOOLS_DIR := justfile_directory()
COMPOSE_FILE := DEVTOOLS_DIR / "docker-compose.dev.yml"
COMPOSE := "docker compose -f " + COMPOSE_FILE
REPOS_CONF := DEVTOOLS_DIR / "repos.conf"
REPOS_LOCAL := DEVTOOLS_DIR / "repos.local.conf"

# Full first-time setup: install deps, clone repos, build images
init *args:
    #!/usr/bin/env bash
    set -euo pipefail
    source "$DEVTOOLS_DIR/scripts/common.sh"
    source "$DEVTOOLS_DIR/scripts/repos.sh"
    source "$DEVTOOLS_DIR/scripts/setup.sh"
    cmd_init {{args}}

# Install system packages (docker, git, nodejs)
deps:
    #!/usr/bin/env bash
    set -euo pipefail
    source "$DEVTOOLS_DIR/scripts/common.sh"
    source "$DEVTOOLS_DIR/scripts/setup.sh"
    cmd_install_deps

# Check prerequisites and build Docker images
check:
    #!/usr/bin/env bash
    set -euo pipefail
    source "$DEVTOOLS_DIR/scripts/common.sh"
    source "$DEVTOOLS_DIR/scripts/repos.sh"
    source "$DEVTOOLS_DIR/scripts/setup.sh"
    cmd_setup

# ==== just/tei.just (new file) ====
set export

DEVTOOLS_DIR := justfile_directory()
COMPOSE_FILE := DEVTOOLS_DIR / "docker-compose.dev.yml"
COMPOSE := "docker compose -f " + COMPOSE_FILE
TEISERVER_DIR := DEVTOOLS_DIR / "teiserver"

[private]
require-setup:
    #!/usr/bin/env bash
    if [ ! -d "{{TEISERVER_DIR}}" ]; then
        echo "Error: teiserver is not cloned." >&2
        echo "Run: just setup::init" >&2
        exit 1
    fi

# Initialize and migrate the test database
setup-test-db: require-setup
    {{COMPOSE}} run --rm -e MIX_ENV=test --entrypoint "" \
        teiserver bash -c "mix ecto.create --quiet 2>/dev/null; mix ecto.migrate --quiet"

# Run teiserver mix tests
mix *args: require-setup
    {{COMPOSE}} run --rm -e MIX_ENV=test --entrypoint "" \
        -v {{TEISERVER_DIR}}/test:/app/test:z \
        teiserver mix test {{args}}

alias test := mix

# Drop into an interactive test shell for teiserver
test-shell: require-setup
    {{COMPOSE}} run --rm -e MIX_ENV=test --entrypoint "" \
        -v {{TEISERVER_DIR}}/test:/app/test:z \
        teiserver bash -c "echo 'Entering teiserver test shell (MIX_ENV=test). Type exit to return.' && exec bash"

# ==== repos.conf (hunk: add lua-doc-extractor to the extra group) ====
BYAR-Chobby https://github.com/beyond-all-reason/BYAR-Chobby.git
bar-db https://github.com/beyond-all-reason/bar-db.git master extra
bar-live-services https://github.com/beyond-all-reason/bar-live-services.git main extra
RecoilEngine https://github.com/beyond-all-reason/RecoilEngine.git master extra
lua-doc-extractor https://github.com/rhys-vdw/lua-doc-extractor main extra
SPADS https://github.com/Yaribz/SPADS.git master extra
SpringLobbyInterface https://github.com/Yaribz/SpringLobbyInterface.git master extra

# ==== scripts/codemod-cargo.sh (new file, mode 100755) ====
#!/usr/bin/env bash
# Run cargo commands for bar-lua-codemod, using rust-dev distrobox if cargo isn't on PATH.
+set -euo pipefail + +DEVTOOLS_DIR="${DEVTOOLS_DIR:?DEVTOOLS_DIR must be set}" +CODEMOD_DIR="$DEVTOOLS_DIR/bar-lua-codemod" + +source "$DEVTOOLS_DIR/scripts/common.sh" + +cd "$CODEMOD_DIR" + +if command -v cargo &>/dev/null; then + cargo "$@" +elif command -v distrobox &>/dev/null; then + info "cargo not on PATH, using ${DEVTOOLS_RUST_DISTROBOX:-rust-dev} distrobox..." + distrobox enter "${DEVTOOLS_RUST_DISTROBOX:-rust-dev}" -- cargo "$@" +else + err "cargo not found. Install Rust (rustup.rs) or create a rust-dev distrobox." + exit 1 +fi diff --git a/scripts/common.sh b/scripts/common.sh new file mode 100644 index 0000000..fcfc9ae --- /dev/null +++ b/scripts/common.sh @@ -0,0 +1,56 @@ +#!/usr/bin/env bash +# Shared helpers for BAR-Devtools scripts. +# Source this file; it only defines functions and variables. + +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +CYAN='\033[0;36m' +BOLD='\033[1m' +DIM='\033[2m' +NC='\033[0m' + +info() { echo -e "${BLUE}[info]${NC} $*"; } +ok() { echo -e "${GREEN}[ok]${NC} $*"; } +warn() { echo -e "${YELLOW}[warn]${NC} $*"; } +err() { echo -e "${RED}[error]${NC} $*"; } +step() { echo -e "${CYAN}[step]${NC} $*"; } + +# Remove a directory that may contain files owned by a container runtime. +clean_dir() { + local dir="$1" + [ -d "$dir" ] || return 0 + rm -rf "$dir" 2>/dev/null || true + [ -d "$dir" ] || return 0 + + local parent; parent="$(dirname "$dir")" + local name; name="$(basename "$dir")" + + if command -v podman &>/dev/null; then + warn "Retrying removal via podman unshare..." + podman unshare rm -rf "$dir" 2>/dev/null || true + [ -d "$dir" ] || return 0 + fi + + if command -v docker &>/dev/null; then + warn "Retrying removal via docker..." + docker run --rm -v "$parent:/p:z" alpine rm -rf "/p/$name" + return $? 
+ fi + + err "Cannot remove $dir — files owned by another user" + err "Try: sudo rm -rf '$dir'" + return 1 +} + +# Re-execute the calling script inside a distrobox if DEVTOOLS_DISTROBOX is set. +# Just writes shebang scripts to temp files under /run/user/... which isn't +# shared with distrobox, so we feed the script via stdin (< "$0") before exec. +enter_distrobox() { + if [ -n "${DEVTOOLS_DISTROBOX:-}" ] && [ -z "${_DEVTOOLS_IN_DISTROBOX:-}" ]; then + info "Entering distrobox '$DEVTOOLS_DISTROBOX'..." + exec distrobox enter "$DEVTOOLS_DISTROBOX" -- \ + env _DEVTOOLS_IN_DISTROBOX=1 bash -s -- "$@" < "$0" + fi +} diff --git a/scripts/generate-branches.sh b/scripts/generate-branches.sh new file mode 100755 index 0000000..a21c271 --- /dev/null +++ b/scripts/generate-branches.sh @@ -0,0 +1,399 @@ +#!/usr/bin/env bash +# Deterministically rebuild fmt, leaf (mig-*), and mig branches. +# Called by: just bar::fmt-mig-generate [--push] [--update-prs] +set -euo pipefail + +source "${DEVTOOLS_DIR}/scripts/common.sh" + +# ─── Config ────────────────────────────────────────────────────────────────── + +CODEMOD="${CODEMOD_BIN:-${DEVTOOLS_DIR}/bar-lua-codemod/target/release/bar-lua-codemod}" +BAR="${BAR_DIR:-${DEVTOOLS_DIR}/Beyond-All-Reason}" +REMOTE="${PUSH_REMOTE:-upstream}" +FMT_BASE="${FMT_BASE:-stylua}" + +FMT_PR="https://github.com/beyond-all-reason/Beyond-All-Reason/pull/7199" +MIG_PR="https://github.com/beyond-all-reason/Beyond-All-Reason/pull/7229" + +# Dirty check only on the host -- inside distrobox stdin is piped via enter_distrobox. +# Exclude recoil-lua-library since lua::library intentionally dirties it before we run. +if [[ -z "${_DEVTOOLS_IN_DISTROBOX:-}" ]] && [[ -n "$(git -C "$BAR" status --porcelain -- ':!recoil-lua-library' 2>/dev/null)" ]]; then + warn "BAR working tree has uncommitted changes." + warn "They will be discarded by branch checkouts." + echo -n "Continue? 
[y/N] " + read -r answer + if [[ "$answer" != [yY] ]]; then + err "Aborted" + exit 1 + fi +fi + +enter_distrobox "$@" + +# ─── Transform registry (order matters for the linear mig branch) ──────────── + +TRANSFORMS=("bracket_to_dot" "spring_split") + +# -- bracket-to-dot ----------------------------------------------------------- + +bracket_to_dot_branch="mig-bracket" +bracket_to_dot_commit="gen(bar_codemod): murder bracket accessors" +bracket_to_dot_pr="https://github.com/keithharvey/bar/pull/29" +bracket_to_dot_description="" + +run_bracket_to_dot() { + "$CODEMOD" bracket-to-dot --path "$BAR" --exclude common/luaUtilities +} + +describe_bracket_to_dot() { + cat <<'EOF' +# bracket-to-dot - convert x["y"] to x.y and ["y"] = to y = +bar-lua-codemod bracket-to-dot --path "$BAR_DIR" --exclude common/luaUtilities +EOF +} + +# -- spring-split -------------------------------------------------------------- + +spring_split_branch="mig-spring-split" +spring_split_commit="gen(bar_codemod): rename-aliases, detach-bar-modules, spring-split" +spring_split_pr="https://github.com/keithharvey/bar/pull/28" +spring_split_description='See [RecoilEngine#2799](https://github.com/beyond-all-reason/RecoilEngine/pull/2799) for the SpringSynced/SpringUnsynced/SpringShared type split on the engine side.' + +run_spring_split() { + "$CODEMOD" rename-aliases --path "$BAR" --exclude common/luaUtilities + "$CODEMOD" detach-bar-modules --path "$BAR" --exclude common/luaUtilities + "$CODEMOD" spring-split --path "$BAR" --library "$BAR/recoil-lua-library/library" --exclude common/luaUtilities +} + +describe_spring_split() { + cat <<'EOF' +# rename-aliases -- deprecated aliases, e.g. 
GetMyTeamID -> GetLocalTeamID +bar-lua-codemod rename-aliases --path "$BAR_DIR" --exclude common/luaUtilities + +# detach-bar-modules -- moves I18N, Utilities, Debug, Lava, GetModOptionsCopy off the Spring table +bar-lua-codemod detach-bar-modules --path "$BAR_DIR" --exclude common/luaUtilities + +# spring-split - split Spring into SpringSynced, SpringUnsynced, and SpringShared +bar-lua-codemod spring-split --path "$BAR_DIR" --library "$BAR_DIR/recoil-lua-library/library" --exclude common/luaUtilities +EOF +} + +# ─── Helpers ───────────────────────────────────────────────────────────────── + +tvar() { eval echo "\${${1}_${2}:-}"; } + +host_exec() { + if [ -f /run/.containerenv ] && command -v distrobox-host-exec &>/dev/null; then + distrobox-host-exec "$@" + else + "$@" + fi +} + +git_bar() { host_exec git -C "$BAR" "$@"; } + +gh_host() { host_exec gh "$@"; } + +stylua_pass() { + step "Running stylua..." + (cd "$BAR" && lx --lua-version 5.1 exec stylua .) +} + +# ─── PR body generators ───────────────────────────────────────────────────── + +generate_leaf_pr_body() { + local transform="$1" output_file="$2" + local description + description=$(tvar "$transform" "description") + + cat <
+stylua # or: just bar::fmt +git add -A && git rebase --continue +``` + +The formatter is idempotent -- your logical changes survive and the formatting matches master. +BODY +} + +generate_mig_pr_body() { + local output_file="$1" + + cat <<'HEADER' +### What's it do + +Automated AST transform via bar-lua-codemod (full-moon 2.1.1 over in [BAR-Devtools](https://github.com/thvl3/BAR-Devtools/pull/5)) + +### Goals (so far) + +* fix all type errors +* normalize to dot notation +* split Spring into more specific types + +### Current Commands + +HEADER + + for transform in "${TRANSFORMS[@]}"; do + echo "- **\`${transform//_/-}\`** -- $(describe_${transform} | head -1 | sed 's/^# //')" + done + + cat <<'MID' + +### Split PRs for Individual Command Preview + +Each sub-PR isolates a single command's output for reviewability: + +MID + + for transform in "${TRANSFORMS[@]}"; do + local branch pr_url + branch=$(tvar "$transform" "branch") + pr_url=$(tvar "$transform" "pr") + if [[ -n "$pr_url" ]]; then + echo "* ${transform//_/-} -- $pr_url" + fi + done + + cat <<'TAIL' + +The great thing about this setup is that we can add arbitrary commands to the text transform, train people to run `just bar::fmt-mig`, and we can mass code-transform as an org. + +### Note on Lua 5.4 + +This same code transform and approach would easily get us to Lua 5.4 on the game side. The engine is another matter, but here is an example of what that would look like: + +```rust +let ast = full_moon::parse(&code)?; // parse 5.1 input +let ast = migrator.visit_ast(ast); // transform: rewrite 5.1 patterns to 5.4 equivalents +fs::write(&path, full_moon::print(&ast)); // write result (now valid 5.4) +``` + +### Output Summary + +``` +TAIL + cat "$output_file" + echo '```' +} + +# ─── Core flow ─────────────────────────────────────────────────────────────── + +build_fmt() { + step "Fetching origin..." + git_bar fetch origin + + step "Creating fmt branch from $FMT_BASE..." 
+ git_bar checkout --force -B fmt "$FMT_BASE" + + stylua_pass + + git_bar add -A + git_bar commit -m "gen(stylua): initial formatting of entire codebase" + + local fmt_hash + fmt_hash=$(git_bar rev-parse HEAD) + + step "Creating .git-blame-ignore-revs..." + printf '%s\n' \ + "# gen(stylua): initial formatting of entire codebase https://github.com/beyond-all-reason/Beyond-All-Reason/pull/7199" \ + "$fmt_hash" \ + > "$BAR/.git-blame-ignore-revs" + git_bar add .git-blame-ignore-revs + git_bar commit -m "git-blame-ignore-revs" + + local pr_body_file="$BAR/.git/fmt-pr-body.md" + generate_fmt_pr_body > "$pr_body_file" + ok "fmt branch ready (PR body: $pr_body_file)" + + if [[ "$DO_UPDATE_PRS" == "true" ]] && [[ -n "$FMT_PR" ]]; then + step "Updating fmt PR $FMT_PR..." + gh_host pr edit "$FMT_PR" --body-file "$pr_body_file" + ok "fmt PR updated" + fi +} + +build_leaf() { + local transform="$1" + local branch commit_msg pr_url + branch=$(tvar "$transform" "branch") + commit_msg=$(tvar "$transform" "commit") + pr_url=$(tvar "$transform" "pr") + + step "Building leaf: $branch" + git_bar checkout --force -B "$branch" fmt + + local output_file + output_file=$(mktemp /tmp/generate-branches.XXXXXX) + "run_${transform}" 2>&1 | tee "$output_file" + + stylua_pass + + git_bar add -A + git_bar commit -m "$commit_msg" + + local pr_body_file="$BAR/.git/${branch}-pr-body.md" + generate_leaf_pr_body "$transform" "$output_file" > "$pr_body_file" + rm -f "$output_file" + + ok "Leaf $branch ready (PR body: $pr_body_file)" + + if [[ "$DO_UPDATE_PRS" == "true" ]] && [[ -n "$pr_url" ]]; then + step "Updating PR $pr_url..." + gh_host pr edit "$pr_url" --body-file "$pr_body_file" + ok "PR updated" + fi +} + +build_mig() { + step "Building linear mig branch..." 
+ git_bar checkout --force -B mig fmt + + local mig_output_file + mig_output_file=$(mktemp /tmp/generate-branches-mig.XXXXXX) + local transform_hashes=() + + for transform in "${TRANSFORMS[@]}"; do + local commit_msg + commit_msg=$(tvar "$transform" "commit") + step "mig: $transform" + "run_${transform}" 2>&1 | tee -a "$mig_output_file" + stylua_pass + git_bar add -A + git_bar commit -m "$commit_msg" + transform_hashes+=("$(git_bar rev-parse HEAD)") + done + + step "Adding transform commits to .git-blame-ignore-revs..." + for i in "${!TRANSFORMS[@]}"; do + local transform="${TRANSFORMS[$i]}" + local commit_msg + commit_msg=$(tvar "$transform" "commit") + printf '\n# %s\n%s\n' "$commit_msg" "${transform_hashes[$i]}" \ + >> "$BAR/.git-blame-ignore-revs" + done + git_bar add .git-blame-ignore-revs + git_bar commit -m "git-blame-ignore-revs: add transform commits" + + local pr_body_file="$BAR/.git/mig-pr-body.md" + generate_mig_pr_body "$mig_output_file" > "$pr_body_file" + rm -f "$mig_output_file" + + ok "mig branch ready ($((${#TRANSFORMS[@]} + 1)) commits, PR body: $pr_body_file)" + + if [[ "$DO_UPDATE_PRS" == "true" ]] && [[ -n "$MIG_PR" ]]; then + step "Updating mig PR $MIG_PR..." 
+ gh_host pr edit "$MIG_PR" --body-file "$pr_body_file" + ok "mig PR updated" + fi +} + +push_branches() { + local branches=("fmt" "mig") + for transform in "${TRANSFORMS[@]}"; do + branches+=("$(tvar "$transform" "branch")") + done + + step "Force-pushing: ${branches[*]} -> $REMOTE" + git_bar push "$REMOTE" --force-with-lease "${branches[@]}" + ok "All branches pushed to $REMOTE" +} + +# ─── CLI ───────────────────────────────────────────────────────────────────── + +DO_PUSH=false +DO_UPDATE_PRS=false + +while [[ $# -gt 0 ]]; do + case "$1" in + --push) DO_PUSH=true; shift ;; + --update-prs) DO_UPDATE_PRS=true; shift ;; + -h|--help) + echo "Usage: generate-branches.sh [--push] [--update-prs]" + echo "" + echo "Reconstructs fmt, leaf (mig-*), and mig branches from \$FMT_BASE ($FMT_BASE)." + echo "" + echo "Flags:" + echo " --push Force-push all branches to $REMOTE" + echo " --update-prs Update all PR descriptions via gh" + exit 0 + ;; + *) + err "Unknown flag: $1" + exit 1 + ;; + esac +done + +# ─── Run ───────────────────────────────────────────────────────────────────── + +build_fmt + +for transform in "${TRANSFORMS[@]}"; do + build_leaf "$transform" +done + +build_mig + +if [[ "$DO_PUSH" == "true" ]]; then + push_branches +fi + +echo "" +ok "All branches rebuilt." +leaf_names="" +for transform in "${TRANSFORMS[@]}"; do + leaf_names+="$(tvar "$transform" "branch"), " +done +info " fmt, ${leaf_names}mig" diff --git a/scripts/repos.sh b/scripts/repos.sh new file mode 100644 index 0000000..4c48260 --- /dev/null +++ b/scripts/repos.sh @@ -0,0 +1,212 @@ +#!/usr/bin/env bash +# Repository management helpers. +# Expects: DEVTOOLS_DIR, REPOS_CONF, REPOS_LOCAL (exported by Justfile) +# Source scripts/common.sh before this file. 

# Parallel arrays describing the configured repositories; indices line up.
declare -a REPO_DIRS=() REPO_URLS=() REPO_BRANCHES=() REPO_GROUPS=() REPO_LOCAL_PATHS=()

# Parse repos.conf and repos.local.conf into the REPO_* arrays.
# Later files win per directory (local overrides shared config).
# Line format: <dir> <url> [branch] [group] [local_path]; '#' starts a comment.
load_repos_conf() {
  REPO_DIRS=(); REPO_URLS=(); REPO_BRANCHES=(); REPO_GROUPS=(); REPO_LOCAL_PATHS=()
  local -A seen=()

  _parse_conf() {
    local file="$1"
    [ -f "$file" ] || return 0
    while IFS= read -r line || [ -n "$line" ]; do
      line="${line%%#*}"
      # FIX: was `line="$(echo "$line" | xargs ...)"` — that forks twice per
      # line and xargs chokes on quotes/backslashes, silently dropping the
      # entry. Pure parameter expansion trims whitespace with no subshell.
      line="${line#"${line%%[![:space:]]*}"}"
      line="${line%"${line##*[![:space:]]}"}"
      [ -z "$line" ] && continue
      local dir url branch group local_path
      read -r dir url branch group local_path <<< "$line"
      if [ -z "$dir" ] || [ -z "$url" ]; then
        continue
      fi
      branch="${branch:-master}"
      group="${group:-extra}"
      local_path="${local_path/#\~/$HOME}"   # expand a leading ~ only
      seen[$dir]="$url $branch $group $local_path"
    done < "$file"
  }

  _parse_conf "$REPOS_CONF"
  _parse_conf "$REPOS_LOCAL"

  local dir
  for dir in "${!seen[@]}"; do
    local url branch group local_path
    read -r url branch group local_path <<< "${seen[$dir]}"
    REPO_DIRS+=("$dir")
    REPO_URLS+=("$url")
    REPO_BRANCHES+=("$branch")
    REPO_GROUPS+=("$group")
    REPO_LOCAL_PATHS+=("$local_path")
  done
}

# Materialize one repo under $DEVTOOLS_DIR: symlink it when a local_path is
# configured, otherwise clone it (first time) or fetch (already present).
# Returns non-zero when a configured local path does not exist.
clone_or_update_repo() {
  local dir="$1" url="$2" branch="$3" local_path="${4:-}" target="$DEVTOOLS_DIR/$dir"

  if [ -n "$local_path" ]; then
    if [ ! -d "$local_path" ]; then
      warn "  ${dir}: local path does not exist: ${local_path}"
      return 1
    fi
    if [ -L "$target" ]; then
      local current_link
      current_link="$(readlink "$target")"
      if [ "$current_link" = "$local_path" ]; then
        ok "  ${dir}: linked -> ${local_path}"
      else
        warn "  ${dir}: symlink points to ${current_link}, config says ${local_path}"
        info "  ${dir}: updating symlink..."
        rm "$target"
        ln -s "$local_path" "$target"
        ok "  ${dir}: linked -> ${local_path}"
      fi
    elif [ -d "$target" ]; then
      # Never clobber a real checkout with a symlink automatically.
      warn "  ${dir}: exists as a real directory but config says link to ${local_path}"
      warn "  ${dir}: remove it manually to use the local path"
    else
      ln -s "$local_path" "$target"
      ok "  ${dir}: linked -> ${local_path}"
    fi
    return 0
  fi

  if [ -d "$target/.git" ]; then
    local current_url
    current_url="$(git -C "$target" remote get-url origin 2>/dev/null || true)"
    if [ "$current_url" != "$url" ] && [ -n "$current_url" ]; then
      warn "  ${dir}: origin is ${current_url}"
      warn "  ${dir}: config says ${url}"
      warn "  ${dir}: add to repos.local.conf to set your preferred remote"
    fi
    info "  ${dir}: fetching latest..."
    git -C "$target" fetch origin --quiet 2>/dev/null || warn "  ${dir}: fetch failed (offline?)"
    local current_branch
    current_branch="$(git -C "$target" branch --show-current 2>/dev/null)"
    if [ -n "$current_branch" ] && [ "$current_branch" != "$branch" ]; then
      info "  ${dir}: on branch '${current_branch}' (config says '${branch}')"
    fi
  else
    info "  ${dir}: cloning ${url} (branch: ${branch})..."
    git clone --recurse-submodules --branch "$branch" "$url" "$target" 2>&1 | sed 's/^/    /'
  fi
}

# Clone/update every configured repo, optionally filtered by group.
cmd_clone() {
  local group_filter="${1:-all}"
  load_repos_conf

  if [ "${#REPO_DIRS[@]}" -eq 0 ]; then
    err "No repositories found in repos.conf"
    exit 1
  fi

  echo -e "${BOLD}=== Cloning / Updating Repositories ===${NC}"
  echo ""

  if [ -f "$REPOS_LOCAL" ]; then
    info "Using overrides from repos.local.conf"
    echo ""
  fi

  local i cloned=0 updated=0 skipped=0 linked=0
  for i in "${!REPO_DIRS[@]}"; do
    local dir="${REPO_DIRS[$i]}"
    local url="${REPO_URLS[$i]}"
    local branch="${REPO_BRANCHES[$i]}"
    local group="${REPO_GROUPS[$i]}"
    local local_path="${REPO_LOCAL_PATHS[$i]}"

    if [ "$group_filter" != "all" ] && [ "$group" != "$group_filter" ]; then
      skipped=$((skipped + 1))
      continue
    fi

    # FIX: calls were unguarded — callers run with `set -e`, so one bad
    # local path or a failed clone aborted the entire loop. One repo's
    # failure must not stop the rest.
    if [ -n "$local_path" ]; then
      clone_or_update_repo "$dir" "$url" "$branch" "$local_path" || true
      linked=$((linked + 1))
    elif [ -d "$DEVTOOLS_DIR/$dir/.git" ]; then
      clone_or_update_repo "$dir" "$url" "$branch" || true
      updated=$((updated + 1))
    else
      clone_or_update_repo "$dir" "$url" "$branch" || true
      cloned=$((cloned + 1))
    fi
  done

  echo ""
  local summary="${cloned} cloned, ${updated} updated, ${skipped} skipped"
  [ "$linked" -gt 0 ] && summary+=", ${linked} linked"
  ok "Repos: ${summary}"
}

# Print a table of every configured repo: group, current branch, and state
# (ok / wrong branch / dirty / local symlink / broken link / missing).
cmd_repos() {
  load_repos_conf

  echo -e "${BOLD}=== Repository Status ===${NC}"
  echo ""
  printf "  ${DIM}%-24s %-8s %-18s %s${NC}\n" "DIRECTORY" "GROUP" "BRANCH" "STATUS"
  echo "  $(printf '%.0s-' {1..80})"

  local i
  for i in "${!REPO_DIRS[@]}"; do
    local dir="${REPO_DIRS[$i]}"
    local group="${REPO_GROUPS[$i]}"
    local target="$DEVTOOLS_DIR/$dir"

    local status current_branch
    if [ -L "$target" ]; then
      local link_dest
      link_dest="$(readlink "$target")"
      if [ -d "$target/.git" ]; then
        current_branch="$(git -C "$target" branch --show-current 2>/dev/null || echo "detached")"
        local dirty=""
        if ! git -C "$target" diff --quiet 2>/dev/null || ! git -C "$target" diff --cached --quiet 2>/dev/null; then
          dirty=" ${YELLOW}*dirty*${NC}"
        fi
        status="${CYAN}local${NC}${dirty} -> ${link_dest}"
      else
        status="${RED}broken link${NC} -> ${link_dest}"
        current_branch="-"
      fi
    elif [ -d "$target/.git" ]; then
      current_branch="$(git -C "$target" branch --show-current 2>/dev/null || echo "detached")"
      local dirty=""
      if ! git -C "$target" diff --quiet 2>/dev/null || ! git -C "$target" diff --cached --quiet 2>/dev/null; then
        dirty=" ${YELLOW}*dirty*${NC}"
      fi
      local branch="${REPO_BRANCHES[$i]}"
      if [ "$current_branch" = "$branch" ]; then
        status="${GREEN}ok${NC}${dirty}"
      else
        status="${YELLOW}branch: ${current_branch}${NC}${dirty}"
      fi
    else
      status="${RED}missing${NC}"
      current_branch="-"
    fi

    printf "  %-24s %-8s %-18s %b\n" "$dir" "$group" "$current_branch" "$status"
  done
  echo ""
}

# Fast-forward pull every repo that is already cloned.
cmd_update() {
  echo -e "${BOLD}=== Updating All Repositories ===${NC}"
  echo ""
  load_repos_conf

  local i
  for i in "${!REPO_DIRS[@]}"; do
    local dir="${REPO_DIRS[$i]}"
    local target="$DEVTOOLS_DIR/$dir"
    if [ -d "$target/.git" ]; then
      local branch
      branch="$(git -C "$target" branch --show-current 2>/dev/null)"
      info "${dir}: pulling ${branch}..."
      git -C "$target" pull --ff-only 2>&1 | sed 's/^/    /' || warn "  ${dir}: pull failed (conflicts?)"
    fi
  done
  echo ""
  ok "Update complete."
}

# ==== scripts/setup.sh (new file) ====
#!/usr/bin/env bash
# Setup, dependency installation, and prerequisite checks.
# Expects: DEVTOOLS_DIR, COMPOSE, REPOS_CONF (exported by Justfile)
# Source scripts/common.sh and scripts/repos.sh before this file.
# Print a short identifier for the host OS: macos, arch, debian, fedora, or
# unknown. Linux detection keys off the package-manager binary, so
# derivatives (Manjaro, Mint, ...) resolve to their parent distro.
detect_distro() {
  if [[ "$OSTYPE" == darwin* ]]; then
    echo "macos"
  elif command -v pacman &>/dev/null; then
    echo "arch"
  elif command -v apt-get &>/dev/null; then
    echo "debian"
  elif command -v dnf &>/dev/null; then
    echo "fedora"
  else
    echo "unknown"
  fi
}

# Print the install-command prefix for the detected distro, or nothing when
# the distro is unsupported. Output is intentionally multi-word; callers
# word-split it to build the final command line.
pkg_install_cmd() {
  case "$(detect_distro)" in
    macos) echo "brew install" ;;
    arch) echo "sudo pacman -S --needed" ;;
    debian) echo "sudo apt install -y" ;;
    fedora) echo "sudo dnf install -y" ;;
    *) echo "" ;;
  esac
}

# Map a generic dependency name ($1) to the distro-specific package name(s).
# May print several packages (e.g. "nodejs npm"), extra installer flags
# ("--cask docker"), or nothing at all (macOS bundles Compose with Docker
# Desktop). Unknown distro/package combinations fall through unchanged.
pkg_name() {
  local generic="$1"
  local distro
  distro="$(detect_distro)"
  case "${distro}:${generic}" in
    macos:docker) echo "--cask docker" ;;
    macos:docker-compose) echo "" ;;
    macos:git) echo "git" ;;
    macos:nodejs) echo "node" ;;
    arch:docker) echo "docker" ;;
    arch:docker-compose) echo "docker-compose" ;;
    arch:git) echo "git" ;;
    arch:nodejs) echo "nodejs npm" ;;
    debian:docker) echo "docker.io" ;;
    debian:docker-compose) echo "docker-compose-plugin" ;;
    debian:git) echo "git" ;;
    debian:nodejs) echo "nodejs npm" ;;
    fedora:docker) echo "docker-ce docker-ce-cli containerd.io" ;;
    fedora:docker-compose) echo "docker-compose-plugin" ;;
    fedora:git) echo "git" ;;
    fedora:nodejs) echo "nodejs npm" ;;
    *) echo "$generic" ;;
  esac
}

# Verify git is installed. Returns 1 (with an error message) if missing.
check_git() {
  if ! command -v git &>/dev/null; then
    err "git is not installed."
    return 1
  fi
  ok "git $(git --version | awk '{print $3}') detected"
}

# Verify docker is installed, the daemon is reachable, and the Compose V2
# plugin is present. Prints platform-specific remediation hints on failure.
check_docker() {
  if ! command -v docker &>/dev/null; then
    err "Docker is not installed."
    return 1
  fi
  # `docker info` fails both when the daemon is down and when the current
  # user lacks socket permissions; we can't distinguish, so hint at both.
  if ! docker info &>/dev/null; then
    err "Docker daemon is not running or current user lacks permissions."
    echo ""
    if [[ "$OSTYPE" == darwin* ]]; then
      echo " Open Docker Desktop to start the daemon."
    else
      echo " Start the daemon: sudo systemctl start docker"
      echo " Enable on boot: sudo systemctl enable docker"
      echo " Add yourself: sudo usermod -aG docker \$USER (then re-login)"
    fi
    echo ""
    return 1
  fi
  if ! docker compose version &>/dev/null; then
    err "Docker Compose V2 plugin is not installed."
    return 1
  fi
  ok "Docker $(docker --version | awk '{print $3}' | tr -d ',') + Compose V2 detected"
}

# Verify Node.js is installed. Node is optional (bar-lobby only), so a
# missing install is a warning, not an error — callers use `|| true`.
check_node() {
  if ! command -v node &>/dev/null; then
    warn "Node.js not found (needed for bar-lobby only)."
    return 1
  fi
  ok "Node.js $(node --version) detected"
}

# Warn about TCP ports our services bind to that are already in use.
# Uses lsof on macOS and ss on Linux. Conflicts are non-fatal: the user may
# intend to reuse an existing service, so we only warn.
check_ports() {
  local pg_port="${BAR_POSTGRES_PORT:-5433}"
  local ports=(4000 "$pg_port" 8200 8201 8888)
  local conflict=0
  local port
  for port in "${ports[@]}"; do
    if [[ "$OSTYPE" == darwin* ]]; then
      if lsof -iTCP:"$port" -sTCP:LISTEN &>/dev/null; then
        warn "Port ${port} is already in use"
        conflict=1
      fi
    else
      if ss -tlnp 2>/dev/null | grep -q ":${port} "; then
        warn "Port ${port} is already in use"
        conflict=1
      fi
    fi
  done
  if [ "$conflict" -eq 1 ]; then
    warn "Some ports are in use. Services binding to those ports may fail to start."
  else
    ok "Required ports available (4000, ${pg_port}, 8200, 8201, 8888)"
  fi
}

# Run all prerequisite checks. git and docker are hard requirements; node
# and free ports are advisory. Returns 1 if any hard requirement is missing.
check_prerequisites() {
  echo -e "${BOLD}Checking prerequisites...${NC}"
  echo ""
  local failed=0
  check_git || failed=1
  check_docker || failed=1
  check_node || true
  check_ports
  echo ""
  if [ "$failed" -ne 0 ]; then
    err "Missing required prerequisites. Run 'just setup::deps' or fix manually."
    return 1
  fi
}

# Copy our .dockerignore into the teiserver checkout (its build context)
# unless one already exists there. No-op when either path is absent.
install_dockerignore() {
  local target="$DEVTOOLS_DIR/teiserver/.dockerignore"
  local source="$DEVTOOLS_DIR/docker/teiserver.dockerignore"
  if [ -f "$source" ] && [ ! -f "$target" ]; then
    cp "$source" "$target"
    info "Installed .dockerignore for teiserver build context"
  fi
}

# Detect missing system dependencies (git, docker, compose, node) and
# install them with the distro's package manager after confirmation.
# Handles the post-install docker bootstrap (daemon enable, group add) and
# tells the user when a re-login is required.
cmd_install_deps() {
  echo -e "${BOLD}=== Install System Dependencies ===${NC}"
  echo ""

  local distro
  distro="$(detect_distro)"
  local install_cmd
  install_cmd="$(pkg_install_cmd)"

  if [ "$distro" = "unknown" ] || [ -z "$install_cmd" ]; then
    err "Unsupported distro. Install these manually: git, docker, docker-compose, nodejs, npm"
    exit 1
  fi

  info "Detected distro: ${BOLD}${distro}${NC}"
  echo ""

  local missing=()

  if ! command -v git &>/dev/null; then
    missing+=("git")
  fi
  if ! command -v docker &>/dev/null; then
    missing+=("docker")
  fi
  # When docker itself is absent this probe also fails, which is what we
  # want: the compose plugin gets queued for install alongside docker.
  if ! docker compose version &>/dev/null; then
    missing+=("docker-compose")
  fi
  if ! command -v node &>/dev/null; then
    missing+=("nodejs")
  fi

  if [ "${#missing[@]}" -eq 0 ]; then
    ok "All dependencies already installed."
    echo ""

    # Everything is installed but the daemon may still be unreachable.
    if ! docker info &>/dev/null; then
      warn "Docker is installed but the daemon isn't running or you lack permissions."
      echo ""
      if [[ "$OSTYPE" == darwin* ]]; then
        echo " Open Docker Desktop to start the daemon."
      else
        echo " sudo systemctl start docker"
        echo " sudo systemctl enable docker"
        echo " sudo usermod -aG docker \$USER # then re-login"
      fi
      echo ""
    fi
    return 0
  fi

  # Translate generic names to distro packages; entries may be multi-word.
  local packages=""
  local dep
  for dep in "${missing[@]}"; do
    packages+=" $(pkg_name "$dep")"
  done

  info "Missing: ${missing[*]}"
  info "Will run: ${install_cmd}${packages}"
  echo ""

  read -rp "Install now? [Y/n] " confirm
  if [[ "$confirm" =~ ^[Nn]$ ]]; then
    echo "Skipped. Install manually and retry."
    return 1
  fi

  # shellcheck disable=SC2086  # intentional word-splitting: both variables
  # hold multi-word command fragments (e.g. "sudo pacman -S --needed").
  $install_cmd $packages

  echo ""

  if [[ " ${missing[*]} " == *" docker "* ]]; then
    if [[ "$OSTYPE" == darwin* ]]; then
      warn "Docker Desktop was installed. Open it to finish setup, then re-run: just setup::init"
      return 0
    else
      info "Enabling and starting Docker daemon..."
      sudo systemctl enable --now docker 2>/dev/null || true

      # Group membership only applies to new login sessions, so stop here
      # and ask the user to re-login before continuing setup.
      if ! groups | grep -qw docker; then
        info "Adding $USER to the docker group (re-login required)..."
        sudo usermod -aG docker "$USER"
        warn "You need to log out and back in for Docker group membership to take effect."
        warn "After re-login, run: just setup::init"
        return 0
      fi
    fi
  fi

  ok "Dependencies installed successfully."
}

# Full first-time setup: install dependencies, clone repositories, build
# Docker images, optionally build the engine and symlink checkouts into the
# game directory. Pass "extras" or "all" to clone the extra repo group
# without being prompted.
cmd_init() {
  local clone_extras=0
  local arg
  for arg in "$@"; do
    case "$arg" in
      extras|all) clone_extras=1 ;;
    esac
  done

  echo -e "${BOLD}==========================================${NC}"
  echo -e "${BOLD} BAR Dev Environment - First Time Setup${NC}"
  echo -e "${BOLD}==========================================${NC}"
  echo ""

  step "1/5 Checking & installing dependencies"
  echo ""
  # Silence the individual checks here; we only need pass/fail to decide
  # whether to run the interactive installer.
  if check_git &>/dev/null && check_docker &>/dev/null; then
    ok "Core dependencies (git, docker) already installed."
    check_node || true
  else
    cmd_install_deps || { err "Dependency installation failed. Fix and retry."; exit 1; }
  fi
  echo ""

  step "2/5 Cloning repositories"
  echo ""
  if [ ! -f "$REPOS_CONF" ]; then
    err "repos.conf not found at: $REPOS_CONF"
    exit 1
  fi
  cmd_clone core
  echo ""

  if [ "$clone_extras" -eq 1 ]; then
    cmd_clone extra
    echo ""
  else
    read -rp "Also clone extra repositories (game engine, SPADS source, infra)? [y/N] " extras
    if [[ "$extras" =~ ^[Yy]$ ]]; then
      cmd_clone extra
      echo ""
    fi
  fi

  step "3/5 Building Docker images"
  echo ""
  install_dockerignore
  info "Building Docker images..."
  $COMPOSE build teiserver
  $COMPOSE --profile spads pull spads
  ok "Images built successfully."
  echo ""

  if [ -d "$DEVTOOLS_DIR/RecoilEngine/docker-build-v2" ]; then
    step "4/5 Engine build"
    echo ""
    read -rp "Build engine from source? [y/N] " build_engine
    if [[ "$build_engine" =~ ^[Yy]$ ]]; then
      local engine_arch
      case "$(uname -m)" in
        x86_64) engine_arch="amd64" ;;
        aarch64|arm64) engine_arch="arm64" ;;
        *) engine_arch="amd64" ;;
      esac
      info "Building Recoil engine (${engine_arch}-linux, this may take a while)..."
      bash "$DEVTOOLS_DIR/RecoilEngine/docker-build-v2/build.sh" --arch "$engine_arch" linux
    fi
    echo ""
  else
    step "4/5 Engine build"
    echo ""
    info "RecoilEngine not cloned -- skipping. Clone with: just repos::clone extra"
    echo ""
  fi

  step "5/5 Symlinks to game directory"
  echo ""
  local game_dir
  game_dir="$(detect_game_dir 2>/dev/null)" || true
  if [ -z "$game_dir" ]; then
    info "No game directory detected. Set BAR_GAME_DIR to enable linking."
    echo ""
  else
    local available=()
    local name
    [ -d "$DEVTOOLS_DIR/RecoilEngine" ] && available+=("engine")
    [ -d "$DEVTOOLS_DIR/BYAR-Chobby" ] && available+=("chobby")
    [ -d "$DEVTOOLS_DIR/Beyond-All-Reason" ] && available+=("bar")

    if [ "${#available[@]}" -gt 0 ]; then
      echo " Available repos to symlink into $game_dir:"
      for name in "${available[@]}"; do
        case "$name" in
          engine) echo -e " ${BOLD}engine${NC} -> $game_dir/engine/local-build/" ;;
          chobby) echo -e " ${BOLD}chobby${NC} -> $game_dir/games/BYAR-Chobby/" ;;
          bar) echo -e " ${BOLD}bar${NC} -> $game_dir/games/Beyond-All-Reason/" ;;
        esac
      done
      echo ""
      warn "This will replace any existing directories at these paths with symlinks."
      read -rp "Symlink all? [y/N] " do_link
      if [[ "$do_link" =~ ^[Yy]$ ]]; then
        # Pin the detected dir so each cmd_link call resolves the same path.
        BAR_GAME_DIR="$game_dir"
        for name in "${available[@]}"; do
          cmd_link "$name"
        done
      fi
    else
      info "No linkable repos cloned yet."
    fi
  fi
  echo ""

  echo -e "${BOLD}=== Setup Complete ===${NC}"
  echo ""
  echo " Your workspace is ready. Next steps:"
  echo ""
  echo -e " ${BOLD}just services::up${NC} Start Teiserver + PostgreSQL"
  echo -e " ${BOLD}just services::up lobby${NC} ...and launch bar-lobby"
  echo -e " ${BOLD}just services::up spads${NC} ...and start SPADS autohost"
  echo -e " ${BOLD}just engine::build linux${NC} Build the Recoil engine"
  echo -e " ${BOLD}just link::status${NC} Show symlink status"
  echo -e " ${BOLD}just repos::status${NC} Show repository status"
  echo ""
  echo " To use your own forks, copy repos.conf to repos.local.conf"
  echo " and edit the URLs/branches. Then run: just repos::clone"
  echo ""
}

# Lighter-weight setup for an already-provisioned machine: check
# prerequisites, clone any missing core repos, and (re)build Docker images.
cmd_setup() {
  echo -e "${BOLD}=== BAR Dev Environment Setup ===${NC}"
  echo ""
  check_prerequisites || exit 1

  local missing_core=0
  local i
  load_repos_conf
  for i in "${!REPO_DIRS[@]}"; do
    if [ "${REPO_GROUPS[$i]}" = "core" ] && [ ! -d "$DEVTOOLS_DIR/${REPO_DIRS[$i]}/.git" ]; then
      missing_core=1
      break
    fi
  done

  if [ "$missing_core" -eq 1 ]; then
    warn "Core repositories are missing. Cloning them now..."
    echo ""
    cmd_clone core
    echo ""
  fi

  install_dockerignore
  info "Building Docker images..."
  $COMPOSE build teiserver
  $COMPOSE --profile spads pull spads
  ok "Images built successfully."

  echo ""
  echo -e " Next steps:"
  echo -e " ${BOLD}just services::up${NC} Start all services"
  echo -e " ${BOLD}just services::up lobby${NC} Start all services + bar-lobby"
  echo ""
}

# Print the BAR game data directory and return 0, or return 1 (no output)
# when it can't be determined. BAR_GAME_DIR overrides detection; otherwise
# we look in the XDG state dir used by the official launcher.
detect_game_dir() {
  if [ -n "${BAR_GAME_DIR:-}" ]; then
    echo "$BAR_GAME_DIR"
    return 0
  fi
  local xdg_state="${XDG_STATE_HOME:-$HOME/.local/state}"
  local candidate="$xdg_state/Beyond All Reason"
  if [ -d "$candidate" ]; then
    echo "$candidate"
    return 0
  fi
  return 1
}

# With no argument: print symlink status for all known targets.
# With an argument (engine|chobby|bar): symlink that checkout/build into the
# game directory, replacing an existing symlink but refusing to clobber a
# real directory.
cmd_link() {
  local target="${1:-}"
  local game_dir
  game_dir="$(detect_game_dir 2>/dev/null)" || true

  if [ -z "$target" ]; then
    # Status mode.
    echo -e "${BOLD}=== Symlink Status ===${NC}"
    echo ""
    if [ -z "$game_dir" ]; then
      warn "Game directory not found. Set BAR_GAME_DIR env var or install BAR to the default location."
      echo ""
      return 0
    fi
    info "Game directory: ${game_dir}"
    echo ""

    local -A link_map=(
      [engine]="$game_dir/engine/local-build"
      [chobby]="$game_dir/games/BYAR-Chobby"
      [bar]="$game_dir/games/Beyond-All-Reason"
    )
    local name
    for name in engine chobby bar; do
      local link_path="${link_map[$name]}"
      if [ -L "$link_path" ]; then
        local link_target
        link_target="$(readlink -f "$link_path" 2>/dev/null || echo "?")"
        printf " %-10s ${GREEN}linked${NC} -> %s\n" "$name" "$link_target"
      elif [ -e "$link_path" ]; then
        printf " %-10s ${YELLOW}exists (not a symlink)${NC} at %s\n" "$name" "$link_path"
      else
        printf " %-10s ${DIM}not linked${NC}\n" "$name"
      fi
    done
    echo ""
    return 0
  fi

  if [ -z "$game_dir" ]; then
    err "Game directory not found. Set BAR_GAME_DIR env var or install BAR to the default location."
    exit 1
  fi

  local source_path link_path
  case "$target" in
    engine)
      source_path="$DEVTOOLS_DIR/RecoilEngine/build-linux/install"
      link_path="$game_dir/engine/local-build"
      ;;
    chobby)
      source_path="$DEVTOOLS_DIR/BYAR-Chobby"
      link_path="$game_dir/games/BYAR-Chobby"
      ;;
    bar)
      source_path="$DEVTOOLS_DIR/Beyond-All-Reason"
      link_path="$game_dir/games/Beyond-All-Reason"
      ;;
    *)
      err "Unknown link target: $target"
      echo " Valid targets: engine, chobby, bar"
      exit 1
      ;;
  esac

  # -e alone dereferences symlinks, so also accept a dangling symlink source.
  if [ ! -e "$source_path" ] && [ ! -L "$source_path" ]; then
    err "Source not found: $source_path"
    if [ "$target" = "engine" ]; then
      echo " Build the engine first: just engine::build linux"
    else
      echo " Clone the repo first: just repos::clone extra"
    fi
    exit 1
  fi

  if [ -L "$link_path" ]; then
    info "Replacing existing symlink at $link_path"
    rm "$link_path"
  elif [ -e "$link_path" ]; then
    # Never delete real user data; require a manual decision.
    warn "$link_path already exists and is not a symlink. Skipping."
    warn "Remove it manually if you want to replace it."
    return 1
  fi

  mkdir -p "$(dirname "$link_path")"
  ln -s "$source_path" "$link_path"
  ok "Linked $target: $link_path -> $source_path"
}