From a38a2181251387cf1edcf65b194d887b493d326a Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sat, 11 Oct 2025 20:17:49 +0200 Subject: [PATCH 01/31] Shoginet ts rewrite --- .gitignore | 9 +- README.md | 117 ++- biome.json | 41 + config.py | 363 ------- config/default.json | 18 + config/local.json | 1 + config/test.json | 4 + consts.py | 20 - cpuid.py | 239 ----- engines.py | 258 ----- engines/.gitignore | 2 + errors.py | 14 - intro.py | 16 - logger.py | 130 --- package-lock.json | 1092 ++++++++++++++++++++ package.json | 45 + progressReporter.py | 57 - build-fairy.sh => scripts/fairy.sh | 9 +- build-yaneuraou.sh => scripts/yaneuraou.sh | 27 +- shoginet.py | 229 ---- signals.py | 33 - src/config/client.ts | 48 + src/config/server.ts | 61 ++ src/consts.ts | 19 + src/engine.ts | 255 +++++ src/http.ts | 113 ++ src/logger.ts | 21 + src/main.ts | 65 ++ src/stats-reporter.ts | 38 + src/systemd.ts | 39 + src/types.ts | 73 ++ src/version.ts | 52 + src/work/analysis.ts | 86 ++ src/work/move.ts | 40 + src/work/puzzle.ts | 93 ++ src/work/util.ts | 55 + src/worker-manager.ts | 163 +++ src/worker.ts | 173 ++++ systemd.py | 127 --- test/main.ts | 96 ++ test/server-config.json | 92 ++ test/works.ts | 100 ++ tsconfig.json | 36 + util.py | 94 -- worker.py | 513 --------- 45 files changed, 3010 insertions(+), 2166 deletions(-) create mode 100644 biome.json delete mode 100644 config.py create mode 100644 config/default.json create mode 100644 config/local.json create mode 100644 config/test.json delete mode 100644 consts.py delete mode 100644 cpuid.py delete mode 100644 engines.py create mode 100644 engines/.gitignore delete mode 100644 errors.py delete mode 100644 intro.py delete mode 100644 logger.py create mode 100644 package-lock.json create mode 100644 package.json delete mode 100644 progressReporter.py rename build-fairy.sh => scripts/fairy.sh (78%) rename build-yaneuraou.sh => scripts/yaneuraou.sh (56%) delete mode 100644 
shoginet.py delete mode 100644 signals.py create mode 100644 src/config/client.ts create mode 100644 src/config/server.ts create mode 100644 src/consts.ts create mode 100644 src/engine.ts create mode 100644 src/http.ts create mode 100644 src/logger.ts create mode 100644 src/main.ts create mode 100644 src/stats-reporter.ts create mode 100644 src/systemd.ts create mode 100644 src/types.ts create mode 100644 src/version.ts create mode 100644 src/work/analysis.ts create mode 100644 src/work/move.ts create mode 100644 src/work/puzzle.ts create mode 100644 src/work/util.ts create mode 100644 src/worker-manager.ts create mode 100644 src/worker.ts delete mode 100644 systemd.py create mode 100644 test/main.ts create mode 100644 test/server-config.json create mode 100644 test/works.ts create mode 100644 tsconfig.json delete mode 100644 util.py delete mode 100644 worker.py diff --git a/.gitignore b/.gitignore index 8348214..7f32e4b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,2 @@ -.vscode/ -*.ini -YaneuraOu/ -Fairy-Stockfish/ -__pycache__/ -/YaneuraOu-by-gcc* -/fairy-stockfish-largeboard* \ No newline at end of file +.vscode +node_modules \ No newline at end of file diff --git a/README.md b/README.md index baa6d08..875a59d 100644 --- a/README.md +++ b/README.md @@ -1,43 +1,74 @@ -# Shoginet - Distributed Network for [lishogi.org](lishogi.org) - -Based on [fairyfishnet](https://github.com/gbtami/fairyfishnet). - -## How to setup - -### Linux - -You need to have both engines: - -- YaneuraOu NNUE -- Fairy-Stockfish - -To achieve this you can use the provided scripts - `build-yaneuraou.sh` for YaneuraOu and `build-fairy.sh` to build Fairy-Stockfish. -The scripts first clone [YaneuraOu github](https://github.com/yaneurao/YaneuraOu) or [Fairy-Stockfish](https://github.com/fairy-stockfish/Fairy-Stockfish) and then run the `make`. - -You can also try downloading YaneuraOu from https://github.com/yaneurao/YaneuraOu/actions using GitHub Actions artifacts. 
- -You can also download Fairy-Stockfish from [https://fairy-stockfish.github.io/download/](https://fairy-stockfish.github.io/download/), make sure to pick 'all-variants' - -To test that the engines work on you machine just run the engine `./YaneuraOu-by-gcc`(adjust command if necessary). and then enter the following commands: - -``` -usi -isready -position sfen lnsgkgsnl/1r5b1/ppppppppp/9/9/9/PPPPPPPPP/1B5R1/LNSGKGSNL b - 1 moves 7g7f -go -``` - -If the engine didn't crash and you got some response, it looks like ti works. Yeah! Now test `./fairy-stockfish-largeboard_x86-64` in the same way. - -If you have YaneuraOu and Fairy-Stockfish ready and python3 installed just run `python3 ./shoginet.py`, it will ask you about what working directory you want to use, path to the engine and similar things, leaving everything default should be fine. - -Currently no key is required. -If you want to go over this setup step again, just delete shoginet.ini. - -### Windows - -Windows is not supported right now. Although almost everything should be fine. You will obviously have to compile YaneuraOu for windows and provide the correct path, when shoginet.py asks you the first time. - -## How it works - -Every once in a while shoginet running on your computer asks lishogi.org for some work. If someone requested analysis of their game on lishogi.org, you may receive this work. The work is a simple json containing mainly the initial position and sequence of moves. You then run engine analysis on these data and send the results back to lishogi.org. +# Shoginet + +**Distributed network for [Lishogi.org](https://lishogi.org)** + +## Installation + +```bash +git clone https://github.com/WandererXII/shoginet.git +cd shoginet +npm install +``` + +**Note:** You must obtain and install engines **before** running Shoginet: + +* **Linux - from source:** Use `./scripts/yaneuraou.sh` and `./scripts/fairy.sh` to download and build engines yourself. 
Make sure you have a C/C++ compiler and build tools installed. It will take a few minutes per engine. +* **Ready to use binary:** Make sure to download the correct version for your OS and CPU. + - YaneuraOu - [YaneuraOu repo releases](https://github.com/yaneurao/YaneuraOu/releases) + - Fairy Stockfish - [Fairy Stockfish website](https://fairy-stockfish.github.io/download/), download largeboard (all variants) version + +Do not forget to check if path to engines is correctly set (next step) + +## Configuration + +Configuration is stored in `config` directory. Write your own overrides to `local.json`. + +Most importantly you want to make sure that engine path is correctly set. By default we look into `engines` directory. _Yaneuraou_ engine default name is `YaneuraOu-by-gcc` and _Fairy Stockfish_ default name is `fairy-stockfish` + +## Usage + +**Run tests first** to make sure everything works, especially the engines: + +```bash +npm run test +``` + +You can start Shoginet directly by running: + +```bash +npm run start +``` + +You will probably want to run Shoginet with a process manager. For systemd (Linux) integration: + +```bash +npm run systemd > /etc/systemd/system/shoginet.service +sudo systemctl daemon-reload +sudo systemctl enable --now shoginet # enable and start +``` + +## Shoginet workflow + +1. **Start!** + - Shoginet is initiated and fetches config from the server. The config sets parameters for move generation, analysis and puzzle verification. + +2. **Request Work** + - Shoginet -> Lishogi: "Give me work!" + +3. **Receive Game** + - Lishogi -> Shoginet: "Here's a game to analyse" + - The work could be _analysis_, _move generation_ or _puzzle verification_ + +4. **Analyze** + - Shoginet is working... + - This consumes CPU + +5. **Submit Results** + - Shoginet -> Lishogi: "Analysis result" + +6. **Repeat** + - Lishogi -> Shoginet: "Thanks, here's more work :)" + - Rinse & repeat + +7. 
**Stop** + - Stop Shoginet when you need CPU power diff --git a/biome.json b/biome.json new file mode 100644 index 0000000..9f8d223 --- /dev/null +++ b/biome.json @@ -0,0 +1,41 @@ +{ + "$schema": "https://biomejs.dev/schemas/2.2.4/schema.json", + "files": { + "includes": ["**/src/**/*.ts", "**/test/**/*.ts"] + }, + "formatter": { + "enabled": true, + "lineWidth": 80, + "indentStyle": "space" + }, + "linter": { + "enabled": true, + "rules": { + "recommended": true, + "style": { + "noNonNullAssertion": "off" + }, + "suspicious": { + "noExplicitAny": "off", + "noPrototypeBuiltins": "off", + "noUnsafeDeclarationMerging": "off" + }, + "complexity": { + "useOptionalChain": "off" + }, + "correctness": {}, + "performance": {} + } + }, + "assist": { "actions": { "source": { "organizeImports": "on" } } }, + "vcs": { + "enabled": true, + "clientKind": "git", + "useIgnoreFile": true + }, + "javascript": { + "formatter": { + "quoteStyle": "single" + } + } +} diff --git a/config.py b/config.py deleted file mode 100644 index 503f4ab..0000000 --- a/config.py +++ /dev/null @@ -1,363 +0,0 @@ -import configparser -import argparse -import os -import multiprocessing -import sys -import typing -import re -import requests -import typing -from errors import ConfigError -from logger import CensorLogFilter -import consts -import util -import urllib.parse as urlparse -from engines import Engine -from logger import log - - -def load_conf(args: typing.Any) -> configparser.ConfigParser: - conf = configparser.ConfigParser() - conf.add_section("Shoginet") - conf.add_section("Engines") - - if not args.no_conf: - if not args.conf and not os.path.isfile(consts.DEFAULT_CONFIG): - return configure(args) - - config_file = args.conf or consts.DEFAULT_CONFIG - log.debug("Using config file: %s", config_file) - - if not conf.read(config_file): - raise ConfigError("Could not read config file: %s" % config_file) - - if hasattr(args, "engine_dir") and args.engine_dir is not None: - conf.set("Shoginet", 
"EngineDir", args.engine_dir) - if hasattr(args, "yaneuraou_command") and args.yaneuraou_command is not None: - conf.set("Shoginet", "YaneraOuCommand", args.yaneuraou_command) - if hasattr(args, "fairy_command") and args.fairy_command is not None: - conf.set("Shoginet", "FairyCommand", args.fairy_command) - if hasattr(args, "key") and args.key is not None: - conf.set("Shoginet", "Key", args.key) - if hasattr(args, "cores") and args.cores is not None: - conf.set("Shoginet", "Cores", args.cores) - if hasattr(args, "memory") and args.memory is not None: - conf.set("Shoginet", "Memory", args.memory) - if hasattr(args, "threads") and args.threads is not None: - conf.set("Shoginet", "Threads", str(args.threads)) - if hasattr(args, "endpoint") and args.endpoint is not None: - conf.set("Shoginet", "Endpoint", args.endpoint) - if hasattr(args, "fixed_backoff") and args.fixed_backoff is not None: - conf.set("Shoginet", "FixedBackoff", str(args.fixed_backoff)) - for option_name, option_value in args.setoptionYaneuraou: - conf.set("YaneuraOu", option_name.lower(), option_value) - for option_name, option_value in args.setoptionFairy: - conf.set("Fairy", option_name.lower(), option_value) - - log.addFilter(CensorLogFilter(conf_get(conf, "Key"))) - - return conf - - -def config_input(prompt: str, validator: typing.Callable[[str], typing.Any], out: typing.TextIO) -> typing.Any: - while True: - if out == sys.stdout: - inp = input(prompt) - else: - if prompt: - out.write(prompt) - out.flush() - - inp = input() - - try: - return validator(inp) - except ConfigError as error: - print(error, file=out) - - -def configure(args: typing.Any) -> configparser.ConfigParser: - if sys.stdout.isatty(): - out = sys.stdout - try: - # Unix: Importing for its side effect - import readline # noqa: F401 - except ImportError: - # Windows - pass - else: - out = sys.stderr - - print(file=out) - print("### Configuration", file=out) - print(file=out) - - conf = configparser.ConfigParser() - 
conf.add_section("Shoginet") - conf.add_section("YaneuraOu") - conf.add_section("Fairy") - - # Ensure the config file is going to be writable - config_file = os.path.abspath(args.conf or consts.DEFAULT_CONFIG) - if os.path.isfile(config_file): - conf.read(config_file) - with open(config_file, "r+"): - pass - else: - with open(config_file, "w"): - pass - os.remove(config_file) - - # Engines working directory - engine_dir = config_input("Engine working directory (default: %s): " % os.path.abspath("."), - validate_engine_dir, out) - conf.set("Shoginet", "EngineDir", engine_dir) - - # Engines command - print(file=out) - print("YaneuraOu is licensed under the GNU General Public License v3.", file=out) - print("You can find the source at: https://github.com/yaneuraou/YaneuraOu", file=out) - print(file=out) - print("You can build custom YaneuraOu yourself and provide", file=out) - print("the path or automatically download a precompiled binary.", file=out) - print(file=out) - yaneuraou_command = config_input("Path or command for yaneuraOu (default works on linux): ", - lambda v: validate_command( - v, conf), - out) - if not yaneuraou_command: - conf.remove_option("Shoginet", "YaneuraOuCommand") - else: - conf.set("Shoginet", "YaneuraOuCommand", yaneuraou_command) - - print(file=out) - print("Fairy-Stockfish is licensed under the GNU General Public License v3.", file=out) - print("You can find the source at: https://github.com/ianfab/Fairy-Stockfish", file=out) - print(file=out) - print("You can build custom Fairy-Stockfish yourself and provide", file=out) - print("the path or automatically download a precompiled binary.", file=out) - print(file=out) - fairy_command = config_input("Path or command for fairy stockfish (default works on linux): ", - lambda v: validate_command( - v, conf), - out) - if not fairy_command: - conf.remove_option("Shoginet", "FairyCommand") - else: - conf.set("Shoginet", "FairyCommand", fairy_command) - print(file=out) - - # Cores - max_cores = 
multiprocessing.cpu_count() - default_cores = max(1, max_cores - 1) - cores = config_input("Number of cores to use for engine threads (default %d, max %d): " % (default_cores, max_cores), - validate_cores, out) - conf.set("Shoginet", "Cores", str(cores)) - - # Advanced options - endpoint = args.endpoint or consts.DEFAULT_ENDPOINT - if config_input("Configure advanced options? (default: no) ", lambda o: str(util.parse_bool(o)), out): - endpoint = config_input("Shoginet API endpoint (default: %s): " % ( - endpoint, ), lambda inp: validate_endpoint(inp, endpoint), out) - - conf.set("Shoginet", "Endpoint", endpoint) - - # Change key? - key = None - if conf.has_option("Shoginet", "Key"): - if not config_input("Change Shoginet key? (default: no) ", lambda k: str(util.parse_bool(k)), out): - key = conf.get("Shoginet", "Key") - - # Key - if key is None: - key = config_input("Personal Shoginet key (append ! to force): ", - lambda v: validate_key(v, conf, network=True), out) - conf.set("Shoginet", "Key", key) - log.addFilter(CensorLogFilter(key)) - - # Confirm - print(file=out) - while not config_input("Done. Write configuration to %s now? 
(default: yes) " % (config_file, ), - lambda v: str(util.parse_bool(v, True)), out): - pass - - # Write configuration - with open(config_file, "w") as f: - conf.write(f) - - print("Configuration saved.", file=out) - return conf - - -def validate_engine_dir(engine_dir: typing.Optional[str]) -> str: - if not engine_dir or not engine_dir.strip(): - return os.path.abspath(".") - - engine_dir = os.path.abspath(os.path.expanduser(engine_dir.strip())) - - if not os.path.isdir(engine_dir): - raise ConfigError("EngineDir not found: %s" % engine_dir) - - return engine_dir - - -def validate_command(command: typing.Optional[str], conf: configparser.ConfigParser) -> typing.Optional[str]: - if not command or not command.strip(): - return None - - command = command.strip() - engine_dir = get_engine_dir(conf) - - # Ensure the required options are supported - engine = Engine(False, command, engine_dir) - engine.usi() - - del engine - - return command - - -def validate_cores(cores: typing.Optional[str]) -> int: - if not cores or cores.strip().lower() == "auto": - return max(1, multiprocessing.cpu_count() - 1) - - if cores.strip().lower() == "all": - return multiprocessing.cpu_count() - - try: - coresNum = int(cores.strip()) - except ValueError: - raise ConfigError("Number of cores must be an integer") - - if coresNum < 1: - raise ConfigError("Need at least one core") - - if coresNum > multiprocessing.cpu_count(): - raise ConfigError( - "At most %d cores available on your machine " % multiprocessing.cpu_count()) - - return coresNum - - -def validate_threads(threads: typing.Optional[str], conf: configparser.ConfigParser) -> int: - cores = validate_cores(conf_get(conf, "Cores")) - - if not threads or str(threads).strip().lower() == "auto": - return min(consts.DEFAULT_THREADS, cores) - - try: - threadsNum = int(str(threads).strip()) - except ValueError: - raise ConfigError("Number of threads must be an integer") - - if threadsNum < 1: - raise ConfigError("Need at least one thread per 
engine process") - - if threadsNum > cores: - raise ConfigError( - "%d cores is not enough to run %d threads" % (cores, threadsNum)) - - return threadsNum - - -def validate_memory(memory: typing.Optional[str], conf: configparser.ConfigParser) -> int: - cores = validate_cores(conf_get(conf, "Cores")) - threads = validate_threads(conf_get(conf, "Threads"), conf) - processes = cores // threads - - if not memory or not memory.strip() or memory.strip().lower() == "auto": - return processes * consts.HASH_DEFAULT - - try: - memoryNum = int(memory.strip()) - except ValueError: - raise ConfigError("Memory must be an integer") - - if memoryNum < processes * consts.HASH_MIN: - raise ConfigError("Not enough memory for a minimum of %d x %d MB in hash tables" % ( - processes, consts.HASH_MIN)) - - if memoryNum > processes * consts.HASH_MAX: - raise ConfigError("Cannot reasonably use more than %d x %d MB = %d MB for hash tables" % ( - processes, consts.HASH_MAX, processes * consts.HASH_MAX)) - - return memoryNum - - -def validate_endpoint(endpoint: typing.Optional[str], default: str = consts.DEFAULT_ENDPOINT) -> str: - if not endpoint or not endpoint.strip(): - return default - - if not endpoint.endswith("/"): - endpoint += "/" - - url_info = urlparse.urlparse(endpoint) - if url_info.scheme not in ["http", "https"]: - raise ConfigError( - "Endpoint does not have http:// or https:// URL scheme") - - return endpoint - - -def validate_key(key: typing.Optional[str], conf: configparser.ConfigParser, network: bool = False) -> str: - if not key or not key.strip(): - return "" - - key = key.strip() - - network = network and not key.endswith("!") - key = key.rstrip("!").strip() - - if not re.match(r"^[a-zA-Z0-9]+$", key): - raise ConfigError("Shoginet key is expected to be alphanumeric") - - if network: - response = requests.get(get_endpoint( - conf, "key/%s" % key), timeout=consts.HTTP_TIMEOUT) - if response.status_code == 404: - raise ConfigError("Invalid or inactive Shoginet key") - 
else: - response.raise_for_status() - - return key - - -def get_engine_dir(conf: configparser.ConfigParser) -> str: - return validate_engine_dir(conf_get(conf, "EngineDir")) - - -def get_endpoint(conf: configparser.ConfigParser, sub: str = "") -> str: - return urlparse.urljoin(validate_endpoint(conf_get(conf, "Endpoint")), sub) - - -def get_yaneuraou_command(conf: configparser.ConfigParser, update: bool = True) -> str: - yane_command = validate_command( - conf_get(conf, "YaneuraOuCommand"), conf) - if not yane_command: - filename = util.yaneuraou_filename() - return typing.cast(str, validate_command(os.path.join(".", filename), conf)) - else: - return yane_command - - -def get_fairy_command(conf: configparser.ConfigParser, update: bool = True) -> str: - fairy_command = validate_command( - conf_get(conf, "FairyCommand"), conf) - if not fairy_command: - filename = util.fairy_filename() - return typing.cast(str, validate_command(os.path.join(".", filename), conf)) - else: - return fairy_command - - -def get_key(conf: configparser.ConfigParser) -> str: - return validate_key(conf_get(conf, "Key"), conf, network=False) - - -def conf_get(conf: configparser.ConfigParser, key: str, default: typing.Optional[str] = None, section: str = "Shoginet") -> typing.Optional[str]: - if not conf.has_section(section): - return default - elif not conf.has_option(section, key): - return default - else: - return conf.get(section, key) diff --git a/config/default.json b/config/default.json new file mode 100644 index 0000000..a0ccbd9 --- /dev/null +++ b/config/default.json @@ -0,0 +1,18 @@ +{ + "workers": 1, + "engines": { + "yaneuraou": { + "path": "./engines/YaneuraOu-by-gcc", + "threads": 1, + "memory": 64 + }, + "fairy": { + "path": "./engines/fairy-stockfish", + "threads": 1, + "memory": 64 + } + }, + "logger": "info", + "endpoint": "http://localhost:9663", + "key": "" +} diff --git a/config/local.json b/config/local.json new file mode 100644 index 0000000..9e26dfe --- /dev/null +++ 
b/config/local.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/config/test.json b/config/test.json new file mode 100644 index 0000000..cb6cdad --- /dev/null +++ b/config/test.json @@ -0,0 +1,4 @@ +{ + "endpoint": "http://localhost:1080", + "logger": "debug" +} \ No newline at end of file diff --git a/consts.py b/consts.py deleted file mode 100644 index 4a6b87a..0000000 --- a/consts.py +++ /dev/null @@ -1,20 +0,0 @@ -SN_VERSION = "4.0.0" -PROGRESS = 15 -ENGINE = 5 -LOGGING_VERBOSITY = 1 -DEFAULT_ENDPOINT = "https://lishogi.org/shoginet/" -DEFAULT_THREADS = 2 -HASH_MIN = 64 -HASH_DEFAULT = 256 -HASH_MAX = 512 -MAX_BACKOFF = 30.0 -MAX_FIXED_BACKOFF = 3.0 -HTTP_TIMEOUT = 15.0 -STAT_INTERVAL = 60.0 -DEFAULT_CONFIG = "shoginet.ini" -PROGRESS_REPORT_INTERVAL = 5.0 -LVL_SKILL = [-4, 0, 1, 3, 6, 12, 16, 20] -LVL_MOVETIMES = [50, 50, 75, 150, 200, 300, 400, 1000] -LVL_DEPTHS = [1, 1, 1, 2, 3, 6, 10, 22] -LVL_NODES = [1, 1, 5, 10, 15, 0, 0, 0] -DEAD_ENGINE_ERRORS = (EOFError, IOError, BrokenPipeError) diff --git a/cpuid.py b/cpuid.py deleted file mode 100644 index 29ae0a2..0000000 --- a/cpuid.py +++ /dev/null @@ -1,239 +0,0 @@ -import contextlib -import ctypes -import typing -import os -import sys -import platform -import typing -import threading -import subprocess -from logger import log -import struct -import string - - -@contextlib.contextmanager -def make_cpuid() -> typing.Any: - # Loosely based on cpuid.py by Anders Høst, licensed MIT: - # https://github.com/flababah/cpuid.py - - # Prepare system information - is_64bit = ctypes.sizeof(ctypes.c_void_p) == 8 - if platform.machine().lower() not in ["amd64", "x86_64", "x86", "i686"]: - raise OSError("Got no CPUID opcodes for %s" % platform.machine()) - - # Struct for return value - class CPUID_struct(ctypes.Structure): - _fields_ = [("eax", ctypes.c_uint32), - ("ebx", ctypes.c_uint32), - ("ecx", ctypes.c_uint32), - ("edx", ctypes.c_uint32)] - - # Select kernel32 or libc - if sys.platform == "win32": - libc = 
ctypes.windll.kernel32 - else: - libc = ctypes.cdll.LoadLibrary("") - - # Select opcodes - if is_64bit: - if sys.platform == "win32": - # Windows x86_64 - # Three first call registers : RCX, RDX, R8 - # Volatile registers : RAX, RCX, RDX, R8-11 - opc = [ - 0x53, # push %rbx - 0x89, 0xd0, # mov %edx,%eax - 0x49, 0x89, 0xc9, # mov %rcx,%r9 - 0x44, 0x89, 0xc1, # mov %r8d,%ecx - 0x0f, 0xa2, # cpuid - 0x41, 0x89, 0x01, # mov %eax,(%r9) - 0x41, 0x89, 0x59, 0x04, # mov %ebx,0x4(%r9) - 0x41, 0x89, 0x49, 0x08, # mov %ecx,0x8(%r9) - 0x41, 0x89, 0x51, 0x0c, # mov %edx,0xc(%r9) - 0x5b, # pop %rbx - 0xc3 # retq - ] - else: - # Posix x86_64 - # Three first call registers : RDI, RSI, RDX - # Volatile registers : RAX, RCX, RDX, RSI, RDI, R8-11 - opc = [ - 0x53, # push %rbx - 0x89, 0xf0, # mov %esi,%eax - 0x89, 0xd1, # mov %edx,%ecx - 0x0f, 0xa2, # cpuid - 0x89, 0x07, # mov %eax,(%rdi) - 0x89, 0x5f, 0x04, # mov %ebx,0x4(%rdi) - 0x89, 0x4f, 0x08, # mov %ecx,0x8(%rdi) - 0x89, 0x57, 0x0c, # mov %edx,0xc(%rdi) - 0x5b, # pop %rbx - 0xc3 # retq - ] - else: - # CDECL 32 bit - # Three first call registers : Stack (%esp) - # Volatile registers : EAX, ECX, EDX - opc = [ - 0x53, # push %ebx - 0x57, # push %edi - 0x8b, 0x7c, 0x24, 0x0c, # mov 0xc(%esp),%edi - 0x8b, 0x44, 0x24, 0x10, # mov 0x10(%esp),%eax - 0x8b, 0x4c, 0x24, 0x14, # mov 0x14(%esp),%ecx - 0x0f, 0xa2, # cpuid - 0x89, 0x07, # mov %eax,(%edi) - 0x89, 0x5f, 0x04, # mov %ebx,0x4(%edi) - 0x89, 0x4f, 0x08, # mov %ecx,0x8(%edi) - 0x89, 0x57, 0x0c, # mov %edx,0xc(%edi) - 0x5f, # pop %edi - 0x5b, # pop %ebx - 0xc3 # ret - ] - - code_size = len(opc) - code = (ctypes.c_ubyte * code_size)(*opc) - - if sys.platform == "win32": - # Allocate executable memory - libc.VirtualAlloc.restype = ctypes.c_void_p - libc.VirtualAlloc.argtypes = [ - ctypes.c_void_p, ctypes.c_size_t, ctypes.c_ulong, ctypes.c_ulong] - addr = libc.VirtualAlloc(None, code_size, 0x1000, 0x40) - if not addr: - raise MemoryError("Could not VirtualAlloc RWX memory") - else: - # 
Allocate memory - libc.valloc.restype = ctypes.c_void_p - libc.valloc.argtypes = [ctypes.c_size_t] - addr = libc.valloc(code_size) - if not addr: - raise MemoryError("Could not valloc memory") - - # Make executable - libc.mprotect.restype = ctypes.c_int - libc.mprotect.argtypes = [ - ctypes.c_void_p, ctypes.c_size_t, ctypes.c_int] - if 0 != libc.mprotect(addr, code_size, 1 | 2 | 4): - raise OSError("Failed to set RWX using mprotect") - - # Copy code to allocated executable memory. No need to flush instruction - # cache for CPUID. - ctypes.memmove(addr, code, code_size) - - # Create and yield callable - result = CPUID_struct() - func_type = ctypes.CFUNCTYPE(None, ctypes.POINTER( - CPUID_struct), ctypes.c_uint32, ctypes.c_uint32) - func_ptr = func_type(addr) - - def cpuid(eax: int, ecx: int = 0) -> typing.Any: - func_ptr(result, eax, ecx) - return result.eax, result.ebx, result.ecx, result.edx - - yield cpuid - - # Free - if sys.platform == "win32": - libc.VirtualFree.restype = ctypes.c_long - libc.VirtualFree.argtypes = [ - ctypes.c_void_p, ctypes.c_size_t, ctypes.c_ulong] - libc.VirtualFree(addr, 0, 0x8000) - else: - libc.free.restype = None - libc.free.argtypes = [ctypes.c_void_p] - libc.free(addr) - - -def open_process(command: typing.List[str], cwd: typing.Optional[str] = None, shell: bool = True, _popen_lock: threading.Lock = threading.Lock()) -> subprocess.Popen: - kwargs: typing.Dict[str, typing.Any] = { - "shell": shell, - "stdout": subprocess.PIPE, - "stderr": subprocess.STDOUT, - "stdin": subprocess.PIPE, - "bufsize": 1, # Line buffered - "universal_newlines": True, - } - - if cwd is not None: - kwargs["cwd"] = cwd - - # Prevent signal propagation from parent process - if sys.platform == "win32": - kwargs["creationflags"] = subprocess.CREATE_NEW_PROCESS_GROUP - else: - kwargs["preexec_fn"] = os.setpgrp - - with _popen_lock: - return subprocess.Popen(command, **kwargs) - - -def detect_cpu_capabilities() -> typing.Tuple[str, bool, bool, bool, bool]: - # 
Detects support for popcnt and pext instructions - vendor, modern, bmi2, sse42, avx2 = "", False, False, False, False - - # Run cpuid in subprocess for robustness in case of segfaults - cmd = [] - cmd.append(sys.executable) - cmd.append(__file__) - - process = typing.cast(typing.Any, open_process(cmd, shell=False)) - - # Parse output - while True: - line = process.stdout.readline() - if not line: - break - - line = line.rstrip() - log.debug("cpuid >> %s", line) - if not line: - continue - - columns = line.split() - if columns[0] == "CPUID": - pass - elif len(columns) == 5 and all(all(c in string.hexdigits for c in col) for col in columns): - eax, a, b, c, d = [int(col, 16) for col in columns] - - # vendor - if eax == 0: - vendor = struct.pack("III", b, d, c).decode("utf-8") - - # popcnt - if eax == 1 and c & (1 << 23): - modern = True - - # pext - if eax == 7 and b & (1 << 8): - bmi2 = True - - if eax == 1 and c & (1 << 20): - sse42 = True - - if eax == 7 and b & (1 << 5): - avx2 = True - else: - log.warning("Unexpected cpuid output: %s", line) - - # Done - process.communicate() - if process.returncode != 0: - log.error("cpuid exited with status code %d", process.returncode) - - return vendor, modern, bmi2, sse42, avx2 - - -def cpuid() -> None: - with make_cpuid() as cpuid: - headers = ["CPUID", "EAX", "EBX", "ECX", "EDX"] - print(" ".join(header.ljust(8) for header in headers).rstrip()) - - for eax in [0x0, 0x80000000]: - highest, _, _, _ = cpuid(eax) - for eax in range(eax, highest + 1): - a, b, c, d = cpuid(eax) - print("%08x %08x %08x %08x %08x" % (eax, a, b, c, d)) - - -if __name__ == "__main__": - cpuid() diff --git a/engines.py b/engines.py deleted file mode 100644 index 5ac81d7..0000000 --- a/engines.py +++ /dev/null @@ -1,258 +0,0 @@ -import threading -import subprocess -import typing -import os -import sys -import signal -import util -from consts import ENGINE -from logger import log - - -class Engine: - - def __init__(self, variants: bool, command: str, 
cwd: typing.Optional[str] = None, shell: bool = True, _popen_lock: threading.Lock = threading.Lock()) -> None: - kwargs: typing.Dict[str, typing.Any] = { - "shell": shell, - "stdout": subprocess.PIPE, - "stderr": subprocess.STDOUT, - "stdin": subprocess.PIPE, - "bufsize": 1, # Line buffered - "universal_newlines": True, - } - - if cwd is not None: - kwargs["cwd"] = cwd - - # Prevent signal propagation from parent process - if sys.platform == "win32": - kwargs["creationflags"] = subprocess.CREATE_NEW_PROCESS_GROUP - else: - kwargs["preexec_fn"] = os.setpgrp - - self.variants = variants - self.name = "fairy" if variants else "yaneuraou" - with _popen_lock: - self.engine_proccess = subprocess.Popen(command, **kwargs) - - def __del__(self) -> None: - if sys.platform == "win32": - self.engine_proccess.send_signal(signal.CTRL_BREAK_EVENT) - else: - os.killpg(self.engine_proccess.pid, signal.SIGKILL) - - # Try to avoid zombie by cleaning up any leftover stdout - try: - self.engine_proccess.communicate() - except IOError: - # Can happen from duplicate communication to engine_proccess - pass - - def send(self, line: str) -> None: - log.log(ENGINE, "%s(%s) << %s", - self.engine_proccess.pid, self.name, line) - assert self.engine_proccess.stdin is not None - self.engine_proccess.stdin.write(line + "\n") - self.engine_proccess.stdin.flush() - - def recv(self) -> str: - while True: - assert self.engine_proccess.stdout is not None - line = self.engine_proccess.stdout.readline() - if line == "": - raise EOFError() - - line = line.rstrip() - - log.log(ENGINE, "%s(%s) >> %s", - self.engine_proccess.pid, self.name, line) - - if line: - return line - - def recv_usi(self) -> typing.List[str]: - command_and_args = self.recv().split(None, 1) - if len(command_and_args) == 1: - return [command_and_args[0], ""] - else: - return command_and_args - - def usi(self) -> typing.Dict[str, str]: - self.send("usi") - - engine_info: typing.Dict[str, str] = {} - - while True: - command, arg = 
self.recv_usi() - - if command == "usiok": - return engine_info - elif command == "id": - name_and_value = arg.split(None, 1) - if len(name_and_value) == 2: - engine_info[name_and_value[0]] = name_and_value[1] - elif command == "option" or command == "Fairy-Stockfish": - pass - else: - log.warning( - "Unexpected engine response to usi: %s %s", command, arg) - - def isready(self) -> None: - self.send("isready") - while True: - command, arg = self.recv_usi() - if command == "readyok": - break - elif command == "info" and arg.startswith("string "): - pass - else: - log.warning( - "Unexpected engine response to isready: %s %s", command, arg) - - def setoption(self, name: str, value: str) -> None: - if value is True: - value = "true" - elif value is False: - value = "false" - elif value is None: - value = "none" - - self.send("setoption name %s value %s" % (name, value)) - - def set_variant_options(self, variant: str) -> None: - if not self.variants: - return - variant = variant.lower() - if variant == "standard": - self.setoption("USI_Variant", "shogi") - else: - self.setoption("USI_Variant", variant) - - def recv_bestmove(self) -> typing.Optional[str]: - while True: - command, arg = self.recv_usi() - if command == "bestmove": - bestmove = arg.split()[0] - if bestmove and bestmove != "(none)" and bestmove != "resign": - return bestmove - else: - return None - elif command == "info": - continue - else: - log.warning( - "Unexpected engine response to go: %s %s", command, arg) - - def recv_analysis(self) -> typing.Any: - scores: typing.List[str] = [] - nodes: typing.List[str] = [] - times: typing.List[str] = [] - pvs: typing.List[str] = [] - - bound: typing.List[str] = [] - - while True: - command, arg = self.recv_usi() - - if command == "bestmove": - return scores, nodes, times, pvs - elif command == "info": - depth: typing.Optional[int] = None - multipv = 1 - - def set_table(arr: typing.List[typing.Any], value: typing.Any) -> None: - while len(arr) < multipv: - 
arr.append([]) - while len(arr[multipv - 1]) <= (depth or 0): - arr[multipv - 1].append(None) - arr[multipv - 1][depth] = value - - tokens = (arg or "").split(" ") - while tokens: - parameter = tokens.pop(0) - - if parameter == "multipv": - multipv = int(tokens.pop(0)) - elif parameter == "depth": - depth = int(tokens.pop(0)) - elif parameter == "nodes": - set_table(nodes, int(tokens.pop(0))) - elif parameter == "time": - set_table(times, int(tokens.pop(0))) - elif parameter == "score": - kind = tokens.pop(0) - value = util.encode_score(kind, int(tokens.pop(0))) - is_bound = False - if tokens and tokens[0] in ["lowerbound", "upperbound"]: - is_bound = True - tokens.pop(0) - - was_bound = depth is None or len(bound) < multipv or len( - bound[multipv - 1]) <= depth or bound[multipv - 1][depth] - set_table(bound, is_bound) - - if was_bound or not is_bound: - set_table(scores, value) - elif parameter == "pv": - set_table(pvs, " ".join(tokens)) - break - else: - log.warning( - "Unexpected engine response to go: %s %s", command, arg) - - def recv_puzzle_analysis(self) -> typing.Any: - scores = [None for i in range(0, 3)] - while True: - command, arg = self.recv_usi() - - if command == "bestmove": - bestmove = arg.split()[0] - if bestmove and bestmove != "(none)" and bestmove != "resign": - return bestmove, [score for score in scores if score is not None] - else: - return None, None - elif command == "info": - multipv = 1 - score = None - tokens = (arg or "").split(" ") - while tokens: - token = tokens.pop(0) - if token == "multipv": - multipv = int(tokens.pop(0)) - elif token == "score": - kind = tokens.pop(0) - score = util.encode_score(kind, int(tokens.pop(0))) - if score is not None and len(scores) >= multipv: - scores[multipv - 1] = score - else: - log.warning( - "Unexpected engine response to go: %s %s", command, arg) - - def go(self, position: str, moves: typing.List[str], movetime: typing.Optional[int] = None, clock: typing.Optional[typing.Dict[str, int]] = None, 
depth: typing.Optional[int] = None, nodes: typing.Optional[int] = None) -> None: - self.send("position sfen %s moves %s" % (position, " ".join(moves))) - - builder = [] - builder.append("go") - if movetime is not None: - builder.append("movetime") - builder.append(str(movetime)) - if nodes is not None: - builder.append("nodes") - builder.append(str(nodes)) - if depth is not None: - builder.append("depth") - builder.append(str(depth)) - if clock is not None: - builder.append("btime") - builder.append(str(clock["btime"] * 10)) - builder.append("wtime") - builder.append(str(clock["wtime"] * 10)) - builder.append("byoyomi") - builder.append(str(clock["byo"] * 1000)) - if(clock["inc"] > 0): - builder.append("binc") - builder.append(str(clock["inc"] * 1000)) - builder.append("winc") - builder.append(str(clock["inc"] * 1000)) - - self.send(" ".join(builder)) diff --git a/engines/.gitignore b/engines/.gitignore new file mode 100644 index 0000000..c96a04f --- /dev/null +++ b/engines/.gitignore @@ -0,0 +1,2 @@ +* +!.gitignore \ No newline at end of file diff --git a/errors.py b/errors.py deleted file mode 100644 index f79b99d..0000000 --- a/errors.py +++ /dev/null @@ -1,14 +0,0 @@ -class ConfigError(Exception): - pass - - -class UpdateRequired(Exception): - pass - - -class Shutdown(Exception): - pass - - -class ShutdownSoon(Exception): - pass diff --git a/intro.py b/intro.py deleted file mode 100644 index a20cded..0000000 --- a/intro.py +++ /dev/null @@ -1,16 +0,0 @@ -import consts - - -def intro() -> str: - return r""" -. _________ . . -. (.. \_ , |\ /| -. \ O \ /| \ \/ / -. \______ \/ | \ / _ _ _ _ _ -. vvvv\ \ | / | ___| |__ ___ __ _(_)| \ | | ___| |_ -. \^^^^ == \_/ | / __| '_ \ / _ \ / _` | || \| |/ _ \ __| -. `\_ === \. | \__ \ | | | (_) | (_| | || |\ | __/ |_ -. / /\_ \ / | |___/_| |_|\___/ \__, |_||_| \_|\___|\__| %s -. |/ \_ \| / |___/ -. 
\________/ Distributed YaneuraOu analysis for lishogi -""".lstrip() % consts.SN_VERSION diff --git a/logger.py b/logger.py deleted file mode 100644 index ffc7b1a..0000000 --- a/logger.py +++ /dev/null @@ -1,130 +0,0 @@ -import logging -import sys -import collections -import typing -from consts import PROGRESS, ENGINE, LOGGING_VERBOSITY - - -logging.addLevelName(PROGRESS, "PROGRESS") -logging.addLevelName(ENGINE, "ENGINE") - - -class LogFormatter(logging.Formatter): - def format(self, record: logging.LogRecord) -> str: - # Format message - msg = super(LogFormatter, self).format(record) - - # Add level name - if record.levelno in [logging.INFO, PROGRESS]: - with_level = msg - else: - with_level = "%s: %s" % (record.levelname, msg) - - # Add thread name - if record.threadName == "MainThread": - return with_level - else: - return "%s: %s" % (record.threadName, with_level) - - -class CollapsingLogHandler(logging.StreamHandler): - def __init__(self, stream: typing.TextIO = sys.stdout) -> None: - super(CollapsingLogHandler, self).__init__(stream) - self.last_level = logging.INFO - self.last_len = 0 - - def emit(self, record: logging.LogRecord) -> None: - try: - if self.last_level == PROGRESS: - if record.levelno == PROGRESS: - self.stream.write("\r") - else: - self.stream.write("\n") - - msg = self.format(record) - if record.levelno == PROGRESS: - self.stream.write(msg.ljust(self.last_len)) - self.last_len = max(len(msg), self.last_len) - else: - self.last_len = 0 - self.stream.write(msg) - self.stream.write("\n") - - self.last_level = record.levelno - self.flush() - except Exception: - self.handleError(record) - - -class TailLogHandler(logging.Handler): - def __init__(self, capacity: int, max_level: int, flush_level: int, target_handler: logging.StreamHandler) -> None: - super(TailLogHandler, self).__init__() - self.buffer: collections.deque = collections.deque(maxlen=capacity) - self.max_level = max_level - self.flush_level = flush_level - self.target_handler = 
target_handler - - def emit(self, record: logging.LogRecord) -> None: - if record.levelno < self.max_level: - self.buffer.append(record) - - if record.levelno >= self.flush_level: - while self.buffer: - record = self.buffer.popleft() - self.target_handler.handle(record) - - -class CensorLogFilter(logging.Filter): - def __init__(self, keyword: typing.Optional[str]) -> None: - self.keyword = keyword - - def censor(self, msg: typing.Any) -> str: - if self.keyword and type(msg) is str: - return msg.replace(self.keyword, "*" * len(self.keyword)) - else: - return msg - - def filter(self, record: logging.LogRecord) -> bool: - record.msg = self.censor(record.msg) - if (record.args): - record.args = tuple(self.censor(arg) if isinstance( - arg, str) else arg for arg in record.args) - return True - - -def setup_logging(verbosity: int) -> logging.Logger: - logger = logging.getLogger() - logger.setLevel(ENGINE) - stream = sys.stdout - handler = logging.StreamHandler(stream) - - if verbosity >= 3: - handler.setLevel(ENGINE) - elif verbosity >= 2: - handler.setLevel(logging.DEBUG) - elif verbosity >= 1: - handler.setLevel(PROGRESS) - else: - if stream.isatty(): - handler = CollapsingLogHandler(stream) - handler.setLevel(PROGRESS) - else: - handler.setLevel(logging.INFO) - - if verbosity < 2: - logging.getLogger("urllib3").setLevel(logging.WARNING) - logging.getLogger("requests.packages.urllib3").setLevel( - logging.WARNING) - - tail_target = logging.StreamHandler(stream) - tail_target.setFormatter(LogFormatter()) - logger.addHandler(TailLogHandler( - 35, handler.level, logging.ERROR, tail_target)) - - handler.setFormatter(LogFormatter()) - logger.addHandler(handler) - - return logger - - -log = setup_logging(LOGGING_VERBOSITY) diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000..44b3f61 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,1092 @@ +{ + "name": "shoginet", + "version": "5.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": 
{ + "": { + "name": "shoginet", + "version": "5.0.0", + "license": "AGPL-3.0-or-later", + "dependencies": { + "@biomejs/biome": "2.2.4", + "@types/config": "^3.3.5", + "@types/node": "^22", + "@types/semver": "^7.7.1", + "config": "^4.1.1", + "got": "^14.4.8", + "http-status-codes": "^2.3.0", + "semver": "^7.7.2", + "shogiops": "^0.19.0", + "tslog": "^4.10.2", + "tsx": "^4.20.5", + "typescript": "^5.9.2" + }, + "engines": { + "node": ">=22" + } + }, + "node_modules/@badrap/result": { + "version": "0.2.13", + "resolved": "https://registry.npmjs.org/@badrap/result/-/result-0.2.13.tgz", + "integrity": "sha512-Qvyzz0dmGY0h8pwvBFo1BznAKf5Y5NXIDiqhPALWtfU7oHbAToCtPu4FlYQ3uysskSWLx8GUiyhe0nv0nDd/7Q==", + "license": "MIT" + }, + "node_modules/@biomejs/biome": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@biomejs/biome/-/biome-2.2.4.tgz", + "integrity": "sha512-TBHU5bUy/Ok6m8c0y3pZiuO/BZoY/OcGxoLlrfQof5s8ISVwbVBdFINPQZyFfKwil8XibYWb7JMwnT8wT4WVPg==", + "license": "MIT OR Apache-2.0", + "bin": { + "biome": "bin/biome" + }, + "engines": { + "node": ">=14.21.3" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/biome" + }, + "optionalDependencies": { + "@biomejs/cli-darwin-arm64": "2.2.4", + "@biomejs/cli-darwin-x64": "2.2.4", + "@biomejs/cli-linux-arm64": "2.2.4", + "@biomejs/cli-linux-arm64-musl": "2.2.4", + "@biomejs/cli-linux-x64": "2.2.4", + "@biomejs/cli-linux-x64-musl": "2.2.4", + "@biomejs/cli-win32-arm64": "2.2.4", + "@biomejs/cli-win32-x64": "2.2.4" + } + }, + "node_modules/@biomejs/cli-darwin-arm64": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-arm64/-/cli-darwin-arm64-2.2.4.tgz", + "integrity": "sha512-RJe2uiyaloN4hne4d2+qVj3d3gFJFbmrr5PYtkkjei1O9c+BjGXgpUPVbi8Pl8syumhzJjFsSIYkcLt2VlVLMA==", + "cpu": [ + "arm64" + ], + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=14.21.3" + } + }, + 
"node_modules/@biomejs/cli-darwin-x64": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@biomejs/cli-darwin-x64/-/cli-darwin-x64-2.2.4.tgz", + "integrity": "sha512-cFsdB4ePanVWfTnPVaUX+yr8qV8ifxjBKMkZwN7gKb20qXPxd/PmwqUH8mY5wnM9+U0QwM76CxFyBRJhC9tQwg==", + "cpu": [ + "x64" + ], + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-arm64": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64/-/cli-linux-arm64-2.2.4.tgz", + "integrity": "sha512-M/Iz48p4NAzMXOuH+tsn5BvG/Jb07KOMTdSVwJpicmhN309BeEyRyQX+n1XDF0JVSlu28+hiTQ2L4rZPvu7nMw==", + "cpu": [ + "arm64" + ], + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-arm64-musl": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-arm64-musl/-/cli-linux-arm64-musl-2.2.4.tgz", + "integrity": "sha512-7TNPkMQEWfjvJDaZRSkDCPT/2r5ESFPKx+TEev+I2BXDGIjfCZk2+b88FOhnJNHtksbOZv8ZWnxrA5gyTYhSsQ==", + "cpu": [ + "arm64" + ], + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-x64": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64/-/cli-linux-x64-2.2.4.tgz", + "integrity": "sha512-orr3nnf2Dpb2ssl6aihQtvcKtLySLta4E2UcXdp7+RTa7mfJjBgIsbS0B9GC8gVu0hjOu021aU8b3/I1tn+pVQ==", + "cpu": [ + "x64" + ], + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-linux-x64-musl": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@biomejs/cli-linux-x64-musl/-/cli-linux-x64-musl-2.2.4.tgz", + "integrity": 
"sha512-m41nFDS0ksXK2gwXL6W6yZTYPMH0LughqbsxInSKetoH6morVj43szqKx79Iudkp8WRT5SxSh7qVb8KCUiewGg==", + "cpu": [ + "x64" + ], + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-win32-arm64": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-arm64/-/cli-win32-arm64-2.2.4.tgz", + "integrity": "sha512-NXnfTeKHDFUWfxAefa57DiGmu9VyKi0cDqFpdI+1hJWQjGJhJutHPX0b5m+eXvTKOaf+brU+P0JrQAZMb5yYaQ==", + "cpu": [ + "arm64" + ], + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@biomejs/cli-win32-x64": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/@biomejs/cli-win32-x64/-/cli-win32-x64-2.2.4.tgz", + "integrity": "sha512-3Y4V4zVRarVh/B/eSHczR4LYoSVyv3Dfuvm3cWs5w/HScccS0+Wt/lHOcDTRYeHjQmMYVC3rIRWqyN2EI52+zg==", + "cpu": [ + "x64" + ], + "license": "MIT OR Apache-2.0", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=14.21.3" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.9.tgz", + "integrity": "sha512-OaGtL73Jck6pBKjNIe24BnFE6agGl+6KxDtTfHhy1HmhthfKouEcOhqpSL64K4/0WCtbKFLOdzD/44cJ4k9opA==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.9.tgz", + "integrity": "sha512-5WNI1DaMtxQ7t7B6xa572XMXpHAaI/9Hnhk8lcxF4zVN4xstUgTlvuGDorBguKEnZO70qwEcLpfifMLoxiPqHQ==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.25.9", + "resolved": 
"https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.9.tgz", + "integrity": "sha512-IDrddSmpSv51ftWslJMvl3Q2ZT98fUSL2/rlUXuVqRXHCs5EUF1/f+jbjF5+NG9UffUDMCiTyh8iec7u8RlTLg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.9.tgz", + "integrity": "sha512-I853iMZ1hWZdNllhVZKm34f4wErd4lMyeV7BLzEExGEIZYsOzqDWDf+y082izYUE8gtJnYHdeDpN/6tUdwvfiw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.9.tgz", + "integrity": "sha512-XIpIDMAjOELi/9PB30vEbVMs3GV1v2zkkPnuyRRURbhqjyzIINwj+nbQATh4H9GxUgH1kFsEyQMxwiLFKUS6Rg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.9.tgz", + "integrity": "sha512-jhHfBzjYTA1IQu8VyrjCX4ApJDnH+ez+IYVEoJHeqJm9VhG9Dh2BYaJritkYK3vMaXrf7Ogr/0MQ8/MeIefsPQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.9.tgz", + "integrity": "sha512-z93DmbnY6fX9+KdD4Ue/H6sYs+bhFQJNCPZsi4XWJoYblUqT06MQUdBCpcSfuiN72AbqeBFu5LVQTjfXDE2A6Q==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.25.9", 
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.9.tgz", + "integrity": "sha512-mrKX6H/vOyo5v71YfXWJxLVxgy1kyt1MQaD8wZJgJfG4gq4DpQGpgTB74e5yBeQdyMTbgxp0YtNj7NuHN0PoZg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.9.tgz", + "integrity": "sha512-HBU2Xv78SMgaydBmdor38lg8YDnFKSARg1Q6AT0/y2ezUAKiZvc211RDFHlEZRFNRVhcMamiToo7bDx3VEOYQw==", + "cpu": [ + "arm" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.9.tgz", + "integrity": "sha512-BlB7bIcLT3G26urh5Dmse7fiLmLXnRlopw4s8DalgZ8ef79Jj4aUcYbk90g8iCa2467HX8SAIidbL7gsqXHdRw==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.9.tgz", + "integrity": "sha512-e7S3MOJPZGp2QW6AK6+Ly81rC7oOSerQ+P8L0ta4FhVi+/j/v2yZzx5CqqDaWjtPFfYz21Vi1S0auHrap3Ma3A==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.9.tgz", + "integrity": "sha512-Sbe10Bnn0oUAB2AalYztvGcK+o6YFFA/9829PhOCUS9vkJElXGdphz0A3DbMdP8gmKkqPmPcMJmJOrI3VYB1JQ==", + "cpu": [ + "loong64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.25.9", 
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.9.tgz", + "integrity": "sha512-YcM5br0mVyZw2jcQeLIkhWtKPeVfAerES5PvOzaDxVtIyZ2NUBZKNLjC5z3/fUlDgT6w89VsxP2qzNipOaaDyA==", + "cpu": [ + "mips64el" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.9.tgz", + "integrity": "sha512-++0HQvasdo20JytyDpFvQtNrEsAgNG2CY1CLMwGXfFTKGBGQT3bOeLSYE2l1fYdvML5KUuwn9Z8L1EWe2tzs1w==", + "cpu": [ + "ppc64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.9.tgz", + "integrity": "sha512-uNIBa279Y3fkjV+2cUjx36xkx7eSjb8IvnL01eXUKXez/CBHNRw5ekCGMPM0BcmqBxBcdgUWuUXmVWwm4CH9kg==", + "cpu": [ + "riscv64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.9.tgz", + "integrity": "sha512-Mfiphvp3MjC/lctb+7D287Xw1DGzqJPb/J2aHHcHxflUo+8tmN/6d4k6I2yFR7BVo5/g7x2Monq4+Yew0EHRIA==", + "cpu": [ + "s390x" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.9.tgz", + "integrity": "sha512-iSwByxzRe48YVkmpbgoxVzn76BXjlYFXC7NvLYq+b+kDjyyk30J0JY47DIn8z1MO3K0oSl9fZoRmZPQI4Hklzg==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-arm64": { + "version": 
"0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.9.tgz", + "integrity": "sha512-9jNJl6FqaUG+COdQMjSCGW4QiMHH88xWbvZ+kRVblZsWrkXlABuGdFJ1E9L7HK+T0Yqd4akKNa/lO0+jDxQD4Q==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.9.tgz", + "integrity": "sha512-RLLdkflmqRG8KanPGOU7Rpg829ZHu8nFy5Pqdi9U01VYtG9Y0zOG6Vr2z4/S+/3zIyOxiK6cCeYNWOFR9QP87g==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.9.tgz", + "integrity": "sha512-YaFBlPGeDasft5IIM+CQAhJAqS3St3nJzDEgsgFixcfZeyGPCd6eJBWzke5piZuZ7CtL656eOSYKk4Ls2C0FRQ==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.9.tgz", + "integrity": "sha512-1MkgTCuvMGWuqVtAvkpkXFmtL8XhWy+j4jaSO2wxfJtilVCi0ZE37b8uOdMItIHz4I6z1bWWtEX4CJwcKYLcuA==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/openharmony-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.9.tgz", + "integrity": "sha512-4Xd0xNiMVXKh6Fa7HEJQbrpP3m3DDn43jKxMjxLLRjWnRsfxjORYJlXPO4JNcXtOyfajXorRKY9NkOpTHptErg==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": ">=18" + } + }, + 
"node_modules/@esbuild/sunos-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.9.tgz", + "integrity": "sha512-WjH4s6hzo00nNezhp3wFIAfmGZ8U7KtrJNlFMRKxiI9mxEK1scOMAaa9i4crUtu+tBr+0IN6JCuAcSBJZfnphw==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.9.tgz", + "integrity": "sha512-mGFrVJHmZiRqmP8xFOc6b84/7xa5y5YvR1x8djzXpJBSv/UsNK6aqec+6JDjConTgvvQefdGhFDAs2DLAds6gQ==", + "cpu": [ + "arm64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.9.tgz", + "integrity": "sha512-b33gLVU2k11nVx1OhX3C8QQP6UHQK4ZtN56oFWvVXvz2VkDoe6fbG8TOgHFxEvqeqohmRnIHe5A1+HADk4OQww==", + "cpu": [ + "ia32" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.9.tgz", + "integrity": "sha512-PPOl1mi6lpLNQxnGoyAfschAodRFYXJ+9fs6WHXz7CSWKbOqiMZsubC+BQsVKuul+3vKLuwTHsS2c2y9EoKwxQ==", + "cpu": [ + "x64" + ], + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=18" + } + }, + "node_modules/@sec-ant/readable-stream": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@sec-ant/readable-stream/-/readable-stream-0.4.1.tgz", + "integrity": "sha512-831qok9r2t8AlxLko40y2ebgSDhenenCatLVeW/uBtnHPyhHOvG0C7TvfgecV+wHzIm5KUICgzmVpWS+IMEAeg==", + "license": "MIT" + }, + "node_modules/@sindresorhus/is": { + "version": "7.0.2", + "resolved": 
"https://registry.npmjs.org/@sindresorhus/is/-/is-7.0.2.tgz", + "integrity": "sha512-d9xRovfKNz1SKieM0qJdO+PQonjnnIfSNWfHYnBSJ9hkjm0ZPw6HlxscDXYstp3z+7V2GOFHc+J0CYrYTjqCJw==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sindresorhus/is?sponsor=1" + } + }, + "node_modules/@szmarczak/http-timer": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-5.0.1.tgz", + "integrity": "sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw==", + "license": "MIT", + "dependencies": { + "defer-to-connect": "^2.0.1" + }, + "engines": { + "node": ">=14.16" + } + }, + "node_modules/@types/config": { + "version": "3.3.5", + "resolved": "https://registry.npmjs.org/@types/config/-/config-3.3.5.tgz", + "integrity": "sha512-itq2HtXQBrNUKwMNZnb9mBRE3T99VYCdl1gjST9rq+9kFaB1iMMGuDeZnP88qid73DnpAMKH9ZolqDpS1Lz7+w==", + "license": "MIT" + }, + "node_modules/@types/http-cache-semantics": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.4.tgz", + "integrity": "sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==", + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "22.18.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.18.1.tgz", + "integrity": "sha512-rzSDyhn4cYznVG+PCzGe1lwuMYJrcBS1fc3JqSa2PvtABwWo+dZ1ij5OVok3tqfpEBCBoaR4d7upFJk73HRJDw==", + "license": "MIT", + "dependencies": { + "undici-types": "~6.21.0" + } + }, + "node_modules/@types/semver": { + "version": "7.7.1", + "resolved": "https://registry.npmjs.org/@types/semver/-/semver-7.7.1.tgz", + "integrity": "sha512-FmgJfu+MOcQ370SD0ev7EI8TlCAfKYU+B4m5T3yXc1CiRN94g/SZPtsCkk506aUDtlMnFZvasDwHHUcZUEaYuA==", + "license": "MIT" + }, + "node_modules/cacheable-lookup": { + "version": "7.0.0", + "resolved": 
"https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-7.0.0.tgz", + "integrity": "sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w==", + "license": "MIT", + "engines": { + "node": ">=14.16" + } + }, + "node_modules/cacheable-request": { + "version": "12.0.1", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-12.0.1.tgz", + "integrity": "sha512-Yo9wGIQUaAfIbk+qY0X4cDQgCosecfBe3V9NSyeY4qPC2SAkbCS4Xj79VP8WOzitpJUZKc/wsRCYF5ariDIwkg==", + "license": "MIT", + "dependencies": { + "@types/http-cache-semantics": "^4.0.4", + "get-stream": "^9.0.1", + "http-cache-semantics": "^4.1.1", + "keyv": "^4.5.4", + "mimic-response": "^4.0.0", + "normalize-url": "^8.0.1", + "responselike": "^3.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/config": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/config/-/config-4.1.1.tgz", + "integrity": "sha512-jljfwqNZ7QHwAW9Z9NDZdJARFiu5pjLqQO0K4ooY22iY/bIY78n0afI4ANEawfgQOxri0K/3oTayX8XIauWcLA==", + "license": "MIT", + "dependencies": { + "json5": "^2.2.3" + }, + "engines": { + "node": ">= 20.0.0" + } + }, + "node_modules/decompress-response": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "license": "MIT", + "dependencies": { + "mimic-response": "^3.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/decompress-response/node_modules/mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/defer-to-connect": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", + "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, + "node_modules/esbuild": { + "version": "0.25.9", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.9.tgz", + "integrity": "sha512-CRbODhYyQx3qp7ZEwzxOk4JBqmD/seJrzPa/cGjY1VtIn5E09Oi9/dB4JwctnfZ8Q8iT7rioVv5k/FNT/uf54g==", + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.25.9", + "@esbuild/android-arm": "0.25.9", + "@esbuild/android-arm64": "0.25.9", + "@esbuild/android-x64": "0.25.9", + "@esbuild/darwin-arm64": "0.25.9", + "@esbuild/darwin-x64": "0.25.9", + "@esbuild/freebsd-arm64": "0.25.9", + "@esbuild/freebsd-x64": "0.25.9", + "@esbuild/linux-arm": "0.25.9", + "@esbuild/linux-arm64": "0.25.9", + "@esbuild/linux-ia32": "0.25.9", + "@esbuild/linux-loong64": "0.25.9", + "@esbuild/linux-mips64el": "0.25.9", + "@esbuild/linux-ppc64": "0.25.9", + "@esbuild/linux-riscv64": "0.25.9", + "@esbuild/linux-s390x": "0.25.9", + "@esbuild/linux-x64": "0.25.9", + "@esbuild/netbsd-arm64": "0.25.9", + "@esbuild/netbsd-x64": "0.25.9", + "@esbuild/openbsd-arm64": "0.25.9", + "@esbuild/openbsd-x64": "0.25.9", + "@esbuild/openharmony-arm64": "0.25.9", + "@esbuild/sunos-x64": "0.25.9", + "@esbuild/win32-arm64": "0.25.9", + "@esbuild/win32-ia32": "0.25.9", + "@esbuild/win32-x64": "0.25.9" + } + }, + "node_modules/form-data-encoder": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-4.1.0.tgz", + "integrity": "sha512-G6NsmEW15s0Uw9XnCg+33H3ViYRyiM0hMrMhhqQOR8NFc5GhYrI+6I3u7OTw7b91J2g8rtvMBZJDbcGb2YUniw==", + "license": 
"MIT", + "engines": { + "node": ">= 18" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/get-stream": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-9.0.1.tgz", + "integrity": "sha512-kVCxPF3vQM/N0B1PmoqVUqgHP+EeVjmZSQn+1oCRPxd2P21P2F19lIgbR3HBosbB1PUhOAoctJnfEn2GbN2eZA==", + "license": "MIT", + "dependencies": { + "@sec-ant/readable-stream": "^0.4.1", + "is-stream": "^4.0.1" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-stream/node_modules/is-stream": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-4.0.1.tgz", + "integrity": "sha512-Dnz92NInDqYckGEUJv689RbRiTSEHCQ7wOVeALbkOz999YpqT46yMRIGtSNl2iCL1waAZSx40+h59NV/EwzV/A==", + "license": "MIT", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/get-tsconfig": { + "version": "4.10.1", + "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.10.1.tgz", + "integrity": "sha512-auHyJ4AgMz7vgS8Hp3N6HXSmlMdUyhSUrfBF16w153rxtLIEOE+HGqaBppczZvnHLqQJfiHotCYpNhl0lUROFQ==", + "license": "MIT", + "dependencies": { + "resolve-pkg-maps": "^1.0.0" + }, + "funding": { + "url": "https://github.com/privatenumber/get-tsconfig?sponsor=1" + } + }, + "node_modules/got": { + "version": "14.4.8", + "resolved": "https://registry.npmjs.org/got/-/got-14.4.8.tgz", + "integrity": "sha512-vxwU4HuR0BIl+zcT1LYrgBjM+IJjNElOjCzs0aPgHorQyr/V6H6Y73Sn3r3FOlUffvWD+Q5jtRuGWaXkU8Jbhg==", + "license": "MIT", + "dependencies": 
{ + "@sindresorhus/is": "^7.0.1", + "@szmarczak/http-timer": "^5.0.1", + "cacheable-lookup": "^7.0.0", + "cacheable-request": "^12.0.1", + "decompress-response": "^6.0.0", + "form-data-encoder": "^4.0.2", + "http2-wrapper": "^2.2.1", + "lowercase-keys": "^3.0.0", + "p-cancelable": "^4.0.1", + "responselike": "^3.0.0", + "type-fest": "^4.26.1" + }, + "engines": { + "node": ">=20" + }, + "funding": { + "url": "https://github.com/sindresorhus/got?sponsor=1" + } + }, + "node_modules/http-cache-semantics": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.2.0.tgz", + "integrity": "sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ==", + "license": "BSD-2-Clause" + }, + "node_modules/http-status-codes": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/http-status-codes/-/http-status-codes-2.3.0.tgz", + "integrity": "sha512-RJ8XvFvpPM/Dmc5SV+dC4y5PCeOhT3x1Hq0NU3rjGeg5a/CqlhZ7uudknPwZFz4aeAXDcbAyaeP7GAo9lvngtA==", + "license": "MIT" + }, + "node_modules/http2-wrapper": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-2.2.1.tgz", + "integrity": "sha512-V5nVw1PAOgfI3Lmeaj2Exmeg7fenjhRUgz1lPSezy1CuhPYbgQtbQj4jZfEAEMlaL+vupsvhjqCyjzob0yxsmQ==", + "license": "MIT", + "dependencies": { + "quick-lru": "^5.1.1", + "resolve-alpn": "^1.2.0" + }, + "engines": { + "node": ">=10.19.0" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "license": "MIT" + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "license": "MIT", + "bin": { + 
"json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/lowercase-keys": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz", + "integrity": "sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==", + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mimic-response": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-4.0.0.tgz", + "integrity": "sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg==", + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/normalize-url": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-8.1.0.tgz", + "integrity": "sha512-X06Mfd/5aKsRHc0O0J5CUedwnPmnDtLF2+nq+KN9KSDlJHkPuh0JUviWjEWMe0SW/9TDdSLVPuk7L5gGTIA1/w==", + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-cancelable": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-4.0.1.tgz", + "integrity": "sha512-wBowNApzd45EIKdO1LaU+LrMBwAcjfPaYtVzV3lmfM3gf8Z4CHZsiIqlM8TZZ8okYvh5A1cP6gTfCRQtwUpaUg==", + "license": "MIT", + "engines": { + "node": ">=14.16" + } + }, + "node_modules/quick-lru": { + "version": "5.1.1", + "resolved": 
"https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", + "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/resolve-alpn": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz", + "integrity": "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==", + "license": "MIT" + }, + "node_modules/resolve-pkg-maps": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", + "integrity": "sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==", + "license": "MIT", + "funding": { + "url": "https://github.com/privatenumber/resolve-pkg-maps?sponsor=1" + } + }, + "node_modules/responselike": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-3.0.0.tgz", + "integrity": "sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg==", + "license": "MIT", + "dependencies": { + "lowercase-keys": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/shogiops": { + "version": "0.19.0", + "resolved": "https://registry.npmjs.org/shogiops/-/shogiops-0.19.0.tgz", + "integrity": "sha512-cMmazXORRClObWPjMe9hW+whr7oGF2mXKYJlHi76bHDffCY19UQjtlb9zSC04LchfdJOLQNXYLYKUTeddQcClA==", + "license": 
"GPL-3.0-or-later", + "dependencies": { + "@badrap/result": "^0.2" + }, + "funding": { + "url": "https://lishogi.org/patron" + } + }, + "node_modules/tslog": { + "version": "4.10.2", + "resolved": "https://registry.npmjs.org/tslog/-/tslog-4.10.2.tgz", + "integrity": "sha512-XuELoRpMR+sq8fuWwX7P0bcj+PRNiicOKDEb3fGNURhxWVyykCi9BNq7c4uVz7h7P0sj8qgBsr5SWS6yBClq3g==", + "license": "MIT", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/fullstack-build/tslog?sponsor=1" + } + }, + "node_modules/tsx": { + "version": "4.20.5", + "resolved": "https://registry.npmjs.org/tsx/-/tsx-4.20.5.tgz", + "integrity": "sha512-+wKjMNU9w/EaQayHXb7WA7ZaHY6hN8WgfvHNQ3t1PnU91/7O8TcTnIhCDYTZwnt8JsO9IBqZ30Ln1r7pPF52Aw==", + "license": "MIT", + "dependencies": { + "esbuild": "~0.25.0", + "get-tsconfig": "^4.7.5" + }, + "bin": { + "tsx": "dist/cli.mjs" + }, + "engines": { + "node": ">=18.0.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + } + }, + "node_modules/type-fest": { + "version": "4.41.0", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.41.0.tgz", + "integrity": "sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==", + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/typescript": { + "version": "5.9.2", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.2.tgz", + "integrity": "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==", + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "6.21.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", + "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", + 
"license": "MIT" + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..0a83d11 --- /dev/null +++ b/package.json @@ -0,0 +1,45 @@ +{ + "name": "shoginet", + "version": "5.0.0", + "description": "Distributed network for lishogi.org", + "license": "AGPL-3.0-or-later", + "author": "WandererXII", + "repository": { + "type": "git", + "url": "https://github.com/WandererXII/shoginet" + }, + "bugs": { + "url": "https://github.com/WandererXII/shoginet/issues" + }, + "homepage": "https://lishogi.org", + "type": "module", + "engines": { + "node": ">=22" + }, + "dependencies": { + "@biomejs/biome": "2.2.4", + "@types/config": "^3.3.5", + "@types/node": "^22", + "@types/semver": "^7.7.1", + "config": "^4.1.1", + "got": "^14.4.8", + "http-status-codes": "^2.3.0", + "semver": "^7.7.2", + "shogiops": "^0.19.0", + "tslog": "^4.10.2", + "tsx": "^4.20.5", + "typescript": "^5.9.2" + }, + "scripts": { + "start": "tsx ./src/main.ts", + "systemd": "tsx ./src/systemd.ts", + "test": "NODE_ENV=test tsx ./test/main.ts", + "build-engines": "$npm_execpath run yaneuraou && $npm_execpath run fairy", + "build-yaneuraou": "./scripts/yaneuraou.sh", + "build-fairy": "./scripts/fairy.sh", + "format": "biome format", + "format:write": "biome format --write", + "lint": "biome check", + "lint:write": "biome check --write" + } +} diff --git a/progressReporter.py b/progressReporter.py deleted file mode 100644 index 4885444..0000000 --- a/progressReporter.py +++ /dev/null @@ -1,57 +0,0 @@ -import threading -import configparser -import requests -import queue -import json -import consts -import time -import typing -from config import get_endpoint -from logger import log - - -class ProgressReporter(threading.Thread): - def __init__(self, queue_size: int, conf: configparser.ConfigParser) -> None: - super(ProgressReporter, self).__init__() - self.http = requests.Session() - self.conf = conf - - self.queue: queue.Queue = queue.Queue(maxsize=queue_size) - self._poison_pill = 
object() - - def send(self, job: typing.Any, result: typing.Any) -> None: - path = "analysis/%s" % job["work"]["id"] - data = json.dumps(result).encode("utf-8") - try: - self.queue.put_nowait((path, data)) - except queue.Full: - log.debug( - "Could not keep up with progress reports. Dropping one.") - - def stop(self) -> None: - while not self.queue.empty(): - self.queue.get_nowait() - self.queue.put(self._poison_pill) - - def run(self) -> None: - while True: - item = self.queue.get() - if item == self._poison_pill: - return - - path, data = item - - try: - response = self.http.post(get_endpoint(self.conf, path), - data=data, - timeout=consts.HTTP_TIMEOUT) - if response.status_code == 429: - log.error( - "Too many requests. Suspending progress reports for 60s ...") - time.sleep(60.0) - elif response.status_code != 204: - log.error( - "Expected status 204 for progress report, got %d", response.status_code) - except requests.RequestException as err: - log.warning( - "Could not send progress report (%s). Continuing.", err) diff --git a/build-fairy.sh b/scripts/fairy.sh similarity index 78% rename from build-fairy.sh rename to scripts/fairy.sh index 7348a97..480e578 100755 --- a/build-fairy.sh +++ b/scripts/fairy.sh @@ -2,6 +2,8 @@ echo "- Getting latest Fairy-Stockfish ..." +cd engines + if [ -d Fairy-Stockfish/src ]; then cd Fairy-Stockfish/src make clean > /dev/null @@ -14,24 +16,21 @@ fi echo "- Determining CPU architecture ..." ARCH=x86-64 -EXE=fairy-stockfish-largeboard_x86-64 +EXE=fairy-stockfish if [ -f /proc/cpuinfo ]; then if grep "^flags" /proc/cpuinfo | grep -q popcnt ; then ARCH=x86-64-modern - EXE=fairy-stockfish-largeboard_x86-64-modern fi if grep "^vendor_id" /proc/cpuinfo | grep -q Intel ; then if grep "^flags" /proc/cpuinfo | grep bmi2 | grep -q popcnt ; then ARCH=x86-64-bmi2 - EXE=fairy-stockfish-largeboard_x86-64-bmi2 fi fi fi -echo "- Building $EXE ... (patience advised)" +echo "- Building Fairy-stockfish $ARCH ... 
(patience advised)" make -j$(nproc || echo 4) build ARCH=$ARCH EXE=../../$EXE largeboards=yes > /dev/null -cd ../.. echo "- Done!" \ No newline at end of file diff --git a/build-yaneuraou.sh b/scripts/yaneuraou.sh similarity index 56% rename from build-yaneuraou.sh rename to scripts/yaneuraou.sh index c929ee1..39137ea 100755 --- a/build-yaneuraou.sh +++ b/scripts/yaneuraou.sh @@ -2,6 +2,8 @@ echo "- Getting latest YaneuraOu..." +cd engines + if [ -d YaneuraOu/source ]; then cd YaneuraOu/source make clean > /dev/null @@ -16,28 +18,17 @@ echo "- Determining CPU architecture..." ARCH=SSE42 COMP=g++ -case "$(uname -s)" in - Darwin) - if sysctl -a | grep machdep.cpu.leaf7_features | grep -q AVX2 ; then - ARCH=AVX2 - fi - ;; - Linux) - if [ -f /proc/cpuinfo ]; then - if grep "^flags" /proc/cpuinfo | grep -q avx2 ; then - ARCH=AVX2 - fi - fi - ;; -esac - -distFile="YaneuraOu-by-gcc-$ARCH" +if grep -q avx2 /proc/cpuinfo 2>/dev/null || \ + (sysctl -a 2>/dev/null | grep machdep.cpu.leaf7_features | grep -q AVX2); then + ARCH=AVX2 +fi + +distFile="YaneuraOu" echo "- Building YANEURAOU $ARCH ... (patience advised)" make -j$(nproc || echo 4) TARGET_CPU=$ARCH YANEURAOU_EDITION=YANEURAOU_ENGINE_NNUE COMPILER=$COMP > /dev/null -cd ../.. -mv ./YaneuraOu/source/YaneuraOu-by-gcc $distFile +mv ./YaneuraOu-by-gcc ../../ echo "- Done!" 
diff --git a/shoginet.py b/shoginet.py deleted file mode 100644 index baaf684..0000000 --- a/shoginet.py +++ /dev/null @@ -1,229 +0,0 @@ -import typing -import argparse -import collections -import sys -import platform -import requests -import consts -from config import configure, load_conf, validate_command, conf_get, get_yaneuraou_command, get_fairy_command, get_engine_dir, get_key, validate_cores, validate_memory, validate_threads, get_endpoint -import util -import errors -from worker import Worker -from progressReporter import ProgressReporter -import signals -from logger import log -from systemd import systemd -from intro import intro -from cpuid import cpuid - - -def cmd_run(args: typing.Any) -> int: - conf = load_conf(args) - - yane_command = validate_command( - conf_get(conf, "YaneuraOuCommand"), conf) - if not yane_command: - yane_command = get_yaneuraou_command(conf) - - fairy_command = validate_command( - conf_get(conf, "FairyCommand"), conf) - if not fairy_command: - fairy_command = get_fairy_command(conf) - - print() - print("### Checking configuration ...") - print() - print("Python: %s (with requests %s)" % - (platform.python_version(), requests.__version__)) - print("EngineDir: %s" % get_engine_dir(conf)) - print("YaneuraOuCommand: %s" % yane_command) - print("FairyCommand: %s" % fairy_command) - print("Key: %s" % (("*" * len(get_key(conf))) or "(none)")) - - cores = validate_cores(conf_get(conf, "Cores")) - print("Cores: %d" % cores) - - threads = validate_threads(conf_get(conf, "Threads"), conf) - instances = max(1, cores // threads) - print("Engine processes: %d (each ~%d threads)" % (instances, threads)) - memory = validate_memory(conf_get(conf, "Memory"), conf) - print("Memory: %d MB" % memory) - endpoint = get_endpoint(conf) - warning = "" if endpoint.startswith( - "https://") else " (WARNING: not using https)" - print("Endpoint: %s%s" % (endpoint, warning)) - print("FixedBackoff: %s" % - util.parse_bool(conf_get(conf, "FixedBackoff"))) - 
print() - - if conf.has_section("Stockfish") and conf.items("Stockfish"): - print("Using custom USI options is discouraged:") - for name, value in conf.items("Stockfish"): - if name.lower() == "hash": - hint = " (use --memory instead)" - elif name.lower() == "threads": - hint = " (use --threads-per-process instead)" - else: - hint = "" - print(" * %s = %s%s" % (name, value, hint)) - print() - - print("### Starting workers ...") - print() - - buckets = [0] * instances - for i in range(0, cores): - buckets[i % instances] += 1 - - progress_reporter = ProgressReporter(len(buckets) + 4, conf) - progress_reporter.daemon = True - progress_reporter.start() - - workers = [Worker(conf, bucket, memory // instances, - progress_reporter) for bucket in buckets] - - # Start all threads - for i, worker in enumerate(workers): - worker.set_name("><> %d" % (i + 1)) - worker.daemon = True - worker.start() - - # Wait while the workers are running - try: - # Let SIGTERM and SIGINT gracefully terminate the program - handler = signals.SignalHandler() - - try: - while True: - # Check worker status - for _ in range(int(max(1, consts.STAT_INTERVAL / len(workers)))): - for worker in workers: - worker.finished.wait(1.0) - if worker.fatal_error: - raise worker.fatal_error - - # Log stats - log.info("[shoginet v%s] Analyzed %d positions, crunched %d million nodes", - consts.SN_VERSION, - sum(worker.positions for worker in workers), - int(sum(worker.nodes for worker in workers) / 1000 / 1000)) - - except errors.ShutdownSoon: - handler = signals.SignalHandler() - - if any(worker.job for worker in workers): - log.info( - "\n\n### Stopping soon. Press ^C again to abort pending jobs ...\n") - - for worker in workers: - worker.stop_soon() - - for worker in workers: - while not worker.finished.wait(0.5): - pass - except (errors.Shutdown, errors.ShutdownSoon): - if any(worker.job for worker in workers): - log.info("\n\n### Good bye! 
Aborting pending jobs ...\n") - else: - log.info("\n\n### Good bye!") - finally: - handler.ignore = True - - # Stop workers - for worker in workers: - worker.stop() - - progress_reporter.stop() - - # Wait - for worker in workers: - worker.finished.wait() - - return 0 - - -def cmd_configure(args: typing.Any) -> int: - configure(args) - return 0 - - -def cmd_systemd(args: typing.Any) -> int: - systemd(args) - return 0 - - -def cmd_cpuid(argv: typing.Any) -> int: - cpuid() - return 0 - - -def main(argv: typing.Any) -> int: - # Parse command line arguments - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument("--verbose", "-v", default=0, - action="count", help="increase verbosity") - parser.add_argument("--version", action="version", - version="shoginet v{0}".format(consts.SN_VERSION)) - - g = parser.add_argument_group("configuration") - g.add_argument("--auto-update", action="store_true", - help="automatically install available updates") - g.add_argument("--conf", help="configuration file") - g.add_argument("--no-conf", action="store_true", - help="do not use a configuration file") - g.add_argument("--key", "--apikey", "-k", help="shoginet api key") - - g = parser.add_argument_group("resources") - g.add_argument( - "--cores", help="number of cores to use for engine processes (or auto for n - 1, or all for n)") - g.add_argument( - "--memory", help="total memory (MB) to use for engine hashtables") - - g = parser.add_argument_group("advanced") - g.add_argument( - "--endpoint", help="lishogi https endpoint (default: %s)" % consts.DEFAULT_ENDPOINT) - g.add_argument("--engine-dir", help="engine working directory") - g.add_argument("--yaneuraou-command", - help="YaneuraOu command (default: YaneuraOu-by-gcc)") - g.add_argument("--fairy-command", - help="Fairy stockfish command (default: fairy-stockfish-largeboard_x86-64)") - g.add_argument("--threads-per-process", "--threads", type=int, dest="threads", - help="hint for the number of threads to use per 
engine process (default: %d)" % consts.DEFAULT_THREADS) - g.add_argument("--fixed-backoff", action="store_true", default=None, - help="fixed backoff (only recommended for move servers)") - g.add_argument("--no-fixed-backoff", dest="fixed_backoff", - action="store_false", default=None) - g.add_argument("--setoptionYaneuraou", nargs=2, action="append", default=[], - metavar=("NAME", "VALUE"), help="set a custom usi option for YaneuraOu") - g.add_argument("--setoptionFairy", nargs=2, action="append", default=[], - metavar=("NAME", "VALUE"), help="set a custom usi option for Fairy Stockfish") - - commands = collections.OrderedDict([ - ("run", cmd_run), - ("configure", cmd_configure), - ("systemd", cmd_systemd), - ("cpuid", cmd_cpuid), - ]) - - parser.add_argument("command", default="run", - nargs="?", choices=commands.keys()) - - args = parser.parse_args(argv[1:]) - - # Show intro - if args.command not in ["systemd"]: - print(intro()) - sys.stdout.flush() - - # Run - try: - sys.exit(commands[args.command](args)) - except errors.ConfigError: - log.exception("Configuration error") - return 78 - except (KeyboardInterrupt, errors.Shutdown, errors.ShutdownSoon): - return 0 - - -if __name__ == "__main__": - main(sys.argv) diff --git a/signals.py b/signals.py deleted file mode 100644 index ba5697c..0000000 --- a/signals.py +++ /dev/null @@ -1,33 +0,0 @@ -import signal -from typing import Optional -from types import FrameType -import errors - - -class SignalHandler(object): - def __init__(self) -> None: - self.ignore = False - - signal.signal(signal.SIGTERM, self.handle_term) - signal.signal(signal.SIGINT, self.handle_int) - - try: - signal.signal(signal.SIGUSR1, self.handle_usr1) - except AttributeError: - # No SIGUSR1 on Windows - pass - - def handle_int(self, signum: int, frame: Optional[FrameType]) -> None: - if not self.ignore: - self.ignore = True - raise errors.ShutdownSoon() - - def handle_term(self, signum: int, frame: Optional[FrameType]) -> None: - if not 
self.ignore: - self.ignore = True - raise errors.Shutdown() - - def handle_usr1(self, signum: int, frame: Optional[FrameType]) -> None: - if not self.ignore: - self.ignore = True - raise errors.UpdateRequired() diff --git a/src/config/client.ts b/src/config/client.ts new file mode 100644 index 0000000..9ea755f --- /dev/null +++ b/src/config/client.ts @@ -0,0 +1,48 @@ +import os from 'node:os'; +import config from 'config'; +import { HASH_MIN, MAX_WORKERS } from '../consts.js'; + +interface Config { + workers: number; + engines: { + yaneuraou: { + path: string; + threads: number; + memory: number; + }; + fairy: { + path: string; + threads: number; + memory: number; + }; + }; + logger: 'error' | 'warn' | 'info' | 'debug' | 'silly'; + key: string; + autoUpdate: boolean; + endpoint: string; +} + +export const clientConfig = config as unknown as Config; + +export function verifyConfig(): void { + const sc = clientConfig; + const maxThreads = os.cpus().length; + + if (!sc.endpoint) throw 'No endpoint set'; + if (sc.workers <= 0 || sc.workers > MAX_WORKERS) + throw `0 < workers <= ${MAX_WORKERS}`; + if (sc.engines.fairy.threads < 1 || sc.engines.yaneuraou.threads < 1) + throw 'Minimum 1 thread for every engine'; + if ( + sc.engines.fairy.threads > maxThreads || + sc.engines.yaneuraou.threads > maxThreads + ) + throw `You can set up to ${maxThreads} with your CPU`; + if ( + sc.engines.fairy.memory < HASH_MIN || + sc.engines.yaneuraou.memory < HASH_MIN + ) + throw `Minimum ${HASH_MIN}MB memory per engine`; + if (!['error', 'warn', 'info', 'debug', 'silly'].includes(sc.logger)) + throw 'Invalid logger level value'; +} diff --git a/src/config/server.ts b/src/config/server.ts new file mode 100644 index 0000000..91b0ea8 --- /dev/null +++ b/src/config/server.ts @@ -0,0 +1,61 @@ +import { SERVER_CONFIG_REFETCH_SECONDS } from '../consts.js'; +import { getServerConfig } from '../http.js'; +import { baseLogger } from '../logger.js'; +import type { Level } from '../types.js'; + +type 
LevelSettings = { + [level in `${Level}`]: { + skill?: number; + movetime?: number; + depth?: number; + nodes?: number; + }; +}; + +interface Config { + analysis: { + movetime?: number; + nodes?: number; + depth?: number; + }; + puzzle: { + movetime?: number; + depth?: number; + maxLength?: number; + }; + move: { + fairy: LevelSettings; + yaneuraou: LevelSettings; + }; +} + +export class ServerConfig { + public config!: Config; + private logger = baseLogger.getSubLogger({ + name: 'server-config', + }); + + async initialize(): Promise { + await this.load(); + this.startPeriodicRefresh(); + } + + private startPeriodicRefresh(): void { + setInterval(() => { + try { + this.load(); + } catch { + this.logger.error('Failed to load config from server'); + } + }, SERVER_CONFIG_REFETCH_SECONDS * 1000); + } + + async load(): Promise { + const newConfig = await getServerConfig(); + this.logger.debug('New config loaded:', newConfig); + if (!newConfig || typeof newConfig !== 'object') + throw new Error('Received invalid config from server'); + + this.config = newConfig; + } +} diff --git a/src/consts.ts b/src/consts.ts new file mode 100644 index 0000000..5eb8d05 --- /dev/null +++ b/src/consts.ts @@ -0,0 +1,19 @@ +export const ANALYSIS_PROGRESS_INTERVAL_SECONDS = 7; +export const HTTP_TIMEOUT_IMPORTANT_SECONDS = 15; +export const HTTP_TIMEOUT_UNIMPORTANT_SECONDS = 10; +export const LOGGER_REPORT_INTERVAL_SECONDS: number = 30 * 60; +export const SERVER_CONFIG_REFETCH_SECONDS: number = 8 * 60 * 60; +export const MAX_BACKOFF_SECONDS = 35.0; +export const MAX_WORKERS = 4; +export const DEFAULT_ANALYSIS_MOVETIME_SECONDS = 3; +export const DEFAULT_MOVE_MOVETIME_SECONDS = 0.5; +export const DEFAULT_PUZZLE_MOVETIME_SECONDS = 3; +export const WORKER_INIT_TIMEOUT_SECONDS = 30; +export const TASK_ANALYSIS_TIMEOUT_SECONDS: number = 10 * 60; +export const TASK_MOVE_TIMEOUT_SECONDS = 20; +export const TASK_PUZZLE_TIMEOUT_SECONDS: number = 4 * 60; + +// engine +export const MIN_FAIRY_VERSION: 
Date = new Date(2025, 10 - 1, 6); +export const MIN_YANEURAOU_VERSION = 9; +export const HASH_MIN = 64; diff --git a/src/engine.ts b/src/engine.ts new file mode 100644 index 0000000..abe9468 --- /dev/null +++ b/src/engine.ts @@ -0,0 +1,255 @@ +import { type ChildProcessWithoutNullStreams, spawn } from 'node:child_process'; +import { EventEmitter } from 'node:events'; +import * as path from 'node:path'; +import { cwd } from 'node:process'; +import * as readline from 'node:readline'; +import type { Rules } from 'shogiops/types'; +import { clientConfig } from './config/client.js'; +import type { EngineInfo, EngineKind } from './types.js'; +import type { Worker } from './worker.js'; + +const options: Record> = { + yaneuraou: { + Threads: clientConfig.engines.yaneuraou.threads, + USI_Hash: clientConfig.engines.yaneuraou.memory, + EnteringKingRule: 'CSARule27H', + EvalDir: path.join(cwd(), 'eval'), + BookFile: 'no_book', + ConsiderationMode: 'true', + OutputFailLHPV: 'true', + }, + fairy: { + Threads: clientConfig.engines.fairy.threads, + USI_Hash: clientConfig.engines.fairy.memory, + }, +}; + +export interface EngineEvents { + usiok: () => void; + readyok: () => void; + info: (args: string) => void; + bestmove: (usi: string) => void; + failure: () => void; +} + +export declare interface Engine { + on(event: U, listener: EngineEvents[U]): this; + off(event: U, listener: EngineEvents[U]): this; + once(event: U, listener: EngineEvents[U]): this; + emit( + event: U, + ...args: Parameters + ): boolean; +} + +export class Engine extends EventEmitter { + private process: ChildProcessWithoutNullStreams | undefined; + + private stdoutInterface: readline.Interface | undefined; + private stderrInterface: readline.Interface | undefined; + + private logger: Worker['logger']; + + private isDestroyed = false; + public isActive = false; + public info: EngineInfo = {}; + + private history: string[] = []; + + constructor( + worker: Worker, + public readonly kind: EngineKind, + ) { + 
super(); + + this.logger = worker.logger.getSubLogger({ + name: `${this.kind}`, + }); + + const command = + this.kind === 'fairy' + ? clientConfig.engines.fairy.path + : clientConfig.engines.yaneuraou.path; + + this.process = spawn(command); + + this.process.on('error', (err) => { + this.logger.error(`Engine couldn't start: ${err}`); + }); + + this.process.on('exit', (code, signal) => { + this.isActive = false; + + if (code) + this.logger.info(`Engine exited with code ${code}, signal ${signal}`); + else { + this.logger.error( + `Engine failure, signal ${signal}, history:`, + this.history, + ); + this.emit('failure'); + } + }); + + this.stdoutInterface = readline.createInterface({ + input: this.process.stdout, + }); + this.stdoutInterface.on('line', (line) => { + if (!line) return; + + this.logger.silly(`>> ${line}`); + + const index = line.indexOf(' '); + const [cmd, rest] = + index === -1 + ? [line, ''] + : [line.slice(0, index), line.slice(index + 1)]; + + if (cmd === 'usiok') this.emit('usiok'); + else if (cmd === 'readyok') this.emit('readyok'); + else if (cmd === 'info') this.emit('info', rest); + else if (cmd === 'bestmove') this.emit('bestmove', rest.split(/\s+/)[0]); + else if (cmd === 'id') { + const parts = rest.split(/\s+/); + if (parts.length >= 2) this.info[parts[0]] = parts.slice(1).join(' '); + } else if (cmd === 'option') { + this.info.options = this.info.options || []; + this.info.options.push(rest.split(' ')[1]); + } + }); + + this.stderrInterface = readline.createInterface({ + input: this.process.stderr, + }); + this.stderrInterface.on('line', (line) => { + this.logger.error(`stderr: ${line}`); + }); + + this.once('usiok', () => { + this.logger.debug('id:', this.info); + for (const [name, value] of Object.entries(options[this.kind])) { + this.setOption(name, value); + } + this.once('readyok', () => { + this.isActive = true; + }); + this.send('isready'); + }); + this.send('usi'); + } + + destroy(): void { + if (!this.process || this.isDestroyed) 
return; + + const process = this.process; + + this.isDestroyed = true; + this.isActive = false; + this.process = undefined; + this.info = {}; + + this.removeAllListeners(); + this.stdoutInterface?.removeAllListeners(); + this.stderrInterface?.removeAllListeners(); + process.removeAllListeners(); + + process.stdin.end(); + this.stdoutInterface?.close(); + this.stderrInterface?.close(); + + const forceKillTimeout = setTimeout(() => { + if (!process.killed) { + this.logger.debug('Sending SIGKILL'); + process.kill('SIGKILL'); + } + }, 500); + process.once('exit', () => { + this.logger.info('Engined exited successfully'); + clearTimeout(forceKillTimeout); + }); + this.logger.debug('Sending SIGTERM'); + process.kill('SIGTERM'); + } + + send(line: string): void { + this.logger.debug(`<< ${line}`); + + if (!this.process) this.logger.error(`No process to send line: ${line}`); + else { + this.history.push(line); + if (this.history.length > 10) this.history.shift(); + this.process.stdin.write(`${line}\n`, (err) => { + if (err) this.logger.error(`Engine write error: ${err.message}`); + }); + } + } + + setOption(name: string, value: string | number | boolean | null): void { + if (!this.info.options?.includes(name)) + this.logger.warn(`Setting unknown option: name ${name} value ${value}`); + + let valueStr: string; + + if (value === true) valueStr = 'true'; + else if (value === false) valueStr = 'false'; + else if (value === null) valueStr = 'none'; + else valueStr = String(value); + + this.send(`setoption name ${name} value ${valueStr}`); + } + + setVariant(variant: Rules): void { + if (this.kind === 'fairy') + this.setOption('USI_Variant', variant === 'standard' ? 
'shogi' : variant); + } + + setMultiPv(pv: number): void { + if (this.kind === 'fairy') this.setOption('USI_MultiPV', pv); + else this.setOption('MultiPV', pv); + } + + search( + position: string, + moves: string[], + options?: { + movetime: number | undefined; + depth: number | undefined; + nodes: number | undefined; + clock: + | { btime: number; wtime: number; byo: number; inc: number } + | undefined; + }, + ): void { + this.once('readyok', () => { + this.send(`position sfen ${position} moves ${moves.join(' ')}`); + + const builder = ['go']; + + if (options?.movetime !== undefined) + builder.push('movetime', String(options.movetime)); + if (options?.depth !== undefined) + builder.push('depth', String(options.depth)); + if (options?.nodes !== undefined) + builder.push('nodes', String(options.nodes)); + + if (options?.clock) { + builder.push( + 'btime', + String(options.clock.btime * 10), + 'wtime', + String(options.clock.wtime * 10), + 'byoyomi', + String(options.clock.byo * 1000), + ); + + if (options.clock.inc) { + builder.push('binc', String(options.clock.inc * 1000)); + builder.push('winc', String(options.clock.inc * 1000)); + } + } + + this.send(builder.join(' ')); + }); + this.send('isready'); + } +} diff --git a/src/http.ts b/src/http.ts new file mode 100644 index 0000000..060b92a --- /dev/null +++ b/src/http.ts @@ -0,0 +1,113 @@ +import got, { type Response } from 'got'; +import { StatusCodes } from 'http-status-codes'; +import pkg from '../package.json' with { type: 'json' }; +import { clientConfig } from './config/client.js'; +import { + HTTP_TIMEOUT_IMPORTANT_SECONDS, + HTTP_TIMEOUT_UNIMPORTANT_SECONDS, +} from './consts.js'; +import { baseLogger } from './logger.js'; +import type { Work } from './types.js'; + +const headers = { + 'shoginet-version': pkg.version, + 'shoginet-key': clientConfig.key, +}; + +function og(res: Record) { + return { + ...res, + shoginet: { + version: pkg.version, + python: 'NO', + apikey: clientConfig.key, + }, + yaneuraou: { 
name: 'Y', options: {} }, + fairy: { name: 'F', options: {} }, + }; +} + +function processResponse(res: Response): Work | undefined { + if (res.statusCode === StatusCodes.NO_CONTENT) return undefined; + if (res.statusCode === StatusCodes.ACCEPTED) + return JSON.parse(res.body) as Work; + if (res.statusCode === StatusCodes.UNAUTHORIZED) { + baseLogger.error(res.body); + process.exit(1); + } + throw new Error(`Unexpected status ${res.statusCode}: ${res}`); +} + +function joinPath(path: string) { + return new URL(`shoginet/${path}`, clientConfig.endpoint).toString(); +} + +export async function acquireWork(): Promise { + try { + const url = joinPath('acquire'); + const response = await got.post(url, { + timeout: { request: HTTP_TIMEOUT_IMPORTANT_SECONDS * 1000 }, + headers, + throwHttpErrors: false, + json: og({}), + }); + const work = processResponse(response); + return work; + } catch (err) { + baseLogger.error('Failed to acquire work:', err); + return undefined; + } +} + +export async function submitWork( + work: Work, + res: Record, +): Promise { + try { + const url = joinPath(`${work.work.type}/${work.work.id}`); + const response = await got.post(url, { + timeout: { request: HTTP_TIMEOUT_IMPORTANT_SECONDS * 1000 }, + headers, + json: og(res), + }); + return processResponse(response); + } catch (err) { + baseLogger.error('Failed to submit work:', err); + return undefined; + } +} + +export async function abortWork(work: Work): Promise { + try { + await got.post(joinPath(`abort/${work.work.id}`), { + timeout: { request: HTTP_TIMEOUT_UNIMPORTANT_SECONDS * 1000 }, + headers, + }); + } catch (err) { + baseLogger.error(`Failed to abort work: ${err}`); + } +} + +export async function analysisProgressReport( + work: Work, + res: any, +): Promise { + try { + await got.post(joinPath(`${work.work.type}/${work.work.id}`), { + timeout: { request: HTTP_TIMEOUT_UNIMPORTANT_SECONDS * 1000 }, + headers, + json: og({ ...res, partial: true }), + }); + } catch (err) { + 
baseLogger.warn(`Failed to submit analysis progress: ${err}`); + } +} + +export function getServerConfig(): Promise { + return got + .get(joinPath('config'), { + timeout: { request: HTTP_TIMEOUT_IMPORTANT_SECONDS * 1000 }, + headers, + }) + .json(); +} diff --git a/src/logger.ts b/src/logger.ts new file mode 100644 index 0000000..36cde76 --- /dev/null +++ b/src/logger.ts @@ -0,0 +1,21 @@ +import { type ILogObj, Logger } from 'tslog'; +import { clientConfig } from './config/client.js'; + +export const baseLogger: Logger = new Logger({ + name: 'shoginet', + minLevel: levelToNumber(clientConfig.logger), + prettyLogTemplate: '{{logLevelName}}: ({{hh}}:{{MM}}:{{ss}}) [{{name}}] ', + prettyLogTimeZone: 'local', + stylePrettyLogs: true, + hideLogPositionForProduction: true, +}); + +function levelToNumber(level: string): number { + const map: Record = { + debug: 2, + info: 3, + warn: 4, + error: 5, + }; + return map[level]; +} diff --git a/src/main.ts b/src/main.ts new file mode 100644 index 0000000..e6b5fb3 --- /dev/null +++ b/src/main.ts @@ -0,0 +1,65 @@ +import { verifyConfig } from './config/client.js'; +import { ServerConfig } from './config/server.js'; +import { baseLogger } from './logger.js'; +import { verifyVersion } from './version.js'; +import { WorkerManager } from './worker-manager.js'; + +async function main(): Promise { + baseLogger.info('Starting shoginet...'); + + baseLogger.info('Verifying config file...'); + try { + verifyConfig(); + } catch (err) { + baseLogger.error(`Invalid config file: ${err}`); + process.exit(1); + } + + const serverConfig = new ServerConfig(); + baseLogger.info('Fetching server config...'); + try { + await serverConfig.initialize(); + } catch (err) { + baseLogger.error(`Failed to fetch server config file: ${err}`); + process.exit(1); + } + + baseLogger.info('Verifying shoginet version...'); + try { + await verifyVersion(); + } catch (err) { + baseLogger.error(`Invalid version: ${err}`); + // process.exit(1); + } + + const 
workerManager = new WorkerManager(serverConfig); + baseLogger.info('Initializing worker manager...'); + try { + await workerManager.initialize(); + } catch (err) { + baseLogger.error(`Couldn't initialize workers: ${err}`); + process.exit(1); + } + + let nextForceShutdown = false; + const shutdown = async () => { + if (!nextForceShutdown) { + baseLogger.info('Shutting down...'); + nextForceShutdown = true; + await workerManager.stop(); + } else { + baseLogger.info('Forcing shutdown...'); + await workerManager.forceStop(); + } + + process.exit(0); + }; + + process.on('SIGINT', shutdown); + process.on('SIGTERM', shutdown); + + baseLogger.info('Shoginet is running...'); + await workerManager.start(); +} + +await main(); diff --git a/src/stats-reporter.ts b/src/stats-reporter.ts new file mode 100644 index 0000000..5013937 --- /dev/null +++ b/src/stats-reporter.ts @@ -0,0 +1,38 @@ +import { LOGGER_REPORT_INTERVAL_SECONDS } from './consts.js'; +import type { Work, WorkType } from './types.js'; +import type { Worker } from './worker.js'; + +export class StatsReporter { + private submittedWork: Record = { + move: 0, + analysis: 0, + puzzle: 0, + }; + + private logger: Worker['logger']; + private interval: NodeJS.Timeout | undefined; + + constructor(private worker: Worker) { + this.logger = worker.logger.getSubLogger({ + name: 'reporter', + }); + + this.bindEvents(); + + this.interval = setInterval(() => { + this.logger.info( + `Total: ${this.submittedWork.move} moves, ${this.submittedWork.analysis} analysis, ${this.submittedWork.puzzle} puzzles`, + ); + }, LOGGER_REPORT_INTERVAL_SECONDS * 1000); + } + + public stop(): void { + clearInterval(this.interval); + } + + private bindEvents(): void { + this.worker.on('result', (work: Work) => { + this.submittedWork[work.work.type]++; + }); + } +} diff --git a/src/systemd.ts b/src/systemd.ts new file mode 100644 index 0000000..246b88c --- /dev/null +++ b/src/systemd.ts @@ -0,0 +1,39 @@ +import { execSync } from 'node:child_process'; 
+import process from 'node:process'; + +function systemdConfig() { + const cwd = process.cwd(); + const user = execSync('whoami').toString().trim(); + const group = execSync('id -gn').toString().trim(); + + const output = `[Unit] +After=network-online.target +Wants=network-online.target + +[Service] +ExecStart=/usr/bin/npm run start +WorkingDirectory=${cwd} +ReadWriteDirectories=${cwd} +User=${user} +Group=${group} +Nice=5 +CapabilityBoundingSet= +PrivateTmp=true +PrivateDevices=true +DevicePolicy=closed +ProtectSystem=strict +NoNewPrivileges=true +Restart=always +RestartSec=5 +TimeoutStopSec=300 +KillSignal=SIGINT +KillMode=control-group + +[Install] +WantedBy=multi-user.target +`; + + console.log(output); +} + +systemdConfig(); diff --git a/src/types.ts b/src/types.ts new file mode 100644 index 0000000..6850655 --- /dev/null +++ b/src/types.ts @@ -0,0 +1,73 @@ +import type { Rules } from 'shogiops/types'; + +export type EngineKind = 'yaneuraou' | 'fairy'; + +export interface EngineInfo { + name?: string; + options?: string[]; + [key: string]: any; +} + +export interface Config { + endpoint: string; + key?: string; + yaneuraouPath: string; + fairyPath: string; + memory?: number; + cores?: number; +} + +export interface ScoreResult { + mate?: number; + cp?: number; +} + +export type WorkType = 'move' | 'analysis' | 'puzzle'; + +interface BaseWork { + work: { + type: WorkType; + id: string; + flavor: EngineKind; + }; + game_id: string; + position: string; + variant?: Rules; + moves: string; +} + +export type Level = 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10; +export interface MoveWork extends BaseWork { + work: { + type: 'move'; + id: string; + flavor: EngineKind; + level: Level; + clock?: { + wtime: number; + btime: number; + byo: number; + inc: number; + }; + }; +} + +export interface AnalysisWork extends BaseWork { + work: { + type: 'analysis'; + id: string; + flavor: EngineKind; + }; + nodes: number; + skipPositions: number[]; +} + +export interface PuzzleWork 
extends BaseWork { + work: { + type: 'puzzle'; + id: string; + flavor: EngineKind; + }; +} + +export type Work = AnalysisWork | MoveWork | PuzzleWork; diff --git a/src/version.ts b/src/version.ts new file mode 100644 index 0000000..d565508 --- /dev/null +++ b/src/version.ts @@ -0,0 +1,52 @@ +import got from 'got'; +import semver from 'semver'; +import pkg from '../package.json' with { type: 'json' }; +import { MIN_FAIRY_VERSION, MIN_YANEURAOU_VERSION } from './consts.js'; +import { baseLogger } from './logger.js'; + +export async function verifyVersion(): Promise { + const currentVersion = pkg.version; + const repo = pkg.repository.url.split('/').slice(-2).join('/'); + + const remotePkg = await got( + `https://raw.githubusercontent.com/${repo}/main/package.json`, + ).json<{ version: string }>(); + const remoteVersion = remotePkg.version; + + if (semver.gt(remoteVersion, currentVersion)) { + const currentMajor = semver.major(currentVersion); + const remoteMajor = semver.major(remoteVersion); + + if (remoteMajor > currentMajor) { + throw `Major version update available: ${remoteVersion} (current: ${currentVersion}). 
Exiting.`; + } else + baseLogger.warn( + `Newer version available: ${remoteVersion} (current: ${currentVersion}).`, + ); + } else baseLogger.info(`Current version (${currentVersion}) is up-to-date.`); +} + +export function verifyFairyVersion(versionStr: string): void { + const match = versionStr.match(/Fairy-Stockfish (\d{6})/); + if (!match) throw "Can't identify fairy version"; + + const dateStr = match[1]; + const day = parseInt(dateStr.slice(0, 2), 10); + const month = parseInt(dateStr.slice(2, 4), 10) - 1; // 0 based + const year = 2000 + parseInt(dateStr.slice(4, 6), 10); + + const versionDate = new Date(year, month, day); + if (versionDate < MIN_FAIRY_VERSION) + throw `Fairy version is too old, please update the fairy engine (min: ${MIN_FAIRY_VERSION.toDateString()}, current: ${versionDate.toDateString()})`; +} + +export function verifyYaneuraouVersion(versionStr: string): void { + const match = versionStr.match(/YaneuraOu NNUE ([\d.]+)/); + if (!match) throw "Can't identify YaneuraOu version"; + + const version = parseFloat(match[1]); + if (Number.isNaN(version)) throw 'Invalid YaneuraOu version format'; + + if (version < MIN_YANEURAOU_VERSION) + throw `YaneuraOu version is too old, please update the engine (min: ${MIN_YANEURAOU_VERSION}, current: ${match[1]})`; +} diff --git a/src/work/analysis.ts b/src/work/analysis.ts new file mode 100644 index 0000000..cc45706 --- /dev/null +++ b/src/work/analysis.ts @@ -0,0 +1,86 @@ +import { + ANALYSIS_PROGRESS_INTERVAL_SECONDS, + DEFAULT_ANALYSIS_MOVETIME_SECONDS, +} from '../consts.js'; +import type { Engine } from '../engine.js'; +import { analysisProgressReport } from '../http.js'; +import type { AnalysisWork } from '../types.js'; +import type { Worker } from '../worker.js'; +import { type ParsedInfo, parseInfo } from './util.js'; + +export function analysis( + worker: Worker, + engine: Engine, + work: AnalysisWork, +): void { + worker.logger.debug('Starting analysis'); + + const variant = work.variant || 
'standard'; + const moves: string[] = work.moves.split(' '); + const skip = work.skipPositions ?? []; + const serverConfig = worker.serverConfig.config.analysis; + + if (engine.kind === 'fairy') { + engine.setVariant(variant); + engine.setOption('USI_AnalyseMode', true); + engine.setOption('Skill_Level', 20); + } + engine.setMultiPv(1); + engine.send('usinewgame'); + + const result: any = Array(moves.length + 1).fill(null); + + const start = Date.now(); + let lastProgress = start; + + const analysePly = (ply: number): void => { + if (ply < 0) { + worker.logger.debug('Emitting analysis result:', result); + worker.emit('result', work, { analysis: result }); + return; + } + + if (skip.includes(ply)) { + worker.logger.debug(`Skipping analysis: ${ply} ply`); + result[ply] = { skipped: true }; + analysePly(ply - 1); + return; + } + worker.logger.debug(`Analysing: ${ply} ply`); + + const now = Date.now(); + if (now > lastProgress + ANALYSIS_PROGRESS_INTERVAL_SECONDS * 1000) { + analysisProgressReport(work, { analysis: result }); + lastProgress = now; + } + + let deepestInfo: ParsedInfo | undefined; + const processInfo = (args: string) => { + const parsed = parseInfo(args); + if ( + !parsed.depth || + !deepestInfo?.depth || + parsed.depth >= deepestInfo.depth + ) + deepestInfo = parsed; + }; + + engine.on('info', processInfo); + + engine.once('bestmove', () => { + engine.off('info', processInfo); + result[ply] = deepestInfo; + analysePly(ply - 1); + }); + + engine.search(work.position, moves.slice(0, ply), { + nodes: serverConfig.nodes, + movetime: + serverConfig.movetime || DEFAULT_ANALYSIS_MOVETIME_SECONDS * 1000, + depth: serverConfig.depth, + clock: undefined, + }); + }; + + analysePly(moves.length); +} diff --git a/src/work/move.ts b/src/work/move.ts new file mode 100644 index 0000000..d96723c --- /dev/null +++ b/src/work/move.ts @@ -0,0 +1,40 @@ +import { makeUsi, parseUsi } from 'shogiops/util'; +import { DEFAULT_MOVE_MOVETIME_SECONDS } from '../consts.js'; +import 
type { Engine } from '../engine.js'; +import type { MoveWork } from '../types.js'; +import type { Worker } from '../worker.js'; + +export function move(worker: Worker, engine: Engine, work: MoveWork): void { + worker.logger.debug('Starting move generation'); + + const variant = work.variant || 'standard'; + const serverConfig = + worker.serverConfig.config.move[engine.kind][work.work.level]; + + if (engine.kind === 'fairy') { + engine.setVariant(variant); + if (serverConfig.skill) engine.setOption('Skill_Level', serverConfig.skill); + } + + engine.setMultiPv(1); + engine.send('usinewgame'); + + engine.once('bestmove', (usi) => { + const parsed = parseUsi(usi); + if (parsed) { + const result = { move: { bestmove: makeUsi(parsed) } }; + worker.logger.debug('Emitting move result:', result); + worker.emit('result', work, result); + } else { + worker.logger.warn(`Received '${usi}' for:`, work); + worker.emit('abort', work); + } + }); + + engine.search(work.position, work.moves.split(' '), { + movetime: serverConfig.movetime || DEFAULT_MOVE_MOVETIME_SECONDS * 1000, + depth: serverConfig.depth, + nodes: serverConfig.nodes, + clock: work.work.clock, + }); +} diff --git a/src/work/puzzle.ts b/src/work/puzzle.ts new file mode 100644 index 0000000..a0df35f --- /dev/null +++ b/src/work/puzzle.ts @@ -0,0 +1,93 @@ +import type { Color } from 'shogiops/types'; +import { makeUsi, opposite, parseUsi } from 'shogiops/util'; +import { DEFAULT_PUZZLE_MOVETIME_SECONDS } from '../consts.js'; +import type { Engine } from '../engine.js'; +import type { PuzzleWork, ScoreResult } from '../types.js'; +import type { Worker } from '../worker.js'; +import { parseInfo } from './util.js'; + +type MultiPvScores3 = [ + ScoreResult | undefined, + ScoreResult | undefined, + ScoreResult | undefined, +]; + +function winChances(score: ScoreResult): number { + if (score.mate) return score.mate > 0 ? 
1 : -1; + else if (score.cp) return 2 / (1 + Math.exp(-0.0007 * score.cp)) - 1; + else return 0; +} + +function isAmbiguous(scores: MultiPvScores3): boolean { + if (scores.filter((s) => !!s).length <= 1) return false; + const bestScore = scores[0]; + const secondScore = scores[1]; + if (!bestScore || !secondScore) return false; + else return winChances(bestScore) < winChances(secondScore) + 0.33; +} + +export function puzzle(worker: Worker, engine: Engine, work: PuzzleWork): void { + worker.logger.debug('Starting puzzle analysis'); + + const moves: string[] = work.moves.split(' '); + const initialMovesLength = moves.length; + const position: string = work.position; + + const color: Color = position.split(' ')[1] !== 'w' ? 'sente' : 'gote'; + const winnerColor = initialMovesLength % 2 === 0 ? color : opposite(color); + + const serverConfig = worker.serverConfig.config.puzzle; + + if (engine.kind === 'fairy') { + engine.setVariant('standard'); + engine.setOption('Skill_Level', '20'); + engine.setOption('USI_AnalyseMode', 'true'); + } + // we need 2, but let's play it safe + engine.setMultiPv(3); + engine.send('usinewgame'); + + const analysePly = (depth: number, color: Color) => { + worker.logger.debug(`Analysing: ${depth} depth`); + + const scores: MultiPvScores3 = [undefined, undefined, undefined]; + + const processScore = (args: string) => { + const parsedInfo = parseInfo(args); + const multipv = parsedInfo.multipv || 1; + const score = parsedInfo.score; + + if (score !== undefined && scores.length >= multipv) + scores[multipv - 1] = score; + }; + + engine.on('info', processScore); + + engine.once('bestmove', (usi) => { + engine.off('info', processScore); + + const parsed = parseUsi(usi); + if ( + parsed && + (!serverConfig.maxLength || depth < serverConfig.maxLength) && + (color !== winnerColor || !isAmbiguous(scores)) + ) { + moves.push(makeUsi(parsed)); + analysePly(depth + 1, opposite(color)); + } else { + const result = { result: moves.length > 
initialMovesLength }; + worker.logger.debug('Emitting move result:', result); + worker.emit('result', work, result); + } + }); + + engine.search(position, moves, { + movetime: serverConfig.movetime || DEFAULT_PUZZLE_MOVETIME_SECONDS * 1000, + depth: serverConfig.depth, + nodes: undefined, + clock: undefined, + }); + }; + + analysePly(0, color); +} diff --git a/src/work/util.ts b/src/work/util.ts new file mode 100644 index 0000000..804a93b --- /dev/null +++ b/src/work/util.ts @@ -0,0 +1,55 @@ +import type { ScoreResult } from '../types.js'; + +export type ParsedInfo = { + depth?: number; + seldepth?: number; + score?: ScoreResult; + multipv?: number; + nodes?: number; + nps?: number; + time?: number; + pv?: string; // space separated +}; + +export function parseInfo(line: string): ParsedInfo { + const tokens = line.trim().split(/\s+/); + const out: ParsedInfo = {}; + + for (let i = 0; i < tokens.length; i++) { + const t = tokens[i]; + switch (t) { + case 'depth': + out.depth = Number(tokens[++i]); + break; + case 'seldepth': + out.seldepth = Number(tokens[++i]); + break; + case 'score': { + const scoreType = tokens[++i]; // "cp" or "mate" + const val = Number(tokens[++i]); + out.score = {}; + if (scoreType === 'cp') out.score.cp = val; + else if (scoreType === 'mate') out.score.mate = val; + break; + } + case 'multipv': + out.multipv = Number(tokens[++i]); + break; + case 'nodes': + out.nodes = Number(tokens[++i]); + break; + case 'nps': + out.nps = Number(tokens[++i]); + break; + case 'time': + out.time = Number(tokens[++i]); + break; + case 'pv': + out.pv = tokens.slice(i + 1).join(' '); + i = tokens.length; + break; + } + } + + return out; +} diff --git a/src/worker-manager.ts b/src/worker-manager.ts new file mode 100644 index 0000000..6441248 --- /dev/null +++ b/src/worker-manager.ts @@ -0,0 +1,163 @@ +import { clientConfig } from './config/client.js'; +import type { ServerConfig } from './config/server.js'; +import { MAX_BACKOFF_SECONDS, 
WORKER_INIT_TIMEOUT_SECONDS } from './consts.js'; +import * as http from './http.js'; +import { baseLogger } from './logger.js'; +import { verifyFairyVersion, verifyYaneuraouVersion } from './version.js'; +import { Worker } from './worker.js'; + +export class WorkerManager { + private workers: Worker[] = []; + private logger = baseLogger.getSubLogger({ + name: 'worker-manager', + }); + + private isRunning = false; + private abort = new AbortController(); + private waitingResolver: ((w: Worker) => void) | undefined = undefined; + + constructor(private readonly serverConfig: ServerConfig) {} + + async initialize(): Promise { + this.logger.info(`Initializing ${clientConfig.workers} workers...`); + + for (let i = 0; i < clientConfig.workers; i++) { + const w = new Worker(i, this.serverConfig); + + w.on('result', async (type, res) => { + const work = await http.submitWork(type, res); + if (work && this.isRunning) w.task(work); + else setTimeout(w.release.bind(w), 1000); // to avoid asking for work immediately + }); + + w.on('abort', (work) => { + http.abortWork(work); + w.release(); + }); + + w.on('available', () => { + if (this.waitingResolver) { + const resolve = this.waitingResolver; + this.waitingResolver = undefined; + resolve(w); + } + }); + + this.workers.push(w); + + await new Promise((resolve) => { + const timeout = setTimeout(() => { + throw `Worker #${i} initialization timed out`; + }, WORKER_INIT_TIMEOUT_SECONDS * 1000); + + w.once('initialized', () => { + clearTimeout(timeout); + this.logger.info(`Worker #${i} initialized`); + resolve(); + }); + + w.initialize(); + }); + } + + this.logger.info('Verifying engine versions...'); + try { + const worker = this.workers[0]; + verifyFairyVersion(worker.engines.fairy?.info.name || ''); + verifyYaneuraouVersion(worker.engines.yaneuraou?.info.name || ''); + } catch (err) { + this.logger.error(`Invalid engine version: ${err}`); + process.exit(1); + } + } + + async stop(): Promise { + this.isRunning = false; + 
this.abort.abort(); + + const busyWorkers = this.workers.filter((w) => !!w.currentWork); + if (busyWorkers.length) { + await new Promise((resolve) => { + this.logger.info( + 'Waiting for workers to finish, send signal again to force exit.', + ); + let done = 0; + const onDone = () => { + done++; + if (done >= busyWorkers.length) resolve(); + }; + + busyWorkers.forEach((w) => { + w.once('result', onDone); + w.once('failure', onDone); + }); + }); + this.workers.forEach((w) => { + w.stop(); + }); + } else this.forceStop(); + } + + async forceStop(): Promise { + this.isRunning = false; + this.abort.abort(); + + const busyWorkers = this.workers.filter((w) => !!w.currentWork); + for (const w of busyWorkers) { + if (w.currentWork) await http.abortWork(w.currentWork); + } + this.workers.forEach((w) => { + w.stop(); + }); + } + + async start(): Promise { + this.isRunning = true; + let noTask = 0; + while (this.isRunning) { + const availableWorker = await this.waitForWorker(); + const work = await http.acquireWork(); + + if (work) { + this.logger.debug('Received work:', work); + noTask = 0; + availableWorker.task(work); + } else { + noTask += 1; + const backoff = Math.min( + 500 * noTask + 500 * noTask * Math.random(), + MAX_BACKOFF_SECONDS * 1000, + ); + await this.sleep(backoff); + } + } + } + + private waitForWorker(): Promise { + const available = this.workers.find((w) => w.isAvailable()); + if (available) return Promise.resolve(available); + + return new Promise((resolve) => { + this.waitingResolver = resolve; + }); + } + + private sleep(backoff: number): Promise { + this.logger.debug(`Sleeping for: ${backoff}`); + + const abort = this.abort; + return new Promise((resolve) => { + function onAbort() { + clearTimeout(id); + abort.signal.removeEventListener('abort', onAbort); + resolve(); + } + + const id = setTimeout(() => { + abort.signal.removeEventListener('abort', onAbort); + resolve(); + }, backoff); + abort.signal.addEventListener('abort', onAbort); + }); + } +} 
diff --git a/src/worker.ts b/src/worker.ts new file mode 100644 index 0000000..eb45035 --- /dev/null +++ b/src/worker.ts @@ -0,0 +1,173 @@ +import EventEmitter from 'node:events'; +import type { ServerConfig } from './config/server.js'; +import { + TASK_ANALYSIS_TIMEOUT_SECONDS, + TASK_MOVE_TIMEOUT_SECONDS, + TASK_PUZZLE_TIMEOUT_SECONDS, +} from './consts.js'; +import { Engine } from './engine.js'; +import { abortWork } from './http.js'; +import { baseLogger } from './logger.js'; +import { StatsReporter } from './stats-reporter.js'; +import type { + AnalysisWork, + EngineKind, + MoveWork, + PuzzleWork, + Work, +} from './types.js'; +import { analysis } from './work/analysis.js'; +import { move } from './work/move.js'; +import { puzzle } from './work/puzzle.js'; + +export interface WorkerEvents { + initialized: () => void; + available: () => void; + result: (work: Work, result: any) => void; + abort: (work: Work) => void; +} + +export declare interface Worker { + on(event: U, listener: WorkerEvents[U]): this; + emit( + event: U, + ...args: Parameters + ): boolean; +} + +export class Worker extends EventEmitter { + public engines: Record = { + yaneuraou: undefined, + fairy: undefined, + }; + public currentWork: Work | undefined = undefined; + public logger: typeof baseLogger; + + private taskTimeout: NodeJS.Timeout | undefined; + private statsReporter: StatsReporter; + + constructor( + public readonly index: number, + public readonly serverConfig: ServerConfig, + ) { + super(); + + this.logger = baseLogger.getSubLogger({ + name: `worker-${this.index}`, + }); + this.statsReporter = new StatsReporter(this); + + this.on('result', () => { + clearTimeout(this.taskTimeout); + }); + } + + initialize(): void { + const enginesInitialized: Record = { + yaneuraou: !!this.engines.yaneuraou?.isActive, + fairy: !!this.engines.fairy?.isActive, + }; + + if (enginesInitialized.fairy && enginesInitialized.yaneuraou) { + this.emit('initialized'); + return; + } + + const onReady = 
(kind: EngineKind) => { + enginesInitialized[kind] = true; + if (enginesInitialized.fairy && enginesInitialized.yaneuraou) + this.emit('initialized'); + }; + + const onFailure = () => { + if (this.currentWork) { + this.logger.error('Aborting work due to failue', this.currentWork); + abortWork(this.currentWork); + } + this.initialize(); + }; + + const engineKinds: EngineKind[] = ['yaneuraou', 'fairy']; + engineKinds.forEach((kind) => { + const curEngine = this.engines[kind]; + if (!curEngine?.isActive) { + this.logger.info(`Initializing ${kind} engine`); + + curEngine?.destroy(); + + const newEngine = new Engine(this, kind); + newEngine.once('readyok', () => onReady(kind)); + newEngine.once('failure', onFailure); + + this.engines[kind] = newEngine; + } + }); + } + + stop(): void { + this.logger.info('Stopping...'); + + this.statsReporter.stop(); + this.removeAllListeners(); + this.engines.yaneuraou?.destroy(); + this.engines.fairy?.destroy(); + + this.engines.yaneuraou = undefined; + this.engines.fairy = undefined; + } + + isAvailable(): boolean { + return ( + !!this.engines.yaneuraou?.isActive && + !!this.engines.fairy?.isActive && + !this.currentWork + ); + } + + task(work: Work): void { + const workType = work.work.type; + const engine = this.engines[work.work.flavor]; + + if (!engine || !engine.isActive) { + this.logger.error('Engine not found'); + this.initialize(); + return; + } + + this.currentWork = work; + + const onTimeout = () => { + // no need to abort - server gave up a long time ago + engine.destroy(); + this.release(); + }; + + if (workType === 'analysis') { + this.taskTimeout = setTimeout( + onTimeout, + TASK_ANALYSIS_TIMEOUT_SECONDS * 1000, + ); + analysis(this, engine, work as AnalysisWork); + } else if (workType === 'move') { + this.taskTimeout = setTimeout( + onTimeout, + TASK_MOVE_TIMEOUT_SECONDS * 1000, + ); + move(this, engine, work as MoveWork); + } else if (workType === 'puzzle') { + this.taskTimeout = setTimeout( + onTimeout, + 
TASK_PUZZLE_TIMEOUT_SECONDS * 1000, + ); + puzzle(this, engine, work as PuzzleWork); + } else { + this.release(); + this.logger.error(`Invalid work type: ${workType}`); + } + } + + release(): void { + this.currentWork = undefined; + this.emit('available'); + } +} diff --git a/systemd.py b/systemd.py deleted file mode 100644 index 2bb3847..0000000 --- a/systemd.py +++ /dev/null @@ -1,127 +0,0 @@ -import consts -from config import load_conf, validate_key, validate_engine_dir, validate_command, validate_memory, validate_threads, validate_cores, validate_endpoint -import typing -import textwrap -from shlex import quote as shell_quote -import getpass -import os -import sys - - -def systemd(args: typing.Any) -> None: - conf = load_conf(args) - - template = textwrap.dedent("""\ - [Unit] - Description=Fishnet instance - After=network-online.target - Wants=network-online.target - - [Service] - ExecStart={start} - WorkingDirectory={cwd} - ReadWriteDirectories={cwd} - User={user} - Group={group} - Nice=5 - CapabilityBoundingSet= - PrivateTmp=true - PrivateDevices=true - DevicePolicy=closed - ProtectSystem={protect_system} - NoNewPrivileges=true - Restart=always - - [Install] - WantedBy=multi-user.target""") - - # Prepare command line arguments - builder = [shell_quote(sys.executable)] - - if __package__ is None: - builder.append(shell_quote(os.path.abspath(sys.argv[0]))) - else: - builder.append("-m") - builder.append(shell_quote( - os.path.splitext(os.path.basename(__file__))[0])) - - if args.no_conf: - builder.append("--no-conf") - else: - config_file = os.path.abspath(args.conf or consts.DEFAULT_CONFIG) - builder.append("--conf") - builder.append(shell_quote(config_file)) - - if args.key is not None: - builder.append("--key") - builder.append(shell_quote(validate_key(args.key, conf))) - if args.engine_dir is not None: - builder.append("--engine-dir") - builder.append(shell_quote(validate_engine_dir(args.engine_dir))) - yane_command = 
validate_command(args.yaneuraou_command, conf) - if args.yaneuraou_command is not None and yane_command is not None: - builder.append("--yaneuraou-command") - builder.append(shell_quote( - yane_command)) - fairy_command = validate_command(args.fairy_command, conf) - if args.fairy_command is not None and fairy_command is not None: - builder.append("--fairy-command") - builder.append(shell_quote( - fairy_command)) - if args.cores is not None: - builder.append("--cores") - builder.append(shell_quote(str(validate_cores(args.cores)))) - if args.memory is not None: - builder.append("--memory") - builder.append(shell_quote(str(validate_memory(args.memory, conf)))) - if args.threads is not None: - builder.append("--threads-per-process") - builder.append(shell_quote(str(validate_threads(args.threads, conf)))) - if args.endpoint is not None: - builder.append("--endpoint") - builder.append(shell_quote(validate_endpoint(args.endpoint))) - if args.fixed_backoff is not None: - builder.append( - "--fixed-backoff" if args.fixed_backoff else "--no-fixed-backoff") - for option_name, option_value in args.setoptionYaneuraou: - builder.append("--setoptionYaneuraou") - builder.append(shell_quote(option_name)) - builder.append(shell_quote(option_value)) - for option_name, option_value in args.setoptionFairy: - builder.append("--setoptionFairy") - builder.append(shell_quote(option_name)) - builder.append(shell_quote(option_value)) - if args.auto_update: - builder.append("--auto-update") - - builder.append("run") - - start = " ".join(builder) - - protect_system = "full" - if args.auto_update and os.path.realpath(os.path.abspath(__file__)).startswith("/usr/"): - protect_system = "false" - - print(template.format( - user=getpass.getuser(), - group=getpass.getuser(), - cwd=os.path.abspath("."), - start=start, - protect_system=protect_system - )) - - try: - if os.geteuid() == 0: - print("\n# WARNING: Running as root is not recommended!", file=sys.stderr) - except AttributeError: - # No 
os.getuid() on Windows - pass - - if sys.stdout.isatty(): - print("\n# Example usage:", file=sys.stderr) - print("# python -m shoginet systemd | sudo tee /etc/systemd/system/shoginet.service", file=sys.stderr) - print("# sudo systemctl enable shoginet.service", file=sys.stderr) - print("# sudo systemctl start shoginet.service", file=sys.stderr) - print("#", file=sys.stderr) - print( - "# Live view of the log: sudo journalctl --follow -u shoginet", file=sys.stderr) diff --git a/test/main.ts b/test/main.ts new file mode 100644 index 0000000..76fb501 --- /dev/null +++ b/test/main.ts @@ -0,0 +1,96 @@ +import { spawn } from 'node:child_process'; +import http, { type IncomingMessage, type ServerResponse } from 'node:http'; +import config from 'config'; +import { StatusCodes } from 'http-status-codes'; +import serverConfig from './server-config.json' with { type: 'json' }; +import { type WorkDefinition, works } from './works.js'; + +function main() { + const worksInProgress = new Map(); + + const server = http.createServer( + async (req: IncomingMessage, res: ServerResponse) => { + const getNextWorkOrFinish = () => { + const next = works.shift(); + if (!next) { + console.log('All tests finished'); + process.exit(0); + } + + console.log(`Started work: ${next.name}`); + worksInProgress.set(next.path, next); + return next; + }; + + switch (req.url) { + case '/shoginet/config': { + res.writeHead(StatusCodes.OK, { 'Content-Type': 'application/json' }); + return res.end(JSON.stringify(serverConfig)); + } + case '/shoginet/acquire': { + const next = getNextWorkOrFinish(); + + res.writeHead(StatusCodes.ACCEPTED, { + 'Content-Type': 'application/json', + }); + return res.end(JSON.stringify(next.work)); + } + default: { + const curWork = req.url && worksInProgress.get(req.url); + if (!curWork) { + console.error(`✖ No work in progress`); + process.exit(1); + } + try { + const chunks: Buffer[] = []; + for await (const c of req) chunks.push(c); + const body = chunks.length + ? 
JSON.parse(Buffer.concat(chunks).toString()) + : {}; + + const validated = curWork.validate(body); + // allow undefined for progress reports + if (validated !== undefined) { + if (validated) console.log(`✔ ${curWork.name} passed validation`); + else console.error(`✖ ${curWork.name} failed`); + + const next = getNextWorkOrFinish(); + res.writeHead(StatusCodes.ACCEPTED, { + 'Content-Type': 'application/json', + }); + return res.end(JSON.stringify(next.work)); + } else { + res.writeHead(StatusCodes.NO_CONTENT, { + 'Content-Type': 'application/json', + }); + return res.end(); + } + } catch (e) { + res.writeHead(StatusCodes.INTERNAL_SERVER_ERROR); + return res.end(JSON.stringify({ error: (e as Error).message })); + } + } + } + }, + ); + + const url = new URL(config.get('endpoint')); + server.listen(url.port, () => { + console.log(`Mock server running at ${url.href}`); + }); + + const shoginetProcess = spawn('tsx', ['src/main.ts'], { + env: { + ...process.env, + }, + }); + + shoginetProcess.stdout?.on('data', (chunk) => process.stdout.write(chunk)); + shoginetProcess.stderr?.on('data', (chunk) => process.stderr.write(chunk)); + + shoginetProcess.on('exit', (code) => { + process.exit(code ?? 
1); + }); +} + +main(); diff --git a/test/server-config.json b/test/server-config.json new file mode 100644 index 0000000..9791d69 --- /dev/null +++ b/test/server-config.json @@ -0,0 +1,92 @@ +{ + "analysis": { + "movetime": 1500, + "nodes": 0, + "depth": 18 + }, + "puzzle": { + "movetime": 2000, + "depth": 18, + "maxLength": 12 + }, + "move": { + "fairy": { + "1": { + "skill": -9, + "movetime": 50, + "depth": 5 + }, + "2": { + "skill": -5, + "movetime": 100, + "depth": 5 + }, + "3": { + "skill": -1, + "movetime": 150, + "depth": 5 + }, + "4": { + "skill": 3, + "movetime": 200, + "depth": 5 + }, + "5": { + "skill": 6, + "movetime": 300, + "depth": 5 + }, + "6": { + "skill": 9, + "movetime": 400, + "depth": 5 + }, + "7": { + "skill": 12, + "movetime": 500, + "depth": 8 + }, + "8": { + "skill": 15, + "movetime": 1000, + "depth": 13 + }, + "9": { + "skill": 18, + "movetime": 1500, + "depth": 17 + }, + "10": { + "skill": 20, + "movetime": 2000, + "depth": 22 + } + }, + "yaneuraou": { + "5": { + "movetime": 300, + "depth": 2 + }, + "6": { + "movetime": 400, + "depth": 4 + }, + "7": { + "movetime": 500, + "depth": 8 + }, + "8": { + "movetime": 1000, + "depth": 13 + }, + "9": { + "movetime": 1500, + "depth": 17 + }, + "10": { + "movetime": 2500, + "depth": 22 + } + } + } +} \ No newline at end of file diff --git a/test/works.ts b/test/works.ts new file mode 100644 index 0000000..783ecb6 --- /dev/null +++ b/test/works.ts @@ -0,0 +1,100 @@ +import { parseUsi } from 'shogiops/util'; +import type { Work } from '../src/types.js'; + +export interface WorkDefinition { + name: string; + path: string; + work: Work; + validate: (response: any) => boolean | undefined; +} + +function validateBestmove(response: any): boolean { + const bm = response.move?.bestmove; + const parsed = parseUsi(bm); + return !!parsed; +} + +export const works: WorkDefinition[] = [ + { + name: 'Move (yaneuraou)', + path: '/shoginet/move/A', + work: { + work: { + type: 'move', + id: 'A', + level: 5, + clock: 
{ btime: 120000, wtime: 120000, inc: 0, byo: 0 }, + flavor: 'yaneuraou', + }, + game_id: 'xxxxxxxx', + position: + 'lnsgkgsnl/1r5b1/ppppppppp/9/9/9/PPPPPPPPP/1B5R1/LNSGKGSNL b - 1', + variant: 'standard', + moves: '4g4f', + }, + validate: validateBestmove, + }, + { + name: 'Move (fairy)', + path: '/shoginet/move/B', + work: { + work: { + type: 'move', + id: 'B', + level: 1, + clock: { btime: 120000, wtime: 120000, inc: 0, byo: 0 }, + flavor: 'fairy', + }, + game_id: 'xxxxxxxx', + position: + 'lnsgkgsnl/1r5b1/ppppppppp/9/9/9/PPPPPPPPP/1B5R1/LNSGKGSNL b - 1', + variant: 'standard', + moves: '4g4f', + }, + validate: validateBestmove, + }, + { + name: 'Analysis', + path: '/shoginet/analysis/C', + work: { + work: { + type: 'analysis', + id: 'C', + flavor: 'yaneuraou', + }, + nodes: 1250000, + skipPositions: [0, 1, 2], + game_id: 'xxxxxxxx', + position: + 'lnsgkgsnl/1r5b1/ppppppppp/9/9/9/PPPPPPPPP/1B5R1/LNSGKGSNL b - 1', + variant: 'standard', + moves: '7g7f 3c3d 6i7h 2b8h+ 7i8h 5c5d 2g2f 8b5b 2f2e 5d5e 5i6h', + }, + validate: (response: any) => { + if (response.partial) return; + const skipped = [0, 1, 2].every( + (s) => response.analysis[s].skipped === true, + ); + return skipped && response.analysis.length === 12; + }, + }, + { + name: 'Puzzle', + path: '/shoginet/puzzle/D', + work: { + work: { + type: 'puzzle', + id: 'D', + flavor: 'yaneuraou', + }, + game_id: 'synthetic', + position: + 'lnsgk4/1r3s3/1ppp3pp/p8/5+B3/2P1n4/PP3+bPPP/8R/L1SGKGS1L b GNL2Pn4p 27', + variant: 'standard', + moves: '3i3h', + }, + validate: (response: any) => { + return response.result === true; + }, + }, +]; diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 0000000..70e7a00 --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,36 @@ +{ + "compilerOptions": { + "rootDir": ".", + "module": "nodenext", + "target": "esnext", + "lib": ["esnext"], + "types": ["node"], + + "sourceMap": true, + "declaration": true, + "declarationMap": true, + + // Stricter Typechecking Options + // 
"noUncheckedIndexedAccess": true, + "exactOptionalPropertyTypes": true, + + // Style Options + // "noImplicitReturns": true, + // "noImplicitOverride": true, + // "noUnusedLocals": true, + // "noUnusedParameters": true, + // "noFallthroughCasesInSwitch": true, + // "noPropertyAccessFromIndexSignature": true, + + "esModuleInterop": true, + "composite": true, + "isolatedDeclarations": true, + + // Recommended Options + "strict": true, + "verbatimModuleSyntax": true, + "isolatedModules": true, + "noUncheckedSideEffectImports": true, + "noEmit": true + } +} diff --git a/util.py b/util.py deleted file mode 100644 index f983758..0000000 --- a/util.py +++ /dev/null @@ -1,94 +0,0 @@ -import typing -import os -import sys -import platform -import urllib.parse as urlparse -import math -from errors import ConfigError -from cpuid import detect_cpu_capabilities - - -def parse_bool(inp: typing.Any, default: bool = False) -> bool: - if not inp or isinstance(inp, str): - return default - - inp = inp.strip().lower() - if not inp: - return default - - if inp in ["y", "j", "yes", "yep", "true", "t", "1", "ok"]: - return True - elif inp in ["n", "no", "nop", "nope", "f", "false", "0"]: - return False - else: - raise ConfigError("Not a boolean value: %s", inp) - - -def base_url(url: str) -> str: - url_info = urlparse.urlparse(url) - return "%s://%s/" % (url_info.scheme, url_info.hostname) - - -def yaneuraou_filename() -> str: - machine = platform.machine().lower() - - vendor, modern, bmi2, sse42, avx2 = detect_cpu_capabilities() - if sse42 and "Intel" in vendor and avx2: - suffix = "-AVX2" - elif sse42 and "Intel" in vendor: - suffix = "-SSE42" - else: - suffix = "" - - if os.name == "nt": - return "YaneuraOu-%s%s.exe" % (machine, suffix) - elif os.name == "os2" or sys.platform == "darwin": - return "YaneuraOu-by-gcc" - else: - return "YaneuraOu-by-gcc%s" % suffix - - -def fairy_filename() -> str: - machine = platform.machine().lower() - - vendor, modern, bmi2, sse42, avx2 = 
detect_cpu_capabilities() - if modern and "Intel" in vendor and bmi2: - suffix = "-bmi2" - elif modern: - suffix = "-modern" - else: - suffix = "" - - if os.name == "nt": - return "fairy-stockfish-largeboard_x86-64%s%s.exe" % (machine, suffix) - elif os.name == "os2" or sys.platform == "darwin": - return "fairy-stockfish-largeboard_x86-64" - else: - return "fairy-stockfish-largeboard_x86-64%s" % suffix - - -def encode_score(kind: str, value: int) -> int: - if kind == "mate": - if value > 0: - return 102_000 - value - else: - return -102_000 - value - else: - return min(max(value, -100_000), 100_000) - -def decode_score(score: int) -> typing.Any: - if score > 100_000: - return {"mate": 102_000 - score} - elif score < -100_000: - return {"mate": -102_000 - score} - else: - return {"cp": score} - -def win_chances(score: int) -> float: - """ - winning chances from -1 to 1 https://graphsketch.com/?eqn1_color=1&eqn1_eqn=100+*+%282+%2F+%281+%2B+exp%28-0.0007+*+x%29%29+-+1%29&eqn2_color=2&eqn2_eqn=&eqn3_color=3&eqn3_eqn=&eqn4_color=4&eqn4_eqn=&eqn5_color=5&eqn5_eqn=&eqn6_color=6&eqn6_eqn=&x_min=-7000&x_max=7000&y_min=-100&y_max=100&x_tick=100&y_tick=10&x_label_freq=2&y_label_freq=2&do_grid=0&do_grid=1&bold_labeled_lines=0&bold_labeled_lines=1&line_width=4&image_w=850&image_h=525 - """ - if abs(score) > 100_000: - return 1 if score > 0 else -1 - - return 2 / (1 + math.exp(-0.0007 * score)) - 1 \ No newline at end of file diff --git a/worker.py b/worker.py deleted file mode 100644 index 9dbaa57..0000000 --- a/worker.py +++ /dev/null @@ -1,513 +0,0 @@ -import threading -import configparser -import progressReporter -import platform -import typing -import util -import random -import consts -import json -import requests -from errors import UpdateRequired -from config import get_endpoint, get_engine_dir, conf_get, get_yaneuraou_command, get_fairy_command, get_key -from logger import log -import time -from engines import Engine - - -class Worker(threading.Thread): - def 
__init__(self, conf: configparser.ConfigParser, threads: int, memory: int, progress_reporter: progressReporter.ProgressReporter) -> None: - super(Worker, self).__init__() - self.conf = conf - self.threads = threads - self.memory = memory // 2 # split between fairy and yane - - self.progress_reporter = progress_reporter - - self.alive = True - self.fatal_error: typing.Optional[Exception] = None - self.finished = threading.Event() - self.sleep = threading.Event() - self.status_lock = threading.RLock() - - self.nodes = 0 - self.positions = 0 - - self.engines_lock = threading.RLock() - self.yaneuraou: typing.Optional[Engine] = None - self.fairy: typing.Optional[Engine] = None - self.yaneuraou_info: typing.Any = None - self.fairy_info: typing.Any = None - - self.job = None - self.backoff = start_backoff(self.conf) - - self.http = requests.Session() - self.http.mount( - "http://", requests.adapters.HTTPAdapter(max_retries=1)) - self.http.mount( - "https://", requests.adapters.HTTPAdapter(max_retries=1)) - - def set_name(self, name: str) -> None: - self.name = name - if self.progress_reporter: - self.progress_reporter.name = "%s (P)" % (name, ) - - def stop(self) -> None: - with self.status_lock: - self.alive = False - self.kill_engine() - self.sleep.set() - - def stop_soon(self) -> None: - with self.status_lock: - self.alive = False - self.sleep.set() - - def is_alive(self) -> bool: - with self.status_lock: - return self.alive - - def report_and_fetch(self, path: str, result: typing.Any, params: typing.Any) -> typing.Any: - return self.http.post(get_endpoint(self.conf, path), - params=params, - json=result, - timeout=consts.HTTP_TIMEOUT) - - def run(self) -> None: - try: - while self.is_alive(): - self.run_inner() - except UpdateRequired as error: - self.fatal_error = error - except Exception as error: - self.fatal_error = error - log.exception("Fatal error in worker") - finally: - self.finished.set() - - def run_inner(self) -> None: - try: - # Check if the engine is 
still alive and start, if necessary - self.start_engines() - - # Do the next work unit - path, request = self.work() - except consts.DEAD_ENGINE_ERRORS: - alive = self.is_alive() - if alive: - t = next(self.backoff) - log.exception( - "Engine process has died. Backing off %0.1fs", t) - - # Abort current job - self.abort_job() - - if alive: - self.sleep.wait(t) - self.kill_engine() - - return - - try: - # Report result and fetch next job unless stopping and no results to report - params = {} - if not self.is_alive(): - params["stop"] = "true" - if "stop" in params and path == "acquire": - response = None - else: - response = self.report_and_fetch(path, request, params) - - except requests.RequestException as err: - self.job = None - t = next(self.backoff) - log.error( - "Backing off %0.1fs after failed request (%s)", t, err) - self.sleep.wait(t) - else: - # Handle response. - if response is None or response.status_code == 204: - self.job = None - t = next(self.backoff) - log.debug("No job received. Backing off %0.1fs", t) - self.sleep.wait(t) - elif response.status_code == 202: - log.debug("Got job: %s", response.text) - self.job = response.json() - self.backoff = start_backoff(self.conf) - elif 500 <= response.status_code <= 599: - self.job = None - t = next(self.backoff) - log.error("Server error: HTTP %d %s. Backing off %0.1fs", - response.status_code, response.reason, t) - self.sleep.wait(t) - elif 400 <= response.status_code <= 499: - self.job = None - t = next(self.backoff) + \ - (60 if response.status_code == 429 else 0) - try: - log.debug("Client error: HTTP %d %s: %s", - response.status_code, response.reason, response.text) - error = response.json()["error"] - log.error(error) - - if "Please restart shoginet to upgrade." in error: - log.error("Stopping worker for update.") - raise UpdateRequired() - except (KeyError, ValueError): - log.error("Client error: HTTP %d %s. Backing off %0.1fs. 
Request was: %s", - response.status_code, response.reason, t, json.dumps(request)) - self.sleep.wait(t) - else: - self.job = None - t = next(self.backoff) - log.error( - "Unexpected HTTP status for acquire: %d", response.status_code) - self.sleep.wait(t) - - def abort_job(self) -> None: - if self.job is None: - return - - log.debug("Aborting job %s", self.job["work"]["id"]) - - try: - response = requests.post(get_endpoint(self.conf, "abort/%s" % self.job["work"]["id"]), - data=json.dumps(self.make_request()), - timeout=consts.HTTP_TIMEOUT) - if response.status_code == 204: - log.info("Aborted job %s", self.job["work"]["id"]) - else: - log.error( - "Unexpected HTTP status for abort: %d", response.status_code) - except requests.RequestException: - log.exception("Could not abort job. Continuing.") - - self.job = None - - def kill_engine(self) -> None: - with self.engines_lock: - if self.yaneuraou: - try: - del self.yaneuraou - except OSError: - log.exception("Failed to kill engine process.") - self.yaneuraou = None - - def start_engines(self) -> None: - def start_fairy() -> None: - if not self.fairy or self.fairy.engine_proccess.poll() is not None: - self.fairy = Engine(True, get_fairy_command(self.conf, False), - get_engine_dir(self.conf)) - self.fairy_info = typing.cast(typing.Any, self.fairy.usi()) - self.fairy_info.pop("author", None) - log.info("Started %s, threads: %s (%d), pid: %d", - self.fairy_info.get("name", "Fairy stockfish "), - "+" * self.threads, self.threads, self.fairy.engine_proccess.pid) - self.fairy_info["options"] = {} - self.fairy_info["options"]["Threads"] = str(self.threads) - self.fairy_info["options"]["USI_Hash"] = str(self.memory) - # Custom options - if self.conf.has_section("Fairy"): - for name, value in self.conf.items("Fairy"): - self.fairy_info["options"][name] = value - for name, value in self.fairy_info["options"].items(): - self.fairy.setoption(name, value) - self.fairy.isready() - - def start_yane() -> None: - if not self.yaneuraou 
or self.yaneuraou.engine_proccess.poll() is not None: - self.yaneuraou = Engine(False, get_yaneuraou_command(self.conf, False), - get_engine_dir(self.conf)) - self.yaneuraou_info = typing.cast( - typing.Any, self.yaneuraou.usi()) - self.yaneuraou_info.pop("author", None) - log.info("Started %s, threads: %s (%d), pid: %d", - self.yaneuraou_info.get("name", "YaneuraOu "), - "+" * self.threads, self.threads, self.yaneuraou.engine_proccess.pid) - self.yaneuraou_info["options"] = {} - self.yaneuraou_info["options"]["Threads"] = str(self.threads) - self.yaneuraou_info["options"]["USI_Hash"] = str(self.memory) - self.yaneuraou_info["options"]["EnteringKingRule"] = "CSARule27H" - self.yaneuraou_info["options"]["BookFile"] = "no_book" - self.yaneuraou_info["options"]["ConsiderationMode"] = "true" - self.yaneuraou_info["options"]["OutputFailLHPV"] = "true" - # Custom options - if self.conf.has_section("YaneuraOu"): - for name, value in self.conf.items("YaneuraOu"): - self.yaneuraou_info["options"][name] = value - for name, value in self.yaneuraou_info["options"].items(): - self.yaneuraou.setoption(name, value) - self.yaneuraou.isready() - - with self.engines_lock: - # Checks if already running. 
- start_fairy() - start_yane() - - def make_request(self) -> typing.Any: - return { - "shoginet": { - "version": consts.SN_VERSION, - "python": platform.python_version(), - "apikey": get_key(self.conf), - }, - "yaneuraou": self.yaneuraou_info, - "fairy": self.fairy_info, - } - - def work(self) -> typing.Tuple[str, typing.Any]: - result = self.make_request() - - if self.job and self.job["work"]["type"] == "analysis": - result = self.analysis(self.job) - return "analysis" + "/" + self.job["work"]["id"], result - elif self.job and self.job["work"]["type"] == "move": - result = self.bestmove(self.job) - return "move" + "/" + self.job["work"]["id"], result - elif self.job and self.job["work"]["type"] == "puzzle": - result = self.puzzle(self.job) - return "puzzle" + "/" + self.job["work"]["id"], result - else: - if self.job: - log.error("Invalid job type: %s", self.job["work"]["type"]) - - return "acquire", result - - def job_name(self, job: typing.Any, ply: typing.Optional[int] = None) -> str: - builder = [] - if job["work"]["type"] == "puzzle": - builder.append("Puzzle - ") - builder.append(job["work"]["id"]) - elif job.get("game_id"): - builder.append(util.base_url(get_endpoint(self.conf))) - builder.append(job["game_id"]) - else: - builder.append(job["work"]["id"]) - if ply is not None: - builder.append("#") - builder.append(str(ply)) - return "".join(builder) - - def bestmove(self, job: typing.Any) -> str: - lvl = job["work"]["level"] - lvlIndex = lvl - 1 - variant = job.get("variant", "standard") - useFairy = job["work"].get("flavor", "yaneuraou") == "fairy" - moves = job["moves"].split() - - log.debug("Playing %s with lvl %d", - self.job_name(job), lvl) - - if useFairy: - engine = self.fairy - else: - engine = self.yaneuraou - - assert engine is not None - engine.set_variant_options(variant) - if useFairy: - engine.setoption("Skill_Level", consts.LVL_SKILL[lvlIndex]) - else: - engine.setoption("SkillLevel", max(consts.LVL_SKILL[lvlIndex], 0)) - 
engine.setoption("MultiPV", "1") - engine.send("usinewgame") - engine.isready() - - movetime = int( - round(consts.LVL_MOVETIMES[lvlIndex] / (self.threads * 0.9 ** (self.threads - 1)))) - start = time.time() - engine.go(job["position"], moves, - movetime=movetime, clock=job["work"].get( - "clock"), - depth=consts.LVL_DEPTHS[lvlIndex], nodes=(None if useFairy else consts.LVL_NODES[lvlIndex])) - bestmove = engine.recv_bestmove() - end = time.time() - - log.log(consts.PROGRESS, "Engine(%s) played move(%s) in %s (%s) with lvl %d: %0.3fs elapsed", - engine.name, bestmove, self.job_name(job), variant, - lvl, end - start) - - self.positions += 1 - - result = self.make_request() - result["move"] = { - "bestmove": bestmove, - } - return result - - def analysis(self, job: typing.Any) -> typing.Any: - variant = job.get("variant", "standard") - useFairy = job["work"].get("flavor", "yaneuraou") == "fairy" - moves = job["moves"].split() - - result = self.make_request() - start = last_progress_report = time.time() - - multipv = job.get("multipv") - nodes = job.get("nodes") or 3500000 - skip = job.get("skipPositions", []) - - if useFairy: - engine = self.fairy - else: - engine = self.yaneuraou - - assert engine is not None - engine.set_variant_options(variant) - if useFairy: - engine.setoption("Skill_Level", '20') - else: - engine.setoption("SkillLevel", '20') - engine.setoption("MultiPV", multipv or '1') - if (useFairy): - engine.setoption("USI_AnalyseMode", 'true') - engine.send("usinewgame") - engine.isready() - - if multipv is None: - result["analysis"] = [None for _ in range(len(moves) + 1)] - else: - result["analysis"] = { - "time": [[] for _ in range(len(moves) + 1)], - "nodes": [[] for _ in range(len(moves) + 1)], - "score": [[] for _ in range(len(moves) + 1)], - "pv": [[] for _ in range(len(moves) + 1)], - } - - num_positions = 0 - - for ply in range(len(moves), -1, -1): - if ply in skip: - result["analysis"][ply] = {"skipped": True} - continue - - if last_progress_report 
+ consts.PROGRESS_REPORT_INTERVAL < time.time(): - if self.progress_reporter: - self.progress_reporter.send(job, result) - last_progress_report = time.time() - - log.log(consts.PROGRESS, "Analysing: %s", - self.job_name(job, ply)) - - engine.go(job["position"], moves[0:ply], - nodes=nodes, movetime=7000) - scores, nodes, times, pvs = engine.recv_analysis() - if multipv is None: - depth = len(scores[0]) - 1 - result["analysis"][ply] = { - "depth": depth, - "score": util.decode_score(scores[0][depth]), - } - try: - result["analysis"][ply]["nodes"] = n = nodes[0][depth] - result["analysis"][ply]["time"] = t = times[0][depth] - if t > 200: - result["analysis"][ply]["nps"] = n * 1000 // t - except IndexError: - pass - try: - result["analysis"][ply]["pv"] = pvs[0][depth] - except IndexError: - pass - else: - result["analysis"]["time"][ply] = times - result["analysis"]["nodes"][ply] = nodes - result["analysis"]["score"][ply] = scores - result["analysis"]["pv"][ply] = pvs - - try: - self.nodes += nodes[0][-1] - except IndexError: - pass - self.positions += 1 - num_positions += 1 - - end = time.time() - - if num_positions: - log.info("%s took %0.1fs (%0.2fs per position - %s)", - self.job_name(job), - end - start, (end - start) / num_positions, engine.name) - else: - log.info("%s done (nothing to do)", self.job_name(job)) - - return result - - def puzzle(self, job: typing.Any) -> typing.Any: - useFairy = job["work"].get("flavor", "yaneuraou") == "fairy" - moves = job["moves"].split() - movesLen = len(moves) - position = job["position"] - turn = position.split(" ")[1] != "w" # True for sente - winnerTurn = turn if movesLen % 2 == 0 else not turn - - result = self.make_request() - start = last_progress_report = time.time() - - if useFairy: - engine = self.fairy - else: - engine = self.yaneuraou - - assert engine is not None - engine.set_variant_options("standard") - if useFairy: - engine.setoption("Skill_Level", '20') - else: - engine.setoption("SkillLevel", '20') - 
engine.setoption("MultiPV", '3') - if (useFairy): - engine.setoption("USI_AnalyseMode", 'true') - engine.send("usinewgame") - engine.isready() - - num_positions = 0 - - turn = winnerTurn - - start = time.time() - while True: - num_positions += 1 - engine.go(position, moves, depth=18, movetime='3000') - bestmove, scores = engine.recv_puzzle_analysis() - if bestmove is None or bestmove == "win" or (turn == winnerTurn and is_ambiguous(scores)): - break - else: - moves.append(bestmove) - turn = not turn - - end = time.time() - - found = len(moves) > movesLen - - if found: - log.info("%s found after %0.1fs (%0.2fs per position - %s)", - self.job_name(job), - end - start, (end - start) / num_positions, engine.name) - else: - log.log(consts.PROGRESS, "Engine(%s) is looking for new puzzles (%s) - %0.1fs", - engine.name, self.job_name(job), end - start) - - result["result"] = found - return result - -def is_ambiguous(scores: typing.List[int]) -> bool: - if len(scores) <= 1: - return False - best_score = scores[0] - second_score = scores[1] - if util.win_chances(best_score) < util.win_chances(second_score) + 0.33: - return True - return False - -def start_backoff(conf: configparser.ConfigParser) -> typing.Generator[float, None, None]: - if util.parse_bool(conf_get(conf, "FixedBackoff")): - while True: - yield random.random() * consts.MAX_FIXED_BACKOFF - else: - backoff = 1.0 - while True: - yield 0.5 * backoff + 0.5 * backoff * random.random() - backoff = min(backoff + 1, consts.MAX_BACKOFF) From 86fa44f496a3d4a734bf7de17d6be26c351f5b75 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sat, 11 Oct 2025 20:29:30 +0200 Subject: [PATCH 02/31] Same values in local config --- config/local.json | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/config/local.json b/config/local.json index 9e26dfe..3f2fa58 100644 --- a/config/local.json +++ b/config/local.json @@ -1 +1,18 @@ -{} \ No newline at end of 
file +{ + "workers": 1, + "engines": { + "yaneuraou": { + "path": "./engines/YaneuraOu-by-gcc", + "threads": 1, + "memory": 64 + }, + "fairy": { + "path": "./engines/fairy-stockfish", + "threads": 1, + "memory": 64 + } + }, + "logger": "info", + "endpoint": "https://lishogi.org", + "key": "" +} \ No newline at end of file From 3a96b420a0b841e5bc7b5eb2d83d3e935d3af6c5 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sat, 11 Oct 2025 20:36:16 +0200 Subject: [PATCH 03/31] Copy default config on preinstall --- .gitignore | 3 ++- config/local.json | 18 ------------------ package-lock.json | 1 + package.json | 2 ++ scripts/copy-config.js | 9 +++++++++ 5 files changed, 14 insertions(+), 19 deletions(-) delete mode 100644 config/local.json create mode 100644 scripts/copy-config.js diff --git a/.gitignore b/.gitignore index 7f32e4b..a8acff3 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ .vscode -node_modules \ No newline at end of file +node_modules +config/local.json \ No newline at end of file diff --git a/config/local.json b/config/local.json deleted file mode 100644 index 3f2fa58..0000000 --- a/config/local.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "workers": 1, - "engines": { - "yaneuraou": { - "path": "./engines/YaneuraOu-by-gcc", - "threads": 1, - "memory": 64 - }, - "fairy": { - "path": "./engines/fairy-stockfish", - "threads": 1, - "memory": 64 - } - }, - "logger": "info", - "endpoint": "https://lishogi.org", - "key": "" -} \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 44b3f61..7218e1b 100644 --- a/package-lock.json +++ b/package-lock.json @@ -7,6 +7,7 @@ "": { "name": "shoginet", "version": "5.0.0", + "hasInstallScript": true, "license": "AGPL-3.0-or-later", "dependencies": { "@biomejs/biome": "2.2.4", diff --git a/package.json b/package.json index 0a83d11..5a956cb 100644 --- a/package.json +++ b/package.json @@ -31,8 +31,10 @@ "typescript": "^5.9.2" }, "scripts": { + 
"preinstall": "$npm_execpath run copy-config", "start": "tsx ./src/main.ts", "systemd": "tsx ./src/systemd.ts", + "copy-config": "node ./scripts/copy-config.js", "test": "NODE_ENV=test tsx ./test/main.ts", "build-engines": "$npm_execpath run yaneuraou && $npm_execpath run fairy", "build-yaneuraou": "./scripts/yaneuraou.sh", diff --git a/scripts/copy-config.js b/scripts/copy-config.js new file mode 100644 index 0000000..0be8baa --- /dev/null +++ b/scripts/copy-config.js @@ -0,0 +1,9 @@ +import fs from "fs"; + +const src = "config/default.json"; +const dest = "config/local.json"; + +if (!fs.existsSync(dest)) { + fs.copyFileSync(src, dest); + console.log(`Created ${dest} from ${src}`); +} \ No newline at end of file From 2aaf55de704ff12b3cd15a372a7a8c7cb25baef7 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 7 Dec 2025 03:19:01 +0100 Subject: [PATCH 04/31] Update readme --- README.md | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 875a59d..8b4eb8e 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ ## Installation -```bash +```console git clone https://github.com/WandererXII/shoginet.git cd shoginet npm install @@ -23,28 +23,28 @@ Do not forget to check if path to engines is correct set (next step) Configuration is stored in `config` directory. Write your own overrides to `local.json`. -Most importantly you want to make sure that engine path is correctly set. By default we look into `engines` directory. _Yaneuraou_ engine default name is `YaneuraOu-by-gcc` and _Fairy Stockfish_ default nane is `fairy-stockfish` +Most importantly you want to make sure that engine path is correctly set. By default we look into `engines` directory. 
_Yaneuraou_ engine default name is `YaneuraOu-by-gcc` and _Fairy Stockfish_ default name is `fairy-stockfish` ## Usage **Run tests first** to make sure everything works, especially the engines: -```bash +```console npm run test ``` -You can start Shoginet directly by running: +If tests pass successfully, you can start Shoginet directly by running: -```bash +```console npm run start ``` You will probably want to run Shoginet with a process manager. For systemd (Linux) integration: -```bash +```console npm run systemd > /etc/systemd/system/shoginet.service sudo systemctl daemon-reload -sudo systemctl enable --now shoginet # enable and start +sudo systemctl start shoginet ``` ## Shoginet workflow @@ -52,23 +52,23 @@ sudo systemctl enable --now shoginet # enable and start 1. **Start!** - Shoginet is initiated and fetches config from the server. The config sets parameters for move generation, analysis and puzzle verification. -2. **Request Work** +2. **Request work** - Shoginet -> Lishogi: "Give me work!" -3. **Receive Game** +3. **Receive work** - Lishogi -> Shoginet: "Here's a game to analyse" - - The work could be _analysis_, _move generation_ or _puzle verification_ + - The work could be _analysis_, _move generation_ or _puzle verification_. Or nothing, if the queue is empty. -4. **Analyze** - - Shoginet is working... +4. **Process work** + - Shoginet is analyzing the game... - This consumes CPU 5. **Submit Results** - - Shoginet -> Lishogi: "Analysis result" + - Shoginet -> Lishogi: "Here are the analysis result" 6. **Repeat** - Lishogi -> Shoginet: "Thanks, here's more work :)" - Rinse & repeat 7. **Stop** - - Stop Shoginet when you need CPU power + - Stop Shoginet when you need CPU power. Shoginet will try to finish the work in progress and only then exit, if you wish to abort immediately press CTRL^C again. 
From 321fa184289c6b2408a73e4ff819abf26cbc7f2e Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 7 Dec 2025 03:23:53 +0100 Subject: [PATCH 05/31] tsconfig - satisfy tsgo --- tsconfig.json | 1 + 1 file changed, 1 insertion(+) diff --git a/tsconfig.json b/tsconfig.json index 70e7a00..e57c3e5 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -22,6 +22,7 @@ // "noFallthroughCasesInSwitch": true, // "noPropertyAccessFromIndexSignature": true, + "resolveJsonModule": true, "esModuleInterop": true, "composite": true, "isolatedDeclarations": true, From 9735441c5ce3dc2ce4eb3b9ece58184f6f20bfcf Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 7 Dec 2025 23:02:53 +0100 Subject: [PATCH 06/31] Add funding.yml --- .github/FUNDING.yml | 1 + 1 file changed, 1 insertion(+) create mode 100644 .github/FUNDING.yml diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 0000000..771d2e4 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1 @@ +custom: https://lishogi.org/patron From a04e3924414048df8113b588827c4a78129e1afd Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 7 Dec 2025 23:03:09 +0100 Subject: [PATCH 07/31] Fix build-engines script --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 5a956cb..5dcab38 100644 --- a/package.json +++ b/package.json @@ -36,7 +36,7 @@ "systemd": "tsx ./src/systemd.ts", "copy-config": "node ./scripts/copy-config.js", "test": "NODE_ENV=test tsx ./test/main.ts", - "build-engines": "$npm_execpath run yaneuraou && $npm_execpath run fairy", + "build-engines": "$npm_execpath run build-yaneuraou && $npm_execpath run build-fairy", "build-yaneuraou": "./scripts/yaneuraou.sh", "build-fairy": "./scripts/fairy.sh", "format": "biome format", From 09633e097917a969b9f51de35c64996d3905584e Mon Sep 17 00:00:00 2001 From: 
WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 7 Dec 2025 23:07:05 +0100 Subject: [PATCH 08/31] Add workflow and remove format script It's part of biome check --- .github/workflows/lint.yml | 11 +++++++++++ package.json | 7 +++---- 2 files changed, 14 insertions(+), 4 deletions(-) create mode 100644 .github/workflows/lint.yml diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..efe34e6 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,11 @@ +name: Lint +on: [push, pull_request] +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + - run: npm install + - run: npm run type-check + - run: npm run check \ No newline at end of file diff --git a/package.json b/package.json index 5dcab38..0fc32aa 100644 --- a/package.json +++ b/package.json @@ -39,9 +39,8 @@ "build-engines": "$npm_execpath run build-yaneuraou && $npm_execpath run build-fairy", "build-yaneuraou": "./scripts/yaneuraou.sh", "build-fairy": "./scripts/fairy.sh", - "format": "biome format", - "format:write": "biome format --write", - "lint": "biome check", - "lint:write": "biome check --write" + "check": "biome check", + "check:write": "biome check --write", + "type-check": "tsc --noEmit" } } From fbf1aa6999ebec2eaf8dd1b97fc41b50f9dc7645 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 7 Dec 2025 23:33:44 +0100 Subject: [PATCH 09/31] Move systemd to scripts npm pollutes the output with project and command --- README.md | 2 +- package.json | 1 - src/systemd.ts => scripts/systemd.js | 1 + 3 files changed, 2 insertions(+), 2 deletions(-) rename src/systemd.ts => scripts/systemd.js (95%) diff --git a/README.md b/README.md index 8b4eb8e..1ab9398 100644 --- a/README.md +++ b/README.md @@ -42,7 +42,7 @@ npm run start You will probably want to run Shoginet with a process manager. 
For systemd (Linux) integration: ```console -npm run systemd > /etc/systemd/system/shoginet.service +node ./scripts/systemd.js > /etc/systemd/system/shoginet.service sudo systemctl daemon-reload sudo systemctl start shoginet ``` diff --git a/package.json b/package.json index 0fc32aa..4c7cdbc 100644 --- a/package.json +++ b/package.json @@ -33,7 +33,6 @@ "scripts": { "preinstall": "$npm_execpath run copy-config", "start": "tsx ./src/main.ts", - "systemd": "tsx ./src/systemd.ts", "copy-config": "node ./scripts/copy-config.js", "test": "NODE_ENV=test tsx ./test/main.ts", "build-engines": "$npm_execpath run build-yaneuraou && $npm_execpath run build-fairy", diff --git a/src/systemd.ts b/scripts/systemd.js similarity index 95% rename from src/systemd.ts rename to scripts/systemd.js index 246b88c..97f7312 100644 --- a/src/systemd.ts +++ b/scripts/systemd.js @@ -11,6 +11,7 @@ After=network-online.target Wants=network-online.target [Service] +ExecStartPre=/usr/bin/npm run test ExecStart=/usr/bin/npm run start WorkingDirectory=${cwd} ReadWriteDirectories=${cwd} From d05b89f03750ef569fe016773840f5320d9ee04f Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 7 Dec 2025 23:34:02 +0100 Subject: [PATCH 10/31] Prefix node fs --- scripts/copy-config.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/copy-config.js b/scripts/copy-config.js index 0be8baa..7b8dbd0 100644 --- a/scripts/copy-config.js +++ b/scripts/copy-config.js @@ -1,4 +1,4 @@ -import fs from "fs"; +import fs from "node:fs"; const src = "config/default.json"; const dest = "config/local.json"; From cfecad90c7985f25401fd9b5fbe9b75db9ea32b4 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Mon, 8 Dec 2025 18:33:14 +0100 Subject: [PATCH 11/31] Use user npm and node --- scripts/systemd.js | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/scripts/systemd.js 
b/scripts/systemd.js index 97f7312..cf503e1 100644 --- a/scripts/systemd.js +++ b/scripts/systemd.js @@ -1,18 +1,25 @@ import { execSync } from 'node:child_process'; import process from 'node:process'; +import path from 'node:path'; function systemdConfig() { const cwd = process.cwd(); const user = execSync('whoami').toString().trim(); const group = execSync('id -gn').toString().trim(); + const npmPath = execSync('which npm').toString().trim(); + const nodeBinDir = path.dirname(process.execPath); + const envPath = `PATH=${nodeBinDir}:/usr/bin:/bin`; + const output = `[Unit] After=network-online.target Wants=network-online.target [Service] -ExecStartPre=/usr/bin/npm run test -ExecStart=/usr/bin/npm run start +Environment="${envPath}" +ExecStartPre=${npmPath} run test +ExecStart=${npmPath} run start + WorkingDirectory=${cwd} ReadWriteDirectories=${cwd} User=${user} @@ -24,7 +31,7 @@ PrivateDevices=true DevicePolicy=closed ProtectSystem=strict NoNewPrivileges=true -Restart=always +Restart=on-failure RestartSec=5 TimeoutStopSec=300 KillSignal=SIGINT From a506f67660e4bfdd3637c54c41a9601929bd7fcc Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Mon, 8 Dec 2025 18:33:46 +0100 Subject: [PATCH 12/31] Prevent local overriding test config --- config/{test.json => local-test.json} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename config/{test.json => local-test.json} (66%) diff --git a/config/test.json b/config/local-test.json similarity index 66% rename from config/test.json rename to config/local-test.json index cb6cdad..d7f734e 100644 --- a/config/test.json +++ b/config/local-test.json @@ -1,4 +1,4 @@ { "endpoint": "http://localhost:1080", - "logger": "debug" + "logger": "info" } \ No newline at end of file From 66c553d67608f4a57fb8af3f5629f7fe7e955b09 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Mon, 8 Dec 2025 18:38:53 +0100 Subject: [PATCH 13/31] Decrease 
MAX_WORKERS --- src/consts.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/consts.ts b/src/consts.ts index 5eb8d05..098d68c 100644 --- a/src/consts.ts +++ b/src/consts.ts @@ -4,7 +4,7 @@ export const HTTP_TIMEOUT_UNIMPORTANT_SECONDS = 10; export const LOGGER_REPORT_INTERVAL_SECONDS: number = 30 * 60; export const SERVER_CONFIG_REFETCH_SECONDS: number = 8 * 60 * 60; export const MAX_BACKOFF_SECONDS = 35.0; -export const MAX_WORKERS = 4; +export const MAX_WORKERS = 3; // for now more than enough export const DEFAULT_ANALYSIS_MOVETIME_SECONDS = 3; export const DEFAULT_MOVE_MOVETIME_SECONDS = 0.5; export const DEFAULT_PUZZLE_MOVETIME_SECONDS = 3; From 45d81669a81d9eb646680ca83a58ad572e429611 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Mon, 8 Dec 2025 18:39:20 +0100 Subject: [PATCH 14/31] Minor tweaks --- src/main.ts | 2 ++ src/types.ts | 9 --------- 2 files changed, 2 insertions(+), 9 deletions(-) diff --git a/src/main.ts b/src/main.ts index e6b5fb3..5efc013 100644 --- a/src/main.ts +++ b/src/main.ts @@ -47,9 +47,11 @@ async function main(): Promise { baseLogger.info('Shutting down...'); nextForceShutdown = true; await workerManager.stop(); + baseLogger.info('Workers stopped'); } else { baseLogger.info('Forcing shutdown...'); await workerManager.forceStop(); + baseLogger.info('Workers stopped by force'); } process.exit(0); diff --git a/src/types.ts b/src/types.ts index 6850655..e2750bd 100644 --- a/src/types.ts +++ b/src/types.ts @@ -8,15 +8,6 @@ export interface EngineInfo { [key: string]: any; } -export interface Config { - endpoint: string; - key?: string; - yaneuraouPath: string; - fairyPath: string; - memory?: number; - cores?: number; -} - export interface ScoreResult { mate?: number; cp?: number; From 41414dbfc17ccefb6482be3b482f614783e5fd8a Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Mon, 8 Dec 2025 22:08:15 +0100 Subject: [PATCH 
15/31] Point default to lishogi --- config/default.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/config/default.json b/config/default.json index a0ccbd9..4b88e22 100644 --- a/config/default.json +++ b/config/default.json @@ -13,6 +13,6 @@ } }, "logger": "info", - "endpoint": "http://localhost:9663", + "endpoint": "https://lishogi.org", "key": "" } From b163b7f8c3256f5303d6fabb4d2ec4e080512629 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Tue, 9 Dec 2025 03:03:29 +0100 Subject: [PATCH 16/31] Properly shutdown engines in test --- test/main.ts | 57 +++++++++++++++++++++++++++++++--------------------- 1 file changed, 34 insertions(+), 23 deletions(-) diff --git a/test/main.ts b/test/main.ts index 76fb501..e9e1322 100644 --- a/test/main.ts +++ b/test/main.ts @@ -1,4 +1,4 @@ -import { spawn } from 'node:child_process'; +import { type ChildProcessWithoutNullStreams, spawn } from 'node:child_process'; import http, { type IncomingMessage, type ServerResponse } from 'node:http'; import config from 'config'; import { StatusCodes } from 'http-status-codes'; @@ -6,20 +6,30 @@ import serverConfig from './server-config.json' with { type: 'json' }; import { type WorkDefinition, works } from './works.js'; function main() { + let shoginetProcess: ChildProcessWithoutNullStreams; + let exitCode: number = 1; + const worksInProgress = new Map(); const server = http.createServer( async (req: IncomingMessage, res: ServerResponse) => { - const getNextWorkOrFinish = () => { + const getNextWork = () => { const next = works.shift(); if (!next) { - console.log('All tests finished'); - process.exit(0); + console.log('All tests finished successfully!'); + startShutdown(0); + res.writeHead(StatusCodes.NO_CONTENT, { + 'Content-Type': 'application/json', + }); + return res.end(); + } else { + console.log(`Started work: ${next.name}`); + worksInProgress.set(next.path, next); + res.writeHead(StatusCodes.ACCEPTED, { + 
'Content-Type': 'application/json', + }); + return res.end(JSON.stringify(next.work)); } - - console.log(`Started work: ${next.name}`); - worksInProgress.set(next.path, next); - return next; }; switch (req.url) { @@ -28,18 +38,15 @@ function main() { return res.end(JSON.stringify(serverConfig)); } case '/shoginet/acquire': { - const next = getNextWorkOrFinish(); - - res.writeHead(StatusCodes.ACCEPTED, { - 'Content-Type': 'application/json', - }); - return res.end(JSON.stringify(next.work)); + return getNextWork(); } default: { const curWork = req.url && worksInProgress.get(req.url); if (!curWork) { console.error(`✖ No work in progress`); - process.exit(1); + startShutdown(1); + res.writeHead(StatusCodes.INTERNAL_SERVER_ERROR); + return res.end(); } try { const chunks: Buffer[] = []; @@ -54,11 +61,7 @@ function main() { if (validated) console.log(`✔ ${curWork.name} passed validation`); else console.error(`✖ ${curWork.name} failed`); - const next = getNextWorkOrFinish(); - res.writeHead(StatusCodes.ACCEPTED, { - 'Content-Type': 'application/json', - }); - return res.end(JSON.stringify(next.work)); + return getNextWork(); } else { res.writeHead(StatusCodes.NO_CONTENT, { 'Content-Type': 'application/json', @@ -79,7 +82,7 @@ function main() { console.log(`Mock server running at ${url.href}`); }); - const shoginetProcess = spawn('tsx', ['src/main.ts'], { + shoginetProcess = spawn('tsx', ['src/main.ts'], { env: { ...process.env, }, @@ -88,9 +91,17 @@ function main() { shoginetProcess.stdout?.on('data', (chunk) => process.stdout.write(chunk)); shoginetProcess.stderr?.on('data', (chunk) => process.stderr.write(chunk)); - shoginetProcess.on('exit', (code) => { - process.exit(code ?? 
1); + shoginetProcess.on('exit', () => { + server.close(() => { + console.log(`Exiting with code: ${exitCode}`); + process.exit(exitCode); + }); }); + + function startShutdown(code: number) { + exitCode = code; + shoginetProcess.kill('SIGTERM'); + } } main(); From 5ef75c83680983e0372c21ef88429de5d268b2d8 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 14 Dec 2025 12:29:26 +0100 Subject: [PATCH 17/31] Less logging --- src/http.ts | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/src/http.ts b/src/http.ts index 060b92a..b152570 100644 --- a/src/http.ts +++ b/src/http.ts @@ -42,6 +42,7 @@ function joinPath(path: string) { return new URL(`shoginet/${path}`, clientConfig.endpoint).toString(); } +let lastLog: number; export async function acquireWork(): Promise { try { const url = joinPath('acquire'); @@ -53,8 +54,11 @@ export async function acquireWork(): Promise { }); const work = processResponse(response); return work; - } catch (err) { - baseLogger.error('Failed to acquire work:', err); + } catch (_) { + if (!lastLog || Date.now() - lastLog > 60 * 1000 * 5) { + baseLogger.error('Failed to acquire work.'); + lastLog = Date.now(); + } return undefined; } } @@ -71,8 +75,8 @@ export async function submitWork( json: og(res), }); return processResponse(response); - } catch (err) { - baseLogger.error('Failed to submit work:', err); + } catch (_) { + baseLogger.error('Failed to submit work:', work); return undefined; } } @@ -83,8 +87,8 @@ export async function abortWork(work: Work): Promise { timeout: { request: HTTP_TIMEOUT_UNIMPORTANT_SECONDS * 1000 }, headers, }); - } catch (err) { - baseLogger.error(`Failed to abort work: ${err}`); + } catch (_) { + baseLogger.error(`Failed to abort work: ${work}`); } } @@ -98,8 +102,8 @@ export async function analysisProgressReport( headers, json: og({ ...res, partial: true }), }); - } catch (err) { - baseLogger.warn(`Failed to submit analysis 
progress: ${err}`); + } catch (_) { + baseLogger.warn(`Failed to submit analysis progress.`); } } From aab3f2e352de557918748da2242c93d1afb3562a Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 14 Dec 2025 12:30:25 +0100 Subject: [PATCH 18/31] Rename flavor to engine --- src/types.ts | 8 ++++---- src/worker.ts | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/types.ts b/src/types.ts index e2750bd..95b70c7 100644 --- a/src/types.ts +++ b/src/types.ts @@ -19,7 +19,7 @@ interface BaseWork { work: { type: WorkType; id: string; - flavor: EngineKind; + engine: EngineKind; }; game_id: string; position: string; @@ -32,7 +32,7 @@ export interface MoveWork extends BaseWork { work: { type: 'move'; id: string; - flavor: EngineKind; + engine: EngineKind; level: Level; clock?: { wtime: number; @@ -47,7 +47,7 @@ export interface AnalysisWork extends BaseWork { work: { type: 'analysis'; id: string; - flavor: EngineKind; + engine: EngineKind; }; nodes: number; skipPositions: number[]; @@ -57,7 +57,7 @@ export interface PuzzleWork extends BaseWork { work: { type: 'puzzle'; id: string; - flavor: EngineKind; + engine: EngineKind; }; } diff --git a/src/worker.ts b/src/worker.ts index eb45035..3185fdb 100644 --- a/src/worker.ts +++ b/src/worker.ts @@ -126,7 +126,7 @@ export class Worker extends EventEmitter { task(work: Work): void { const workType = work.work.type; - const engine = this.engines[work.work.flavor]; + const engine = this.engines[work.work.engine]; if (!engine || !engine.isActive) { this.logger.error('Engine not found'); From 6b3d617f06aec0e9c55f7d9482519153d0534874 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 14 Dec 2025 12:32:11 +0100 Subject: [PATCH 19/31] Remove values we don't anymore need from json sent to server --- src/http.ts | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/src/http.ts b/src/http.ts index 
b152570..4ea8407 100644 --- a/src/http.ts +++ b/src/http.ts @@ -14,16 +14,13 @@ const headers = { 'shoginet-key': clientConfig.key, }; -function og(res: Record) { +function makeJson(res: Record) { return { ...res, shoginet: { version: pkg.version, - python: 'NO', apikey: clientConfig.key, }, - yaneuraou: { name: 'Y', options: {} }, - fairy: { name: 'F', options: {} }, }; } @@ -50,7 +47,7 @@ export async function acquireWork(): Promise { timeout: { request: HTTP_TIMEOUT_IMPORTANT_SECONDS * 1000 }, headers, throwHttpErrors: false, - json: og({}), + json: makeJson({}), }); const work = processResponse(response); return work; @@ -72,7 +69,7 @@ export async function submitWork( const response = await got.post(url, { timeout: { request: HTTP_TIMEOUT_IMPORTANT_SECONDS * 1000 }, headers, - json: og(res), + json: makeJson(res), }); return processResponse(response); } catch (_) { @@ -100,7 +97,7 @@ export async function analysisProgressReport( await got.post(joinPath(`${work.work.type}/${work.work.id}`), { timeout: { request: HTTP_TIMEOUT_UNIMPORTANT_SECONDS * 1000 }, headers, - json: og({ ...res, partial: true }), + json: makeJson({ ...res, partial: true }), }); } catch (_) { baseLogger.warn(`Failed to submit analysis progress.`); From 52175a0f452b45bc17a02407568eff5f1619f816 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 14 Dec 2025 12:32:31 +0100 Subject: [PATCH 20/31] Release worker on failure --- src/worker.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/src/worker.ts b/src/worker.ts index 3185fdb..85b9e7a 100644 --- a/src/worker.ts +++ b/src/worker.ts @@ -83,6 +83,7 @@ export class Worker extends EventEmitter { if (this.currentWork) { this.logger.error('Aborting work due to failue', this.currentWork); abortWork(this.currentWork); + this.release(); } this.initialize(); }; From 9585fed73bbef14c8a1bf62e9ef92ef8b83f7cb3 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> 
Date: Sun, 14 Dec 2025 12:33:47 +0100 Subject: [PATCH 21/31] Stop engine processes properly on test exit --- test/main.ts | 38 +++++++++++++++++++++++--------------- 1 file changed, 23 insertions(+), 15 deletions(-) diff --git a/test/main.ts b/test/main.ts index e9e1322..c3a4f4b 100644 --- a/test/main.ts +++ b/test/main.ts @@ -7,21 +7,29 @@ import { type WorkDefinition, works } from './works.js'; function main() { let shoginetProcess: ChildProcessWithoutNullStreams; - let exitCode: number = 1; + let exitCode: number = 0; + let running = true; const worksInProgress = new Map(); const server = http.createServer( async (req: IncomingMessage, res: ServerResponse) => { + const noWork = () => { + res.writeHead(StatusCodes.NO_CONTENT, { + 'Content-Type': 'application/json', + }); + return res.end(); + }; + const getNextWork = () => { + if (!running) return noWork(); + const next = works.shift(); if (!next) { - console.log('All tests finished successfully!'); - startShutdown(0); - res.writeHead(StatusCodes.NO_CONTENT, { - 'Content-Type': 'application/json', - }); - return res.end(); + console.log('All tests finished'); + running = false; + startShutdown(); + return noWork(); } else { console.log(`Started work: ${next.name}`); worksInProgress.set(next.path, next); @@ -44,7 +52,8 @@ function main() { const curWork = req.url && worksInProgress.get(req.url); if (!curWork) { console.error(`✖ No work in progress`); - startShutdown(1); + exitCode += 1; + startShutdown(); res.writeHead(StatusCodes.INTERNAL_SERVER_ERROR); return res.end(); } @@ -59,14 +68,14 @@ function main() { // allow undefined for progress reports if (validated !== undefined) { if (validated) console.log(`✔ ${curWork.name} passed validation`); - else console.error(`✖ ${curWork.name} failed`); + else { + console.error(`✖ ${curWork.name} failed`); + exitCode += 1; + } return getNextWork(); } else { - res.writeHead(StatusCodes.NO_CONTENT, { - 'Content-Type': 'application/json', - }); - return res.end(); + return 
noWork(); } } catch (e) { res.writeHead(StatusCodes.INTERNAL_SERVER_ERROR); @@ -98,8 +107,7 @@ function main() { }); }); - function startShutdown(code: number) { - exitCode = code; + function startShutdown() { shoginetProcess.kill('SIGTERM'); } } From baf439dfcc10b677a83e596d24169aedf287b160 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 14 Dec 2025 12:37:24 +0100 Subject: [PATCH 22/31] Stop workers on available event --- src/worker-manager.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/src/worker-manager.ts b/src/worker-manager.ts index 6441248..8cb5e9c 100644 --- a/src/worker-manager.ts +++ b/src/worker-manager.ts @@ -90,6 +90,7 @@ export class WorkerManager { busyWorkers.forEach((w) => { w.once('result', onDone); w.once('failure', onDone); + w.once('available', onDone); }); }); this.workers.forEach((w) => { From 591e3038486e3c7140ef42e902fffe1742ed78cc Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 14 Dec 2025 12:38:59 +0100 Subject: [PATCH 23/31] Rewrite puzzle to generate solution and themes --- src/work/puzzle.ts | 184 +++++++++++++++++++++++++++++++++++++++------ 1 file changed, 160 insertions(+), 24 deletions(-) diff --git a/src/work/puzzle.ts b/src/work/puzzle.ts index a0df35f..939195e 100644 --- a/src/work/puzzle.ts +++ b/src/work/puzzle.ts @@ -1,5 +1,6 @@ +import { makeSfen, parseSfen } from 'shogiops/sfen'; import type { Color } from 'shogiops/types'; -import { makeUsi, opposite, parseUsi } from 'shogiops/util'; +import { isDrop, makeUsi, opposite, parseUsi } from 'shogiops/util'; import { DEFAULT_PUZZLE_MOVETIME_SECONDS } from '../consts.js'; import type { Engine } from '../engine.js'; import type { PuzzleWork, ScoreResult } from '../types.js'; @@ -12,31 +13,34 @@ type MultiPvScores3 = [ ScoreResult | undefined, ]; -function winChances(score: ScoreResult): number { - if (score.mate) return score.mate > 0 ? 
1 : -1; - else if (score.cp) return 2 / (1 + Math.exp(-0.0007 * score.cp)) - 1; - else return 0; -} - -function isAmbiguous(scores: MultiPvScores3): boolean { - if (scores.filter((s) => !!s).length <= 1) return false; - const bestScore = scores[0]; - const secondScore = scores[1]; - if (!bestScore || !secondScore) return false; - else return winChances(bestScore) < winChances(secondScore) + 0.33; +interface UsiWithScore { + usi: string; + score: ScoreResult | undefined; } export function puzzle(worker: Worker, engine: Engine, work: PuzzleWork): void { worker.logger.debug('Starting puzzle analysis'); - const moves: string[] = work.moves.split(' '); - const initialMovesLength = moves.length; - const position: string = work.position; - - const color: Color = position.split(' ')[1] !== 'w' ? 'sente' : 'gote'; - const winnerColor = initialMovesLength % 2 === 0 ? color : opposite(color); + const shogi = parseSfen('standard', work.position, false); + const workUsis = work.moves.split(' ').filter((m) => m); + const workMoves = workUsis.map((m) => parseUsi(m)); + if (shogi.isErr || workMoves.some((m) => m === undefined)) { + worker.logger.error( + 'Could not parse position or moves', + work.position, + work.moves, + ); + worker.emit('result', work, { result: false }); + return; + } + const workSfen = makeSfen(shogi.value); + // we want to normalize all games to start from POV of the winner + workMoves.forEach((m) => { + shogi.value.play(m!); + }); - const serverConfig = worker.serverConfig.config.puzzle; + const winnerColor = shogi.value.turn; + const initialSfen = makeSfen(shogi.value); if (engine.kind === 'fairy') { engine.setVariant('standard'); @@ -47,6 +51,10 @@ export function puzzle(worker: Worker, engine: Engine, work: PuzzleWork): void { engine.setMultiPv(3); engine.send('usinewgame'); + const serverConfig = worker.serverConfig.config.puzzle; + const resultUsis: string[] = []; + const bestScores: (ScoreResult | undefined)[] = []; + const analysePly = (depth: 
number, color: Color) => { worker.logger.debug(`Analysing: ${depth} depth`); @@ -72,16 +80,36 @@ export function puzzle(worker: Worker, engine: Engine, work: PuzzleWork): void { (!serverConfig.maxLength || depth < serverConfig.maxLength) && (color !== winnerColor || !isAmbiguous(scores)) ) { - moves.push(makeUsi(parsed)); + resultUsis.push(makeUsi(parsed)); + bestScores.push(scores[0]); analysePly(depth + 1, opposite(color)); } else { - const result = { result: moves.length > initialMovesLength }; + // we need to stop at our move + const trimmedResultUsis = + resultUsis.length % 2 ? resultUsis : resultUsis.slice(0, -1); + const filteredResultUsis = clearFutileInterposition(trimmedResultUsis); + const doWeHavePuzzle = !!filteredResultUsis; + const result: any = { rejected: !doWeHavePuzzle }; + + if (doWeHavePuzzle) { + const puzzleResult = { + sfen: workSfen, + line: [...workUsis, ...filteredResultUsis].join(' '), + themes: detectThemes( + initialSfen, + filteredResultUsis.map((u, i) => { + return { usi: u, score: bestScores[i] }; + }), + ), + }; + result.puzzle = puzzleResult; + } worker.logger.debug('Emitting move result:', result); worker.emit('result', work, result); } }); - engine.search(position, moves, { + engine.search(initialSfen, resultUsis, { movetime: serverConfig.movetime || DEFAULT_PUZZLE_MOVETIME_SECONDS * 1000, depth: serverConfig.depth, nodes: undefined, @@ -89,5 +117,113 @@ export function puzzle(worker: Worker, engine: Engine, work: PuzzleWork): void { }); }; - analysePly(0, color); + analysePly(0, winnerColor); +} + +function winChances(score: ScoreResult): number { + if (score.mate) return score.mate > 0 ? 
1 : -1; + else if (score.cp) return 2 / (1 + Math.exp(-0.0007 * score.cp)) - 1; + else return 0; +} + +function isAmbiguous(scores: MultiPvScores3): boolean { + if (scores.filter((s) => !!s).length <= 1) return false; + const bestScore = scores[0]; + const secondScore = scores[1]; + if (!bestScore || !secondScore) return false; + else return winChances(bestScore) < winChances(secondScore) + 0.33; +} + +export function detectThemes( + sfen: string, + scoredUsis: UsiWithScore[], +): string[] { + const themes = new Set(); + + if (scoredUsis.length === 0) return []; + + const depth = scoredUsis.length; + const initialScore = scoredUsis[0].score; + + if (depth === 1) themes.add('oneMove'); + else if (depth === 3) themes.add('short'); + else if (depth === 5) themes.add('long'); + else themes.add('veryLong'); + + if (initialScore?.mate) { + themes.add('mate'); + + if (depth === 1) themes.add('mateIn1'); + else if (depth === 3) themes.add('mateIn3'); + else if (depth === 5) themes.add('mateIn5'); + else if (depth === 7) themes.add('mateIn7'); + else if (depth === 9) themes.add('mateIn9'); + } else if (initialScore?.cp !== undefined) { + const cp = initialScore.cp; + if (cp >= 2000) themes.add('crushing'); + else if (cp >= 700) themes.add('advantage'); + else if (Math.abs(cp) < 350) themes.add('equality'); + } + + if (initialScore?.mate) { + const shogi = parseSfen('standard', sfen, false); + + if (shogi.isOk) { + let isTsume = true; + + for (let i = 0; i < depth; i += 1) { + const moveUsi = scoredUsis[i].usi; + const move = parseUsi(moveUsi); + + if (!move) { + isTsume = false; + break; + } + + shogi.value.play(move); + + if (i % 2 === 0) { + if (!shogi.value.isCheck()) { + isTsume = false; + break; + } + } + } + + if (isTsume) themes.add('tsume'); + } + } + + return Array.from(themes).sort(); +} + +function clearFutileInterposition(usis: string[]): string[] { + function isFutilePair(defenderUsi: string, attackerUsi: string): boolean { + const defMove = 
parseUsi(defenderUsi); + const atkMove = parseUsi(attackerUsi); + + if (!defMove || !atkMove) return false; + console.log(isDrop(defMove), atkMove.to, defMove.to); + + return isDrop(defMove) && atkMove.to === defMove.to; + } + + let cutoffIndex = usis.length; + + const startIndex = usis.length - 2; + + for (let i = startIndex; i >= 1; i -= 2) { + const defenderUsi = usis[i]; + const attackerUsi = usis[i + 1]; + + console.log('checking', defenderUsi, attackerUsi); + + if (isFutilePair(defenderUsi, attackerUsi)) { + cutoffIndex = i; + } else { + break; + } + } + + return usis.slice(0, cutoffIndex); } From 1f7cf241caf5152f317fcbafb6c042da4af89004 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 14 Dec 2025 12:39:27 +0100 Subject: [PATCH 24/31] More tests --- test/works.ts | 132 +++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 124 insertions(+), 8 deletions(-) diff --git a/test/works.ts b/test/works.ts index 783ecb6..1b3283c 100644 --- a/test/works.ts +++ b/test/works.ts @@ -14,6 +14,69 @@ function validateBestmove(response: any): boolean { return !!parsed; } +interface PuzzleValidationSpec { + result: boolean; + themes: string[]; + sfen: string; + line: string[]; +} + +function validatePuzzle( + response: PuzzleValidationSpec, + expected: PuzzleValidationSpec, +): boolean { + if (response.result !== expected.result) { + console.error('Mismatch: result', { + response: response.result, + expected: expected.result, + }); + return false; + } + + if (!Array.isArray(response.themes)) { + console.error('Invalid: themes is not an array', { + response: response.themes, + }); + return false; + } + + if (response.themes.length !== expected.themes.length) { + console.error('Mismatch: themes length', { + response: response.themes.length, + expected: expected.themes.length, + }); + return false; + } + + if ( + !(response.themes as string[]).every((t) => expected.themes.includes(t)) + ) { + 
console.error('Mismatch: themes content', { + response: response.themes, + expected: expected.themes, + }); + return false; + } + + if (response.sfen !== expected.sfen) { + console.error('Mismatch: sfen', { + response: response.sfen, + expected: expected.sfen, + }); + return false; + } + + if (JSON.stringify(response.line) !== JSON.stringify(expected.line)) { + console.error('Mismatch: line', { + response: response.line, + expected: expected.line, + }); + return false; + } + + return true; +} + export const works: WorkDefinition[] = [ { name: 'Move (yaneuraou)', @@ -24,7 +87,7 @@ export const works: WorkDefinition[] = [ id: 'A', level: 5, clock: { btime: 120000, wtime: 120000, inc: 0, byo: 0 }, - flavor: 'yaneuraou', + engine: 'yaneuraou', }, game_id: 'xxxxxxxx', position: @@ -43,7 +106,7 @@ export const works: WorkDefinition[] = [ id: 'B', level: 1, clock: { btime: 120000, wtime: 120000, inc: 0, byo: 0 }, - flavor: 'fairy', + engine: 'fairy', }, game_id: 'xxxxxxxx', position: @@ -60,7 +123,7 @@ export const works: WorkDefinition[] = [ work: { type: 'analysis', id: 'C', - flavor: 'yaneuraou', + engine: 'yaneuraou', }, nodes: 1250000, skipPositions: [0, 1, 2], @@ -79,13 +142,13 @@ export const works: WorkDefinition[] = [ }, }, { - name: 'Puzzle', - path: '/shoginet/puzzle/D', + name: 'Puzzle - game (b)', + path: '/shoginet/puzzle/1', work: { work: { type: 'puzzle', - id: 'D', - flavor: 'yaneuraou', + id: '1', + engine: 'yaneuraou', }, game_id: 'synthetic', position: @@ -94,7 +157,60 @@ export const works: WorkDefinition[] = [ moves: '3i3h', }, validate: (response: any) => { - return response.result === true; + const valid = validatePuzzle(response, { + result: true, + themes: ['mate', 'mateIn1', 'oneMove', 'tsume'], + sfen: 'lnsgk4/1r3s3/1ppp3pp/p8/5+B3/2P1n4/PP3+bPPP/8R/L1SGKGS1L b GNL2Pn4p 27', + line: ['3i3h', 'N*6g'], + }); + if (!valid) console.error(response); + return valid; + }, + }, + { + name: 'Puzzle - futile interposition (b)', + path: 
'/shoginet/puzzle/2', + work: { + work: { + type: 'puzzle', + id: '2', + engine: 'yaneuraou', + }, + game_id: 'synthetic', + position: '9/1kg6/1psg5/2ppp4/9/2P6/1P3+p+p+p+p/9/L6K1 b BSgsnlp 1', + variant: 'standard', + moves: '', + }, + validate: (response: any) => { + return validatePuzzle(response, { + result: true, + themes: ['mate', 'mateIn5', 'tsume', 'long'], + sfen: '9/1kg6/1psg5/2ppp4/9/2P6/1P3+p+p+p+p/9/L6K1 b BSgsnlp 1', + line: ['B*9c', '8b8a', 'S*9b', '8a9b', '9c7a+'], + }); + }, + }, + { + name: 'Puzzle - futile interposition (w)', + path: '/shoginet/puzzle/3', + work: { + work: { + type: 'puzzle', + id: '3', + engine: 'fairy', + }, + game_id: 'synthetic', + position: '9/9/9/9/pPP6/1K7/PS1s5/3+r5/1R2b4 w B2G2S2N2L4P 1', + variant: 'standard', + moves: '', + }, + validate: (response: any) => { + return validatePuzzle(response, { + result: true, + themes: ['mate', 'mateIn1', 'tsume', 'oneMove'], + sfen: '9/9/9/9/pPP6/1K7/PS1s5/3+r5/1R2b4 w B2G2S2N2L4P 1', + line: ['6h7i'], + }); }, }, ]; From c5d1477ce50cf028ff6957f28a43f0f8d2df680a Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 14 Dec 2025 16:52:27 +0100 Subject: [PATCH 25/31] Fix tests --- test/works.ts | 61 ++++++++++++++------------------------------------- 1 file changed, 17 insertions(+), 44 deletions(-) diff --git a/test/works.ts b/test/works.ts index 1b3283c..db1a9a3 100644 --- a/test/works.ts +++ b/test/works.ts @@ -15,61 +15,39 @@ function validateBestmove(response: any): boolean { } interface PuzzleValidationSpec { - result: boolean; themes: string[]; sfen: string; - line: string[]; + line: string; // space separated } function validatePuzzle( response: PuzzleValidationSpec, expected: PuzzleValidationSpec, ): boolean { - if (response.result !== expected.result) { - console.error('Mismatch: result', { - response: response.result, - expected: expected.result, - }); - return false; - } - - if (!Array.isArray(response.themes)) { - 
console.error('Invalid: themes is not an array', { - response: response.themes, - }); - return false; - } - - if (response.themes.length !== expected.themes.length) { - console.error('Mismatch: themes length', { - response: response.themes.length, - expected: expected.themes.length, - }); - return false; - } - if ( + !Array.isArray(response.themes) || + response.themes.length !== expected.themes.length || !(response.themes as string[]).every((t) => expected.themes.includes(t)) ) { - console.error('Mismatch: themes content', { - response: response.themes, - expected: expected.themes, + console.error('Mismatch: themes', { + response: response, + expected: expected, }); return false; } if (response.sfen !== expected.sfen) { console.error('Mismatch: sfen', { - response: response.sfen, - expected: expected.sfen, + response: response, + expected: expected, }); return false; } - if (JSON.stringify(response.line) !== JSON.stringify(expected.line)) { + if (response.line !== expected.line) { console.error('Mismatch: line', { - response: response.line, - expected: expected.line, + response: response, + expected: expected, }); return false; } @@ -157,14 +135,11 @@ export const works: WorkDefinition[] = [ moves: '3i3h', }, validate: (response: any) => { - const valid = validatePuzzle(response, { - result: true, + return validatePuzzle(response.puzzle, { themes: ['mate', 'mateIn1', 'oneMove', 'tsume'], sfen: 'lnsgk4/1r3s3/1ppp3pp/p8/5+B3/2P1n4/PP3+bPPP/8R/L1SGKGS1L b GNL2Pn4p 27', - line: ['3i3h', 'N*6g'], + line: ['3i3h', 'N*6g'].join(' '), }); - if (!valid) console.error(response); - return valid; }, }, { @@ -182,11 +157,10 @@ export const works: WorkDefinition[] = [ moves: '', }, validate: (response: any) => { - return validatePuzzle(response, { - result: true, + return validatePuzzle(response.puzzle, { themes: ['mate', 'mateIn5', 'tsume', 'long'], sfen: '9/1kg6/1psg5/2ppp4/9/2P6/1P3+p+p+p+p/9/L6K1 b BSgsnlp 1', - line: ['B*9c', '8b8a', 'S*9b', '8a9b', '9c7a+'], + line: 
['B*9c', '8b8a', 'S*9b', '8a9b', '9c7a+'].join(' '), }); }, }, @@ -205,11 +179,10 @@ export const works: WorkDefinition[] = [ moves: '', }, validate: (response: any) => { - return validatePuzzle(response, { - result: true, + return validatePuzzle(response.puzzle, { themes: ['mate', 'mateIn1', 'tsume', 'oneMove'], sfen: '9/9/9/9/pPP6/1K7/PS1s5/3+r5/1R2b4 w B2G2S2N2L4P 1', - line: ['6h7i'], + line: ['6h7i'].join(' '), }); }, }, From d4e269a75f694fe25be7ad103aab902e3a46d075 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 14 Dec 2025 16:53:01 +0100 Subject: [PATCH 26/31] Remove console logs --- src/work/puzzle.ts | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/work/puzzle.ts b/src/work/puzzle.ts index 939195e..f6e6cce 100644 --- a/src/work/puzzle.ts +++ b/src/work/puzzle.ts @@ -203,8 +203,6 @@ function clearFutileInterposition(usis: string[]): string[] { const atkMove = parseUsi(attackerUsi); if (!defMove || !atkMove) return false; - console.log(isDrop(defMove), atkMove.to, defMove.to); - return isDrop(defMove) && atkMove.to === defMove.to; } @@ -216,8 +214,6 @@ function clearFutileInterposition(usis: string[]): string[] { const defenderUsi = usis[i]; const attackerUsi = usis[i + 1]; - console.log('checking', defenderUsi, attackerUsi); - if (isFutilePair(defenderUsi, attackerUsi)) { cutoffIndex = i; } else { From b8443f1428e8a57801b68a509c6e095b90e2497d Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Sun, 14 Dec 2025 16:56:08 +0100 Subject: [PATCH 27/31] Better shutdown --- scripts/systemd.js | 2 +- src/worker-manager.ts | 68 +++++++++++++++++++++++++++++++------------ 2 files changed, 51 insertions(+), 19 deletions(-) diff --git a/scripts/systemd.js b/scripts/systemd.js index cf503e1..f015579 100644 --- a/scripts/systemd.js +++ b/scripts/systemd.js @@ -33,7 +33,7 @@ ProtectSystem=strict NoNewPrivileges=true Restart=on-failure RestartSec=5 -TimeoutStopSec=300 
+TimeoutStopSec=610 KillSignal=SIGINT KillMode=control-group diff --git a/src/worker-manager.ts b/src/worker-manager.ts index 8cb5e9c..82d978d 100644 --- a/src/worker-manager.ts +++ b/src/worker-manager.ts @@ -76,27 +76,59 @@ export class WorkerManager { this.abort.abort(); const busyWorkers = this.workers.filter((w) => !!w.currentWork); - if (busyWorkers.length) { - await new Promise((resolve) => { - this.logger.info( - 'Waiting for workers to finish, send signal again to force exit.', - ); - let done = 0; - const onDone = () => { - done++; - if (done >= busyWorkers.length) resolve(); + + if (busyWorkers.length === 0) { + this.forceStop(); + return; + } + + this.logger.info( + `Waiting for ${busyWorkers.length} workers to finish or 10m timeout...`, + ); + + const waitForWorkersToFinish = new Promise((resolve) => { + const finishedWorkerIds = new Set(); + + const checkDone = () => { + if (finishedWorkerIds.size >= busyWorkers.length) { + resolve(); + } + }; + + busyWorkers.forEach((w) => { + const onWorkerFinished = () => { + if (!finishedWorkerIds.has(w.index)) { + finishedWorkerIds.add(w.index); + + w.off('result', onWorkerFinished); + w.off('failure', onWorkerFinished); + w.off('available', onWorkerFinished); + + checkDone(); + } }; - busyWorkers.forEach((w) => { - w.once('result', onDone); - w.once('failure', onDone); - w.once('available', onDone); - }); - }); - this.workers.forEach((w) => { - w.stop(); + w.once('result', onWorkerFinished); + w.once('failure', onWorkerFinished); + w.once('available', onWorkerFinished); }); - } else this.forceStop(); + }); + + const maxWaitTime = new Promise((resolve) => { + setTimeout( + () => { + this.logger.warn('Force stop triggered due to 10 minute timeout.'); + resolve(); + }, + 10 * 60 * 1000, + ); + }); + + await Promise.race([waitForWorkersToFinish, maxWaitTime]); + + this.workers.forEach((w) => { + w.stop(); + }); } async forceStop(): Promise { From ce1a4cd6799b566181bda9794bc5ad8a2ccc7f54 Mon Sep 17 00:00:00 2001 
From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Mon, 15 Dec 2025 03:50:33 +0100 Subject: [PATCH 28/31] Add lishogi.org badge --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 1ab9398..7af09de 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,7 @@ # Shoginet +[![lishogi.org](https://img.shields.io/badge/☗_lishogi.org-Play_shogi-black)](https://lishogi.org) + **Distributed network for [Lishogi.org](https://lishogi.org)** ## Installation From 2975b0b73dc1b5bbae0b897d73c2976a4a019436 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Mon, 22 Dec 2025 23:05:56 +0100 Subject: [PATCH 29/31] Store copy of server config locally, so that shoginet can start when server is down --- test/server-config.json => server-config.json | 26 +++++++++++++++---- src/config/server.ts | 10 ++++++- src/main.ts | 7 +---- test/main.ts | 8 +++--- 4 files changed, 36 insertions(+), 15 deletions(-) rename test/server-config.json => server-config.json (80%) diff --git a/test/server-config.json b/server-config.json similarity index 80% rename from test/server-config.json rename to server-config.json index 9791d69..7f8f0a3 100644 --- a/test/server-config.json +++ b/server-config.json @@ -1,11 +1,11 @@ { "analysis": { - "movetime": 1500, - "nodes": 0, - "depth": 18 + "movetime": 1000, + "nodes": 1250000, + "depth": 20 }, "puzzle": { - "movetime": 2000, + "movetime": 3000, "depth": 18, "maxLength": 12 }, @@ -63,6 +63,22 @@ } }, "yaneuraou": { + "1": { + "movetime": 30, + "depth": 1 + }, + "2": { + "movetime": 100, + "depth": 1 + }, + "3": { + "movetime": 150, + "depth": 1 + }, + "4": { + "movetime": 50, + "depth": 2 + }, "5": { "movetime": 300, "depth": 2 @@ -89,4 +105,4 @@ } } } -} \ No newline at end of file +} diff --git a/src/config/server.ts b/src/config/server.ts index 91b0ea8..34b3d53 100644 --- a/src/config/server.ts +++ b/src/config/server.ts @@ -1,3 +1,4 @@ +import serverConfig 
from '../../server-config.json' with { type: 'json' }; import { SERVER_CONFIG_REFETCH_SECONDS } from '../consts.js'; import { getServerConfig } from '../http.js'; import { baseLogger } from '../logger.js'; @@ -36,7 +37,14 @@ export class ServerConfig { }); async initialize(): Promise { - await this.load(); + try { + await this.load(); + } catch { + this.config = serverConfig; + this.logger.error( + 'Failed to fetch server config file, using local config', + ); + } this.startPeriodicRefresh(); } diff --git a/src/main.ts b/src/main.ts index 5efc013..68f1d6f 100644 --- a/src/main.ts +++ b/src/main.ts @@ -17,12 +17,7 @@ async function main(): Promise { const serverConfig = new ServerConfig(); baseLogger.info('Fetching server config...'); - try { - await serverConfig.initialize(); - } catch (err) { - baseLogger.error(`Failed to fetch server config file: ${err}`); - process.exit(1); - } + await serverConfig.initialize(); baseLogger.info('Verifying shoginet version...'); try { diff --git a/test/main.ts b/test/main.ts index c3a4f4b..df52da3 100644 --- a/test/main.ts +++ b/test/main.ts @@ -2,7 +2,6 @@ import { type ChildProcessWithoutNullStreams, spawn } from 'node:child_process'; import http, { type IncomingMessage, type ServerResponse } from 'node:http'; import config from 'config'; import { StatusCodes } from 'http-status-codes'; -import serverConfig from './server-config.json' with { type: 'json' }; import { type WorkDefinition, works } from './works.js'; function main() { @@ -42,8 +41,11 @@ function main() { switch (req.url) { case '/shoginet/config': { - res.writeHead(StatusCodes.OK, { 'Content-Type': 'application/json' }); - return res.end(JSON.stringify(serverConfig)); + // make sure shoginet can start on server downtime + res.writeHead(StatusCodes.INTERNAL_SERVER_ERROR, { + 'Content-Type': 'application/json', + }); + return res.end(); } case '/shoginet/acquire': { return getNextWork(); From cde40242795c0761cdfede10260bd8f07f7672a8 Mon Sep 17 00:00:00 2001 From: 
WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Tue, 30 Dec 2025 22:19:36 +0100 Subject: [PATCH 30/31] Update readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 7af09de..04585b5 100644 --- a/README.md +++ b/README.md @@ -44,7 +44,7 @@ npm run start You will probably want to run Shoginet with a process manager. For systemd (Linux) integration: ```console -node ./scripts/systemd.js > /etc/systemd/system/shoginet.service +node ./scripts/systemd.js | sudo tee /etc/systemd/system/shoginet.service > /dev/null sudo systemctl daemon-reload sudo systemctl start shoginet ``` From 935e11567267162e4bf2b749da30e0309081e0c0 Mon Sep 17 00:00:00 2001 From: WandererXII <43043181+WandererXII@users.noreply.github.com> Date: Tue, 30 Dec 2025 22:20:15 +0100 Subject: [PATCH 31/31] Fix check if we have puzzle --- src/work/puzzle.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/work/puzzle.ts b/src/work/puzzle.ts index f6e6cce..fa108ba 100644 --- a/src/work/puzzle.ts +++ b/src/work/puzzle.ts @@ -88,7 +88,7 @@ export function puzzle(worker: Worker, engine: Engine, work: PuzzleWork): void { const trimmedResultUsis = resultUsis.length % 2 ? resultUsis : resultUsis.slice(0, -1); const filteredResultUsis = clearFutileInterposition(trimmedResultUsis); - const doWeHavePuzzle = !!filteredResultUsis; + const doWeHavePuzzle = filteredResultUsis.length > 0; const result: any = { rejected: !doWeHavePuzzle }; if (doWeHavePuzzle) {