diff --git a/.gitignore b/.gitignore index d79145a3..2a7f4797 100644 --- a/.gitignore +++ b/.gitignore @@ -18,3 +18,8 @@ Media/* Media/PGNs/* Media/Networks/* Media/Events/* + +*.lock +*.epd + +webhooks.json \ No newline at end of file diff --git a/Books/UHO_Lichess_4852_v1.epd.json b/Books/UHO_Lichess_4852_v1.epd.json new file mode 100644 index 00000000..e922de5b --- /dev/null +++ b/Books/UHO_Lichess_4852_v1.epd.json @@ -0,0 +1,4 @@ +{ + "sha": "7A7F6470615A69C6CF23D565417701D38732876F480AF90D67B42ABADE35644A", + "source": "https://raw.githubusercontent.com/AndyGrant/openbench-books/master/UHO_Lichess_4852_v1.epd.zip" +} diff --git a/Client/bench.py b/Client/bench.py index 3ec840a5..c84202e5 100644 --- a/Client/bench.py +++ b/Client/bench.py @@ -20,8 +20,8 @@ # The sole purpose of this module is to invoke run_benchmark(). # -# - binary : Path to, and including, the Binary File -# - network : Path to Network File, or None +# - binary : Relative path to, and including, the Binary File +# - network : Relative path to a private engine's Network File, or None # - private : True or False; Private NNUE engines require special care # - threads : Number of concurrent benches to run # - sets : Number of times to repeat this experiment @@ -37,8 +37,9 @@ import subprocess import sys -from utils import kill_process_by_name -from utils import OpenBenchBadBenchException +## Local imports must only use "import x", never "from x import ..." + +import utils MAX_BENCH_TIME_SECONDS = 60 @@ -103,8 +104,8 @@ def multi_core_bench(binary, network, private, threads): return [outqueue.get(timeout=MAX_BENCH_TIME_SECONDS) for ii in range(threads)] except queue.Empty: # Force kill the engine, thus causing the processes to finish - kill_process_by_name(binary) - raise OpenBenchBadBenchException('[%s] Bench Exceeded Max Duration' % (binary)) + utils.kill_process_by_name(binary) + raise utils.OpenBenchBadBenchException('[%s] Bench Exceeded Max Duration' % (binary)) finally: # Join everything to avoid zombie processes for process in processes: @@ -120,12 +121,12 @@ def run_benchmark(binary, network, private, threads, sets, expected=None): benches.append(bench); speeds.append(speed) if len(set(benches)) != 1: - raise OpenBenchBadBenchException('[%s] Non-Deterministic Benches' % (engine)) + raise utils.OpenBenchBadBenchException('[%s] Non-Deterministic Benches' % (engine)) if None in benches or None in speeds: - raise OpenBenchBadBenchException('[%s] Failed to Execute Benchmark' % (engine)) + raise utils.OpenBenchBadBenchException('[%s] Failed to Execute Benchmark' % (engine)) if expected and expected != benches[0]: - raise OpenBenchBadBenchException('[%s] Wrong Bench: %d' % (engine, benches[0])) + raise utils.OpenBenchBadBenchException('[%s] Wrong Bench: %d' % (engine, benches[0])) - return int(sum(speeds) / len(speeds)), benches[0] + return sum(speeds) // len(speeds), benches[0] diff --git a/Client/client.py b/Client/client.py index 7ce5cb72..9434a84e 100644 --- a/Client/client.py +++ b/Client/client.py @@ -96,10 +96,11 @@ def parse_arguments(): ) # Create and parse all arguments into a raw format - p.add_argument('-U', '--username', help=help_user , required=req_user ) - p.add_argument('-P', '--password', help=help_pass , required=req_pass ) - p.add_argument('-S', '--server' , help=help_server , required=req_server) - p.add_argument( '--clean' , help='Force New Client', action='store_true') + p.add_argument('-U', '--username' , help=help_user , required=req_user ) + p.add_argument('-P', '--password' , help=help_pass , 
required=req_pass ) + p.add_argument('-S', '--server' , help=help_server , required=req_server) + p.add_argument( '--clean' , help='Force New Client' , action='store_true') + p.add_argument( '--no-client-downloads', help='NEVER download a client', action='store_true') # Override, to possibly print worker.py's help as well as client.py's p.print_help = lambda: custom_help(p.format_help()) @@ -160,6 +161,9 @@ def download_client_files(args): args = parse_arguments() + if args.no_client_downloads and not has_worker(): + raise Exception('Client missing, and --no-client-downloads provided') + if args.clean or not has_worker(): print ('[NOTE] Downloading Client...') try_forever(download_client_files, [args], 'Failed to download Client files') @@ -174,6 +178,10 @@ def download_client_files(args): worker.run_openbench_worker(args) except BadVersionException: + + if args.no_client_downloads: + raise Exception('Client update requested, but --no-client-downloads provided') + print ('[NOTE] Downloading newer version of Client...') try_forever(download_client_files, [args], 'Failed to download Client files') diff --git a/Client/genfens.py b/Client/genfens.py index 8e9706e6..b2d54019 100644 --- a/Client/genfens.py +++ b/Client/genfens.py @@ -18,14 +18,14 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# The sole purpose of this module is to invoke create_genfens_opening_book(). +# The main purpose of this module is to invoke create_genfens_opening_book(). +# Refer to Client/worker.py, or Scripts/genfens_engine.py for the arguments. # -# This will execute engines with commands like the following: +# We will execute engines with commands like the following: # ./engine "genfens N seed S book " "quit" # # This work is split over many engines. If a workload requires 1024 openings, -# and there are 16 threads, then each thread will generate 64 openings. The -# openings are saved to Books/openbench.genfens.epd +# and there are 16 threads, then each thread will generate 64 openings. # # create_genfens_opening_book() may raise utils.OpenBenchFailedGenfensException. # This occurs when longer than 15 seconds has elapsed since getting an opening. @@ -38,8 +38,9 @@ import time import multiprocessing -from utils import kill_process_by_name -from utils import OpenBenchFailedGenfensException +## Local imports must only use "import x", never "from x import ..." 
+ +import utils def genfens_required_openings_each(config): @@ -50,38 +51,22 @@ def genfens_required_openings_each(config): return math.ceil(total_games / config.threads) -def genfens_command_args(config, binary_name, network): - - binary = os.path.join('Engines', binary_name) - private = config.workload['test']['dev']['private'] - N = genfens_required_openings_each(config) - book = genfens_book_input_name(config) - extra_args = config.workload['test']['genfens_args'] - - return (binary, network, private, N, book, extra_args) - def genfens_book_input_name(config): - book_name = config.workload['test']['book']['name'] - book_none = book_name.upper() == 'NONE' + book_name = config.workload['test']['book']['name'] + book_none = book_name.upper() == 'NONE' return 'None' if book_none else os.path.join('Books', book_name) -def genfens_seed(config, N_per_thread, thread_index): - - x = config.workload['test']['book_seed'] - y = config.workload['test']['book_index'] - - return (x << 32) + (y + N_per_thread * thread_index) +def genfens_command_builder(args, index): -def genfens_command_builder(binary, network, private, N, book, extra_args, seed): + command = ['./%s' % (args['engine'])] - command = ['./%s' % (binary)] + if args['network'] and args['private']: + command += ['setoption name EvalFile value %s' % (args['network'])] - if network and private: - command += ['setoption name EvalFile value %s' % (network)] - - command += ['genfens %d seed %d book %s %s' % (N, seed, book, extra_args), 'quit'] + fstr = 'genfens %d seed %d book %s %s' + command += [fstr % (args['N'], args['seeds'][index], args['book'], args['extra']), 'quit'] return command @@ -101,51 +86,55 @@ def genfens_single_threaded(command, queue): def genfens_progress_bar(curr, total): - prev_progress = int(50 * (curr - 1) / total) - curr_progress = int(50 * (curr - 0) / total) + prev_progress = 50 * (curr - 1) // total + curr_progress = 50 * (curr - 0) // total if curr_progress != prev_progress: bar_text = '=' * curr_progress + ' ' * (50 - curr_progress) print ('\r[%s] %d/%d' % (bar_text, curr, total), end='', flush=True) -def create_genfens_opening_book(config, binary_name, network): +def convert_fen_to_epd(fen): + + # Input : rnbqkbnr/pppp2pp/4pp2/8/2P2P2/P7/1P1PP1PP/RNBQKBNR b KQkq - 0 3 + # Output : rnbqkbnr/pppp2pp/4pp2/8/2P2P2/P7/1P1PP1PP/RNBQKBNR b KQkq - hmvc 0; fmvn 3; + + halfmove, fullmove = fen.split()[4:] - # Format: ./engine "genfens N seed S book " "quit" - N = genfens_required_openings_each(config) - seeds = config.workload['test']['genfens_seeds'] - args = genfens_command_args(config, binary_name, network) + return ' '.join(fen.split()[:4]) + ' hmvc %d; fmvn %d;' % (int(halfmove), int(fullmove)) +def create_genfens_opening_book(args): + + N = args['N'] + threads = args['threads'] start_time = time.time() output = multiprocessing.Queue() - print ('\nGenerating %d Openings using %d Threads...' % (N * config.threads, config.threads)) + print ('\nGenerating %d Openings using %d Threads...' % (N * threads, threads)) # Split the work over many threads. 
Ensure the seed varies by the thread, # number in accordance with how many openings each thread will generate + processes = [ multiprocessing.Process( target=genfens_single_threaded, - args=(genfens_command_builder(*args, seeds[ii]), output)) - for ii in range(config.threads) + args=(genfens_command_builder(args, index), output)) + for index in range(threads) ] for process in processes: process.start() - # Parse the Queue and save the content into Books/openbench.genfens.epd - with open(os.path.join('Books', 'openbench.genfens.epd'), 'w') as fout: - - try: # Each process will deposit exactly N results into the Queue - for iteration in range(N * config.threads): - fout.write(output.get(timeout=15) + '\n') - genfens_progress_bar(iteration+1, N * config.threads) + try: # Each process will deposit exactly N results into the Queue + for iteration in range(N * threads): + args['output'].write(convert_fen_to_epd(output.get(timeout=15)) + '\n') + genfens_progress_bar(iteration+1, N * threads) - except queue.Empty: # Force kill the engine, thus causing the processes to finish - kill_process_by_name(binary_name) - raise OpenBenchFailedGenfensException('[%s] Stalled during genfens' % (binary_name)) + except queue.Empty: # Force kill the engine, thus causing the processes to finish + utils.kill_process_by_name(binary_name) + raise utils.OpenBenchFailedGenfensException('[%s] Stalled during genfens' % (binary_name)) - finally: # Join everything to avoid zombie processes - for process in processes: - process.join() + finally: # Join everything to avoid zombie processes + for process in processes: + process.join() print('\nFinished Building Opening Book in %.3f seconds' % (time.time() - start_time)) diff --git a/Client/pgn_util.py b/Client/pgn_util.py index 1df1b2ba..42cea555 100644 --- a/Client/pgn_util.py +++ b/Client/pgn_util.py @@ -22,6 +22,8 @@ import re import sys +## Local imports must only use "import x", never "from x import ..." + # For use externally REGEX_COMMENT_VERBOSE = r'(book|[+-]?M?\d+(?:\.\d+)? \d+/\d+ \d+ \d+)' REGEX_COMMENT_COMPACT = r'(book|[+-]?M?\d+(?:\.\d+)?) \d+/\d+ \d+ \d+' diff --git a/Client/utils.py b/Client/utils.py index 63f7fad2..cef63327 100644 --- a/Client/utils.py +++ b/Client/utils.py @@ -28,9 +28,17 @@ import tempfile import zipfile +## Local imports must only use "import x", never "from x import ..." 
+ IS_WINDOWS = platform.system() == 'Windows' # Don't touch this IS_LINUX = platform.system() != 'Windows' # Don't touch this + +class OpenBenchFatalWorkerException(Exception): + def __init__(self, message): + self.message = 'Restarting Worker: ' + message + super().__init__(self.message) + class OpenBenchBuildFailedException(Exception): def __init__(self, message, logs): self.message = message @@ -73,9 +81,16 @@ def __init__(self, message): self.message = message super().__init__(self.message) +class OpenBenchMisssingPGNException(Exception): + def __init__(self, message): + self.message = message + super().__init__(self.message) + def kill_process_by_name(process_name): + process_name = os.path.basename(process_name) + if IS_LINUX: subprocess.run(['pkill', '-f', process_name]) diff --git a/Client/worker.py b/Client/worker.py index 34bc6e2b..268eb6a5 100644 --- a/Client/worker.py +++ b/Client/worker.py @@ -20,6 +20,7 @@ import argparse import cpuinfo +import importlib import json import multiprocessing import os @@ -40,21 +41,23 @@ from itertools import combinations_with_replacement from concurrent.futures import ThreadPoolExecutor -## Local imports +## Local imports must only use "import x", never "from x import ..." +## Local imports must also be done in reload_local_imports() import bench +import genfens +import pgn_util +import utils + +## Local imports from client are an exception from client import BadVersionException from client import url_join from client import try_forever -from utils import * -from pgn_util import compress_list_of_pgns -from genfens import create_genfens_opening_book - ## Basic configuration of the Client. These timeouts can be changed at will -CLIENT_VERSION = 32 # Client version to send to the Server +CLIENT_VERSION = 37 # Client version to send to the Server TIMEOUT_HTTP = 30 # Timeout in seconds for HTTP requests TIMEOUT_ERROR = 10 # Timeout in seconds when any errors are thrown TIMEOUT_WORKLOAD = 30 # Timeout in seconds between workload requests @@ -100,11 +103,12 @@ def process_args(self, args): self.username = args.username self.password = args.password self.server = args.server - self.threads = int(args.threads) + self.threads = int(args.threads) if args.threads != 'auto' else self.physical_cores self.sockets = int(args.nsockets) self.identity = args.identity if args.identity else 'None' self.syzygy_path = args.syzygy if args.syzygy else None self.fleet = args.fleet if args.fleet else False + self.noisy = args.noisy if args.noisy else False self.focus = args.focus if args.focus else [] def init_client(self): @@ -150,16 +154,34 @@ def scan_for_compilers(self, data): # Try to find at least one working compiler for compiler in build_info['compilers']: + # Some compilers, like cargo, can require additional flags like `nightly` + additional_flags = None # Compilers may require a specific version if '>=' in compiler: compiler, version = compiler.split('>=') + + # Save any additional flags after the - + if '-' in version: + version, additional_flags = version.split('-') + version = tuple(map(int, version.split('.'))) else: version = (0, 0, 0) # Try to confirm this compiler is present, and new enough try: + found_flags = None match = get_version(compiler) + + # Fetch any additional flags after the - + if '-' in match: + match, found_flags = match.split('-') + + # If this engine requires additional flags, check that this worker has them + if additional_flags and additional_flags not in found_flags: + # Required flags not found; unable to execute compiler + 
continue + if tuple(map(int, match.split('.'))) >= version: print('%-16s | %-8s (%s)' % (engine, compiler, match)) self.compilers[engine] = (compiler, match) @@ -229,7 +251,7 @@ def report(config, endpoint, payload, files=None): payload['machine_id'] = config.machine_id payload['secret'] = config.secret_token - target = url_join(config.server, endpoint) + target = utils.url_join(config.server, endpoint) response = requests.post(target, data=payload, files=files, timeout=TIMEOUT_HTTP) # Check for a json repsone, to look for Client Version Errors @@ -240,6 +262,10 @@ def report(config, endpoint, payload, files=None): if 'Bad Client Version' in as_json.get('error', ''): raise BadVersionException() + # Some fatal error, forcing us out of the Workload + if 'error' in as_json: + raise utils.OpenBenchFatalWorkerException(as_json['error']) + return response @staticmethod @@ -452,6 +478,7 @@ def engine_settings(config, command, branch, scale_factor, cutechess_idx): network = config.workload['test'][branch]['network'] private = config.workload['test'][branch]['private'] engine = config.workload['test'][branch]['engine'] + protocol= config.workload['test'][branch]['protocol'] syzygy = config.workload['test']['syzygy_wdl'] # Human-readable name, and scale the time control @@ -478,7 +505,7 @@ def engine_settings(config, command, branch, scale_factor, cutechess_idx): # Join options together in the Cutechess format options = ' option.'.join([''] + re.findall(r'"[^"]*"|\S+', options)) - return '-engine dir=Engines/ cmd=./%s proto=uci %s%s name=%s-%s' % (command, control, options, engine, branch) + return '-engine dir=Engines/ cmd=./%s proto=%s %s%s name=%s-%s' % (command, protocol, control, options, engine, branch) @staticmethod def pgnout_settings(config, timestamp, cutechess_idx): @@ -538,13 +565,13 @@ def parse_finished_game(line): def kill_everything(dev_process, base_process): if IS_LINUX: - kill_process_by_name('cutechess-ob') + utils.kill_process_by_name('cutechess-ob') if IS_WINDOWS: - kill_process_by_name('cutechess-ob.exe') + utils.kill_process_by_name('cutechess-ob.exe') - kill_process_by_name(dev_process) - kill_process_by_name(base_process) + utils.kill_process_by_name(dev_process) + utils.kill_process_by_name(base_process) @staticmethod def pgn_name(config, timestamp, cutechess_idx): @@ -561,6 +588,10 @@ class PGNHelper: @staticmethod def slice_pgn_file(file): + if not os.path.isfile(file): + reason = 'Unable to find %s. Cutechess exited with no finished games.' 
% (file) + raise utils.OpenBenchMisssingPGNException(reason) + with open(file) as pgn: while True: @@ -674,9 +705,8 @@ def send_results(self, report_interval, final_report=False): # Signal an exit if the test ended return 'stop' in response - except BadVersionException: - self.abort_flag.set() - return True + except (BadVersionException, utils.OpenBenchFatalWorkerException): + raise except Exception: traceback.print_exc() @@ -706,12 +736,12 @@ def get_version(program): try: process = Popen([program, '--version'], stdout=PIPE, stderr=PIPE) stdout = process.communicate()[0].decode('utf-8') - return re.search(r'\d+\.\d+(\.\d+)?', stdout).group() + return re.search(r'\d+\.\d+(\.\d+)?(-\w+)?', stdout).group() except: process = Popen([program, 'version'], stdout=PIPE, stderr=PIPE) stdout = process.communicate()[0].decode('utf-8') - return re.search(r'\d+\.\d+(\.\d+)?', stdout).group() + return re.search(r'\d+\.\d+(\.\d+)?(-\w+)?', stdout).group() def locate_utility(util, force_exit=True, report_error=True): @@ -790,7 +820,6 @@ def has_filename(paths, name): def scale_time_control(workload, scale_factor, branch): # Extract everything from the workload dictionary - reference_nps = workload['test'][branch]['nps'] time_control = workload['test'][branch]['time_control'] # Searching for Nodes or Depth time controls ("N=", "D=") @@ -847,13 +876,43 @@ def find_pgn_error(reason, command): return data[ii] + pgn +def determine_scale_factor(config, dev_name, dev_network, base_name, base_network): + + # Run the benchmarks and compute the scaling NPS value + dev_nps = safe_run_benchmarks(config, 'dev' , dev_name , dev_network ) + base_nps = safe_run_benchmarks(config, 'base', base_name, base_network) + ServerReporter.report_nps(config, dev_nps, base_nps) + + dev_factor = base_factor = None + + # Scaling is only done relative to the Dev Engine + if config.workload['test']['scale_method'] == 'DEV': + factor = config.workload['test']['scale_nps'] / dev_nps + print ('\nScale Factor (Using Dev): %.4f' % (factor)) + + # Scaling is only done relative to the Base Engine + elif config.workload['test']['scale_method'] == 'BASE': + factor = config.workload['test']['scale_nps'] / base_nps + print ('\nScale Factor (Using Base): %.4f' % (factor)) + + # Scaling is done using an average of both Engines + else: + dev_factor = config.workload['test']['scale_nps'] / dev_nps + base_factor = config.workload['test']['scale_nps'] / base_nps + factor = (dev_factor + base_factor) / 2 + print ('\nScale Factor (Using Dev ): %.4f' % (dev_factor)) + print ('Scale Factor (Using Base): %.4f' % (base_factor)) + print ('Scale Factor (Using Both): %.4f' % (factor)) + + return factor + ## Functions interacting with the OpenBench server that establish the initial ## connection and then make simple requests to retrieve Workloads as json objects def server_configure_worker(config): # Server tells us how to build or obtain binaries - target = url_join(config.server, 'clientGetBuildInfo') + target = utils.url_join(config.server, 'clientGetBuildInfo') data = requests.get(target, timeout=TIMEOUT_HTTP).json() config.scan_for_compilers(data) # Public engine build tools @@ -878,6 +937,7 @@ def server_configure_worker(config): 'concurrency' : config.threads, # Threads to use to play games 'sockets' : config.sockets, # Cutechess copies, usually equal to Socket count 'syzygy_max' : config.syzygy_max, # Whether or not the machine has Syzygy support + 'noisy' : config.noisy, # Whether our results are unstable for time-based workloads 'focus' : config.focus, 
# List of engines we have a preference to help 'client_ver' : CLIENT_VERSION, # Version of the Client, which the server may reject } @@ -889,21 +949,16 @@ def server_configure_worker(config): } # Send all of this to the server, and get a Machine Id + Secret Token - target = url_join(config.server, 'clientWorkerInfo') + target = utils.url_join(config.server, 'clientWorkerInfo') response = requests.post(target, data=payload, timeout=TIMEOUT_HTTP).json() - # Delete the machine.txt if we have saved an invalid machine number - if response.get('error', '').lower() == "bad machine id": - config.machine_id = 'None' - os.remove('machine.txt') - # Throw all the way back to the client.py if 'Bad Client Version' in response.get('error', ''): raise BadVersionException(); # The 'error' header is included if there was an issue if 'error' in response: - raise Exception('[Error] %s' % (response['error'])) + raise utils.OpenBenchFatalWorkerException(response['error']) # Save the machine id, to avoid re-registering every time with open('machine.txt', 'w') as fout: @@ -918,21 +973,21 @@ def server_request_workload(config): print('\nRequesting Workload from Server...') payload = { 'machine_id' : config.machine_id, 'secret' : config.secret_token, 'blacklist' : config.blacklist } - target = url_join(config.server, 'clientGetWorkload') + target = utils.url_join(config.server, 'clientGetWorkload') response = requests.post(target, data=payload, timeout=TIMEOUT_HTTP) # Server errors produce garbage back, which we should not alarm a user with try: response = response.json() except json.decoder.JSONDecodeError: - raise OpenBenchBadServerResponseException() from None + raise utils.OpenBenchBadServerResponseException() from None # Throw all the way back to the client.py if 'Bad Client Version' in response.get('error', ''): raise BadVersionException(); - # The 'error' header is included if there was an issue + # Something very bad happened. 
Re-initialize the Client if 'error' in response: - raise Exception('[Error] %s' % (response['error'])) + raise utils.OpenBenchFatalWorkerException(response['error']) # Log the start of a new Workload if 'workload' in response: @@ -948,7 +1003,7 @@ def server_request_workload(config): def complete_workload(config): # Download the opening book, throws an exception on corruption - download_opening_book( + utils.download_opening_book( config.workload['test']['book']['sha' ], config.workload['test']['book']['source'], config.workload['test']['book']['name' ], @@ -966,20 +1021,8 @@ def complete_workload(config): if config.workload['test']['type'] == 'DATAGEN': safe_create_genfens_opening_book(config, dev_name, dev_network) - # Run the benchmarks and compute the scaling NPS value - dev_nps = safe_run_benchmarks(config, 'dev' , dev_name , dev_network ) - base_nps = safe_run_benchmarks(config, 'base', base_name, base_network) - ServerReporter.report_nps(config, dev_nps, base_nps) - - # Scale the engines together, using their NPS relative to expected - dev_factor = config.workload['test']['dev' ]['nps'] / dev_nps - base_factor = config.workload['test']['base']['nps'] / base_nps - avg_factor = (dev_factor + base_factor) / 2 - - print () # Record this information - print ('Scale Factor Dev : %.4f' % (dev_factor )) - print ('Scale Factor Base : %.4f' % (base_factor)) - print ('Scale Factor Avg : %.4f' % (avg_factor )) + # Scale time control based on the Engine's local NPS + scale_factor = determine_scale_factor(config, dev_name, dev_network, base_name, base_network) # Server knows how many copies of Cutechess we should run cutechess_cnt = config.workload['distribution']['cutechess-count'] @@ -991,11 +1034,6 @@ def complete_workload(config): print ('%d concurrent games per copy' % (concurrency_per)) print ('%d total games per cutechess copy\n' % (games_per)) - # Scale using the base factor only, in the event of a cross-engine test - dev_engine = config.workload['test']['dev' ]['engine'] - base_engine = config.workload['test']['base']['engine'] - scale_factor = base_factor if dev_engine != base_engine else avg_factor - # Launch and manage all of the Cutechess workers with ThreadPoolExecutor(max_workers=cutechess_cnt) as executor: @@ -1017,7 +1055,6 @@ def complete_workload(config): # Kill everything during an Exception, but print it except (Exception, KeyboardInterrupt): - traceback.print_exc() abort_flag.set() Cutechess.kill_everything(dev_name, base_name) raise @@ -1026,12 +1063,12 @@ def complete_workload(config): if config.workload['test']['upload_pgns'] != 'FALSE': compact = config.workload['test']['upload_pgns'] == 'COMPACT' pgn_files = [Cutechess.pgn_name(config, timestamp, x) for x in range(cutechess_cnt)] - ServerReporter.report_pgn(config, compress_list_of_pgns(pgn_files, scale_factor, compact)) + ServerReporter.report_pgn(config, pgn_util.compress_list_of_pgns(pgn_files, scale_factor, compact)) def safe_download_network_weights(config, branch): # Wraps utils.py:download_network() - # May raise OpenBenchCorruptedNetworkException + # May raise utils.OpenBenchCorruptedNetworkException engine = config.workload['test'][branch]['engine' ] net_name = config.workload['test'][branch]['netname'] @@ -1043,7 +1080,7 @@ def safe_download_network_weights(config, branch): return None credentials = (config.server, config.username, config.password) - download_network(*credentials, engine, net_name, net_sha, net_path) + utils.download_network(*credentials, engine, net_name, net_sha, net_path) return net_path @@ 
-1057,16 +1094,16 @@ def safe_download_engine(config, branch, net_path): source = config.workload['test'][branch]['source'] private = config.workload['test'][branch]['private'] - bin_name = engine_binary_name(engine, commit_sha, net_path, private) + bin_name = utils.engine_binary_name(engine, commit_sha, net_path, private) out_path = os.path.join('Engines', bin_name) if private: try: - return download_private_engine( + return utils.download_private_engine( engine, branch_name, source, out_path, config.cpu_name, config.cpu_flags) - except OpenBenchMissingArtifactException as error: + except utils.OpenBenchMissingArtifactException as error: ServerReporter.report_missing_artifact(config, branch, error.name, error.logs) raise @@ -1076,10 +1113,10 @@ def safe_download_engine(config, branch, net_path): compiler = config.compilers[engine][0] try: - return download_public_engine( + return utils.download_public_engine( engine, net_path, branch_name, source, make_path, out_path, compiler) - except OpenBenchBuildFailedException as error: + except utils.OpenBenchBuildFailedException as error: print ('Failed to build %s-%s...\n\nCompiler Output:' % (engine, branch_name)) for line in error.logs.split('\n'): @@ -1092,11 +1129,25 @@ def safe_download_engine(config, branch, net_path): def safe_create_genfens_opening_book(config, dev_name, dev_network): - try: create_genfens_opening_book(config, dev_name, dev_network) + with open(os.path.join('Books', 'openbench.genfens.epd'), 'w') as fout: + + args = { + 'N' : genfens.genfens_required_openings_each(config), + 'book' : genfens.genfens_book_input_name(config), + 'seeds' : config.workload['test']['genfens_seeds'], + 'extra' : config.workload['test']['genfens_args'], + 'private' : config.workload['test']['dev']['private'], + 'engine' : os.path.join('Engines', dev_name), + 'network' : dev_network, + 'threads' : config.threads, + 'output' : fout, + } - except OpenBenchFailedGenfensException as error: - ServerReporter.report_engine_error(config, error.message) - raise + try: genfens.create_genfens_opening_book(args) + + except utils.OpenBenchFailedGenfensException as error: + ServerReporter.report_engine_error(config, error.message) + raise def safe_run_benchmarks(config, branch, engine, network): @@ -1110,7 +1161,7 @@ def safe_run_benchmarks(config, branch, engine, network): speed, nodes = bench.run_benchmark( binary, network, private, config.threads, 1, expected) - except OpenBenchBadBenchException as error: + except utils.OpenBenchBadBenchException as error: ServerReporter.report_bad_bench(config, error.message) raise @@ -1189,6 +1240,18 @@ def run_and_parse_cutechess(config, command, cutechess_idx, results_queue, abort # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +def reload_local_imports(): + + import bench + import genfens + import pgn_util + import utils + + importlib.reload(bench) + importlib.reload(genfens) + importlib.reload(pgn_util) + importlib.reload(utils) + def parse_arguments(client_args): # Pretty formatting @@ -1198,12 +1261,13 @@ def parse_arguments(client_args): ) # Arguments specific to worker.py - p.add_argument('-T', '--threads' , help='Total Threads' , required=True ) - p.add_argument('-N', '--nsockets', help='Number of Sockets' , required=True ) - p.add_argument('-I', '--identity', help='Machine pseudonym' , required=False ) - p.add_argument( '--syzygy' , help='Syzygy WDL' , required=False ) - p.add_argument( '--fleet' , help='Fleet Mode' , action='store_true') - p.add_argument( '--focus' , 
help='Prefer certain engine(s)', nargs='+' ) + p.add_argument('-T', '--threads' , help='Total Threads' , required=True ) + p.add_argument('-N', '--nsockets', help='Number of Sockets' , required=True ) + p.add_argument('-I', '--identity', help='Machine pseudonym' , required=False ) + p.add_argument( '--syzygy' , help='Syzygy WDL' , required=False ) + p.add_argument( '--fleet' , help='Fleet Mode' , action='store_true') + p.add_argument( '--noisy' , help='Reject time-based workloads' , action='store_true') + p.add_argument( '--focus' , help='Prefer certain engine(s)' , nargs='+' ) # Ignore unknown arguments ( from client ) worker_args, unknown = p.parse_known_args() @@ -1213,18 +1277,30 @@ def parse_arguments(client_args): def run_openbench_worker(client_args): - args = parse_arguments(client_args) # Merge client.py and worker.py args - config = Configuration(args) # Holds System info, args, and Workload info + # If the client was updated, we must reload everything + reload_local_imports() setup_error = '[Note] Unable to establish initial connection with the Server!' connection_error = '[Note] Unable to reach the server to request a workload!' + args = parse_arguments(client_args) # Merge client.py and worker.py args + config = Configuration(args) # Holds System info, args, and Workload info try_forever(server_configure_worker, [config], setup_error) if IS_LINUX: set_cutechess_permissions() + # Cleanup in case openbench.exit still exists + if os.path.isfile('openbench.exit'): + os.remove('openbench.exit') + while True: + + # Check for exit signal via openbench.exit + if os.path.isfile('openbench.exit'): + print('Exited via openbench.exit') + sys.exit() + try: # Cleanup on each workload request cleanup_client() @@ -1241,14 +1317,17 @@ def run_openbench_worker(client_args): # In either case, wait before requesting again else: time.sleep(TIMEOUT_WORKLOAD) - # Check for exit signal via openbench.exit - if os.path.isfile('openbench.exit'): - print('Exited via openbench.exit') - sys.exit() - + # Caught by client.py, prompting a Client Update except BadVersionException: raise BadVersionException() + # Fatal error, fully restart the Worker + except utils.OpenBenchFatalWorkerException: + traceback.print_exc() + time.sleep(TIMEOUT_ERROR) + config = Configuration(args) + try_forever(server_configure_worker, [config], setup_error) + except Exception: traceback.print_exc() time.sleep(TIMEOUT_ERROR) diff --git a/Config/config.json b/Config/config.json index 19599e61..8a287763 100644 --- a/Config/config.json +++ b/Config/config.json @@ -1,14 +1,14 @@ { - "client_version" : 32, - "client_repo_url" : "https://github.com/AndyGrant/OpenBench", - "client_repo_ref" : "master", + "client_version": 37, + "client_repo_url": "https://github.com/dannyhammer/openbench", + "client_repo_ref": "master", - "use_cross_approval" : false, - "require_login_to_view" : false, - "require_manual_registration" : false, - "balance_engine_throughputs" : false, + "use_cross_approval": false, + "require_login_to_view": false, + "require_manual_registration": false, + "balance_engine_throughputs": true, - "books" : [ + "books": [ "2moves_v1.epd", "3moves_FRC.epd", "4moves_noob.epd", @@ -19,29 +19,36 @@ "Pohl.pgn", "UHO_4060_v2.epd", "UHO_4060_vB.epd", - "UHO_4060_vT.epd" + "UHO_4060_vT.epd", + "UHO_Lichess_4852_v1.epd" ], - "engines" : [ + "engines": [ "4ku", - "Berserk", - "Bit-Genie", "BlackMarlin", - "Demolito", - "Drofa", - "Equisetum", + "BoyChesser", + "ByteKnight", + "Caps", + "Chessatron", + "Dog", + "Episteme", "Ethereal", 
"FabChess", - "Halogen", + "GrandChess", "Igel", - "Koivisto", - "Laser", - "RubiChess", - "Seer", + "Obsidian", + "Pawnocchio", + "Perseus", + "Polaris", + "Rose", "Stash", "Stockfish", - "Weiss", - "Winter", - "Zahak" + "Stormphrax", + "Tantabus", + "Toad", + "Viridithas", + "Yukari", + "bannou", + "ice4" ] -} \ No newline at end of file +} diff --git a/Engines/Alexandria.json b/Engines/Alexandria.json deleted file mode 100644 index 7f37bee4..00000000 --- a/Engines/Alexandria.json +++ /dev/null @@ -1,85 +0,0 @@ -{ - "private" : false, - "nps" : 1025000, - "source" : "https://github.com/PGG106/Alexandria", - - "build" : { - "path" : "", - "compilers" : ["g++"], - "cpuflags" : [], - "systems" : ["Linux", "Windows", "Darwin"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "master", - "book_name" : "Pohl.epd", - "test_bounds" : "[0.00, 3.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "8.0+0.08", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=64", - "both_time_control" : "40.0+0.4", - "workload_size" : 8 - }, - - "STC regression" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", - "workload_size" : 32, - "test_bounds" : "[-3.00, 1.00]" - }, - - "LTC regression" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "40.0+0.4", - "workload_size" : 8, - "test_bounds" : "[-3.00, 1.00]" - }, - - "SMP STC" : { - "both_options" : "Threads=8 Hash=64", - "both_time_control" : "4.0+0.04", - "workload_size" : 64 - }, - - "SMP LTC" : { - "both_options" : "Threads=8 Hash=256", - "both_time_control" : "16.0+0.16", - "workload_size" : 16 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "Pohl.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Altair.json b/Engines/Altair.json deleted file mode 100644 index 6a91583c..00000000 --- a/Engines/Altair.json +++ /dev/null @@ -1,89 +0,0 @@ -{ - "private" : false, - "nps" : 990000, - "source" : "https://github.com/Alex2262/AltairChessEngine", - - "build" : { - "path" : "", - "compilers" : ["clang++"], - "cpuflags" : [], - "systems" : ["Linux", "Windows", "Darwin"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "master", - "book_name" : "Pohl.epd", - "test_bounds" : "[0.00, 5.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "40.0+0.4", - "workload_size" : 8 - }, - - "STC Simple" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", - "workload_size" : 32, - "test_bounds" : "[-5.00, 0.00]" - }, - - "LTC Simple" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "40.0+0.4", - "workload_size" : 8, - "test_bounds" : "[-5.00, 0.00]" - }, - - "STC Prog" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", - "workload_size" : 32, - 
"book_name" : "8moves_v3.epd", - "test_max_games" : 3000 - }, - - "LTC Prog" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "40.0+0.4", - "workload_size" : 8, - "book_name" : "8moves_v3.epd", - "test_max_games" : 1000 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "Pohl.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Berserk.json b/Engines/Berserk.json deleted file mode 100644 index 7c7b0ca9..00000000 --- a/Engines/Berserk.json +++ /dev/null @@ -1,105 +0,0 @@ -{ - "private" : false, - "nps" : 750000, - "source" : "https://github.com/jhonnold/berserk", - - "build" : { - "path" : "src", - "compilers" : ["clang", "gcc"], - "cpuflags" : ["AVX2", "FMA", "POPCNT"], - "systems" : ["Linux"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "main", - "book_name" : "UHO_4060_vB.epd", - "test_bounds" : "[0.00, 2.00]", - "test_confidence" : "[0.1, 0.05]", - "win_adj" : "movecount=4 score=300", - "draw_adj" : "movenumber=32 movecount=6 score=6" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=8", - "both_time_control" : "10.0+0.1", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=64", - "both_time_control" : "60.0+0.6", - "workload_size" : 8, - "test_bounds" : "[0.00, 2.50]" - }, - - "STC Simplify" : { - "both_options" : "Threads=1 Hash=8", - "both_time_control" : "10.0+0.1", - "workload_size" : 32, - "test_bounds" : "[-2.00, 0.00]" - }, - - "LTC Simplify" : { - "both_options" : "Threads=1 Hash=64", - "both_time_control" : "60.0+0.6", - "workload_size" : 8, - "test_bounds" : "[-2.00, 0.00]" - }, - - "STC Fixed" : { - "both_options" : "Threads=1 Hash=8", - "both_time_control" : "10.0+0.1", - "workload_size" : 32, - "test_max_games" : 20000 - }, - - "LTC Fixed" : { - "both_options" : "Threads=1 Hash=64", - "both_time_control" : "60.0+0.6", - "workload_size" : 8, - "test_max_games" : 20000 - }, - - "SMP STC" : { - "both_options" : "Threads=8 Hash=64", - "both_time_control" : "5.0+0.05", - "workload_size" : 64 - }, - - "SMP LTC" : { - "both_options" : "Threads=8 Hash=256", - "both_time_control" : "30.0+0.30", - "workload_size" : 16 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "UHO_4060_vB.epd", - "win_adj" : "movecount=4 score=300", - "draw_adj" : "movenumber=32 movecount=6 score=6" - }, - - "STC" : { - "dev_options" : "Threads=1 Hash=8", - "dev_time_control" : "10.0+0.10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Bit-Genie.json b/Engines/Bit-Genie.json deleted file mode 100644 index c28384dd..00000000 --- a/Engines/Bit-Genie.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "private" : false, - "nps" : 1992000, - "source" : "https://github.com/Aryan1508/Bit-Genie", - - "build" : { - "path" : "src", - "compilers" : ["g++"], - "cpuflags" : ["POPCNT"], - "systems" : ["Windows", "Linux"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "master", - "book_name" : "Pohl.epd", - "test_bounds" : "[0.00, 5.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - 
"draw_adj" : "movenumber=40 movecount=8 score=10" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=8", - "both_time_control" : "8.0+0.08", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=64", - "both_time_control" : "40.0+0.4", - "workload_size" : 8 - }, - - "SMP STC" : { - "both_options" : "Threads=8 Hash=64", - "both_time_control" : "5.0+0.05", - "workload_size" : 64 - }, - - "SMP LTC" : { - "both_options" : "Threads=8 Hash=256", - "both_time_control" : "20.0+0.2", - "workload_size" : 16 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "Pohl.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/BoyChesser.json b/Engines/BoyChesser.json new file mode 100644 index 00000000..c6c4f1d9 --- /dev/null +++ b/Engines/BoyChesser.json @@ -0,0 +1,44 @@ +{ + "private" : false, + "nps" : 900000, + "source" : "https://github.com/analog-hors/Boychesser", + + "build" : { + "path" : "", + "compilers" : ["dotnet>=7.0.0"], + "cpuflags" : ["POPCNT"], + "systems" : ["Window", "Linux"] + }, + + "test_presets" : { + "default" : { + "base_branch" : "main", + "book_name" : "4moves_noob.epd", + "test_bounds" : "[0.00, 5.00]", + "test_confidence" : "[0.1, 0.05]", + "win_adj" : "None", + "draw_adj" : "None", + "syzygy_adj" : "Disabled" + }, + + "STC" : { + "both_options" : "Threads=1 Hash=8", + "both_time_control" : "8.0+0.08", + "workload_size" : 8 + }, + + "LTC" : { + "both_options" : "Threads=1 Hash=64", + "both_time_control" : "40.0+0.4", + "workload_size" : 4 + } + }, + + "tune_presets" : { + "default" : { + "book_name" : "4moves_noob.epd", + "win_adj" : "movecount=3 score=400", + "draw_adj" : "movenumber=40 movecount=8 score=10" + } + } +} diff --git a/Engines/ByteKnight.json b/Engines/ByteKnight.json new file mode 100644 index 00000000..d734adb4 --- /dev/null +++ b/Engines/ByteKnight.json @@ -0,0 +1,105 @@ +{ + "private": false, + "nps": 3695000, + "source": "https://github.com/DeveloperPaul123/byte-knight", + "build": { + "path": "", + "compilers": [ + "cargo>=1.80.1" + ], + "cpuflags": [], + "systems": [ + "Linux", + "Windows", + "Darwin" + ] + }, + "test_presets": { + "default": { + "base_branch": "main", + "book_name": "Pohl.epd", + "test_bounds": "[0.00, 10.00]", + "test_confidence": "[0.05, 0.05]", + "win_adj": "None", + "draw_adj": "None" + }, + "STC": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32 + }, + "LTC": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8 + }, + "STC Regression": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32, + "test_bounds": "[-5.00, 0.00]" + }, + "LTC Regression": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8, + "test_bounds": "[-5.00, 0.00]" + }, + "STC Prog": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32, + "test_max_games": 3000 + }, + "LTC Prog": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "60.0+0.6", + "workload_size": 8, + "test_max_games": 1000 + }, + "SMP STC": { + "both_options": "Threads=4 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32 + 
}, + "SMP LTC": { + "both_options": "Threads=4 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8 + } + }, + "tune_presets": { + "default": { + "book_name": "UHO_4060_v2.epd", + "win_adj": "movecount=3 score=400", + "draw_adj": "movenumber=40 movecount=8 score=10" + }, + "STC": { + "dev_options": "Threads=1 Hash=8", + "dev_time_control": "10.0+0.10" + }, + "MTC": { + "dev_options": "Threads=1 Hash=32", + "dev_time_control": "30.0+0.30" + }, + "LTC": { + "dev_options": "Threads=1 Hash=64", + "dev_time_control": "60.0+0.60" + }, + "VLTC": { + "dev_options": "Threads=1 Hash=128", + "dev_time_control": "180.0+1.80" + } + }, + "datagen_presets": { + "default": { + "win_adj": "None", + "draw_adj": "None", + "workload_size": 128 + }, + "40k Nodes": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "N=40000" + } + } +} diff --git a/Engines/Caissa.json b/Engines/Caissa.json deleted file mode 100644 index 461504c9..00000000 --- a/Engines/Caissa.json +++ /dev/null @@ -1,83 +0,0 @@ -{ - "private" : false, - "nps" : 1340000, - "source" : "https://github.com/Witek902/Caissa", - - "build" : { - "path" : "src", - "compilers" : ["g++"], - "cpuflags" : ["AVX2", "FMA", "POPCNT"], - "systems" : ["Windows", "Linux"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "master", - "book_name" : "UHO_Lichess_4852_v1.epd", - "test_bounds" : "[0.00, 2.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=32 movecount=8 score=5" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "10.0+0.1", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "60.0+0.6", - "workload_size" : 8 - }, - - "LTC Fixed" : { - "both_options" : "Threads=1 Hash=64", - "both_time_control" : "60.0+0.6", - "workload_size" : 8, - "test_max_games" : 20000 - }, - - "SMP STC" : { - "both_options" : "Threads=8 Hash=64", - "both_time_control" : "5.0+0.05", - "workload_size" : 64 - }, - - "SMP LTC" : { - "both_options" : "Threads=8 Hash=256", - "both_time_control" : "30.0+0.30", - "workload_size" : 16 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "UHO_4060_v2.epd", - "win_adj" : "movecount=4 score=500", - "draw_adj" : "movenumber=32 movecount=6 score=6" - }, - - "STC" : { - "dev_options" : "Threads=1 Hash=8", - "dev_time_control" : "10.0+0.10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Akimbo.json b/Engines/Caps.json similarity index 74% rename from Engines/Akimbo.json rename to Engines/Caps.json index 0841f7c0..5fe5877c 100644 --- a/Engines/Akimbo.json +++ b/Engines/Caps.json @@ -1,11 +1,11 @@ { "private" : false, - "nps" : 1075000, - "source" : "https://github.com/jw1912/akimbo", + "nps" : 1200000, + "source" : "https://github.com/toanth/motors", "build" : { "path" : "", - "compilers" : ["cargo>=1.70.0"], + "compilers" : ["cargo>=1.86.0"], "cpuflags" : [], "systems" : ["Linux", "Windows", "Darwin"] }, @@ -14,24 +14,24 @@ "default" : { "base_branch" : "main", - "book_name" : "Pohl.epd", + "book_name" : "UHO_Lichess_4852_v1.epd", "test_bounds" : "[0.00, 5.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" + "test_confidence" : "[0.10, 0.05]", + "win_adj" : 
"movecount=10 score=1000", + "draw_adj" : "movenumber=40 movecount=20 score=7" }, "STC" : { "both_options" : "Threads=1 Hash=32", "both_time_control" : "8.0+0.08", "workload_size" : 32, - "test_bounds" : "[0.00, 3.00]" + "test_bounds" : "[0.00, 5.00]" }, "LTC" : { "both_options" : "Threads=1 Hash=128", "both_time_control" : "40.0+0.4", - "workload_size" : 8 + "workload_size" : 16 }, "STC regression" : { @@ -45,7 +45,7 @@ "both_options" : "Threads=1 Hash=128", "both_time_control" : "40.0+0.4", "workload_size" : 8, - "test_bounds" : "[-5.00, 0.00]" + "test_bounds" : "[-7.00, 0.00]" }, "STC progtest" : { @@ -68,9 +68,9 @@ "tune_presets" : { "default" : { - "book_name" : "Pohl.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" + "book_name" : "UHO_Lichess_4852_v1.epd", + "win_adj" : "movecount=7 score=1200", + "draw_adj" : "movenumber=50 movecount=15 score=15" } }, diff --git a/Engines/Chessatron.json b/Engines/Chessatron.json new file mode 100644 index 00000000..a7a2f2fc --- /dev/null +++ b/Engines/Chessatron.json @@ -0,0 +1,35 @@ +{ + "private": false, + "nps": 970000, + "source": "https://github.com/dallinson/Chessatron", + + "build": { + "path": "", + "compilers": ["clang++>=17.0.0"], + "cpuflags": [], + "systems": ["Linux", "Windows"] + }, + + "test_presets": { + "default": { + "base_branch": "canon", + "book_name": "8moves_v3.epd", + "test_bounds": "[0.00, 5.00]", + "test_confidence": "[0.1, 0.05]", + "win_adj": "movecount=3 score=600", + "draw_adj": "movenumber=40 movecount=8 score=0", + "workload_size": 32 + }, + + "STC": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08" + }, + + "STC (non-reg)": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "test_bounds": "[-5.00, 0.00]" + } + } + } \ No newline at end of file diff --git a/Engines/Clover.json b/Engines/Clover.json deleted file mode 100644 index 401155bd..00000000 --- a/Engines/Clover.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "private" : false, - "nps" : 850000, - "source" : "https://github.com/lucametehau/CloverEngine", - - "build" : { - "path" : "src", - "compilers" : ["g++"], - "cpuflags" : [], - "systems" : ["Linux", "Windows", "Darwin"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "master", - "book_name" : "Pohl.epd", - "test_bounds" : "[0.00, 3.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=8", - "both_time_control" : "8.0+0.08", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=64", - "both_time_control" : "40.0+0.4", - "workload_size" : 8 - }, - - "SMP STC" : { - "both_options" : "Threads=8 Hash=64", - "both_time_control" : "4.0+0.04", - "workload_size" : 64 - }, - - "SMP LTC" : { - "both_options" : "Threads=8 Hash=256", - "both_time_control" : "16.0+0.16", - "workload_size" : 16 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "Pohl.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Demolito.json b/Engines/Demolito.json deleted file mode 100644 index 89662915..00000000 --- a/Engines/Demolito.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - 
"private" : false, - "nps" : 1382000, - "source" : "https://github.com/lucasart/Demolito", - - "build" : { - "path" : "src", - "compilers" : ["clang", "gcc"], - "cpuflags" : ["POPCNT"], - "systems" : ["Windows", "Linux"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "master", - "book_name" : "Pohl.epd", - "test_bounds" : "[0.00, 5.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=8", - "both_time_control" : "8.0+0.08", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "32.0+0.32", - "workload_size" : 8 - }, - - "SMP STC" : { - "both_options" : "Threads=8 Hash=32", - "both_time_control" : "4.0+0.04", - "workload_size" : 64 - }, - - "SMP LTC" : { - "both_options" : "Threads=8 Hash=128", - "both_time_control" : "16.0+0.16", - "workload_size" : 16 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "Pohl.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Dog.json b/Engines/Dog.json new file mode 100644 index 00000000..9dd3b4b6 --- /dev/null +++ b/Engines/Dog.json @@ -0,0 +1,44 @@ +{ + "private" : false, + "nps" : 2923562, + "source" : "https://github.com/folkertvanheusden/Dog", + + "build" : { + "path" : "app/src/linux-windows", + "compilers" : ["g++-14", "clang++-14", "clang++-19"], + "cpuflags" : [], + "systems" : ["Linux"] + }, + + "test_presets" : { + "default" : { + "base_branch" : "master", + "book_name" : "4moves_noob.epd", + "test_bounds" : "[0.00, 5.00]", + "test_confidence" : "[0.1, 0.05]", + "win_adj" : "None", + "draw_adj" : "None", + "syzygy_adj" : "7-Man" + }, + + "STC" : { + "both_options" : "Threads=1 Hash=8", + "both_time_control" : "8.0+0.08", + "workload_size" : 8 + }, + + "LTC" : { + "both_options" : "Threads=1 Hash=64", + "both_time_control" : "40.0+0.4", + "workload_size" : 4 + } + }, + + "tune_presets" : { + "default" : { + "book_name" : "4moves_noob.epd", + "win_adj" : "movecount=3 score=400", + "draw_adj" : "movenumber=40 movecount=8 score=10" + } + } +} diff --git a/Engines/Dragon.json b/Engines/Dragon.json deleted file mode 100644 index 0c6d781d..00000000 --- a/Engines/Dragon.json +++ /dev/null @@ -1,147 +0,0 @@ -{ - "private" : true, - "nps" : 584000, - "source" : "https://github.com/ChessCom/komodo", - - "build" : { - "cpuflags" : ["AVX2", "FMA", "POPCNT"], - "systems" : ["Linux"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "openbench", - "book_name" : "UHO_4060_v2.epd", - "test_bounds" : "[0.00, 2.00]", - "test_confidence" : "[0.10, 0.05]", - "win_adj" : "movecount=5 score=300", - "draw_adj" : "movenumber=32 movecount=6 score=8" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=16 Minimal=true", - "both_time_control" : "10.0+0.10", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=64 Minimal=true", - "both_time_control" : "60.0+0.6", - "workload_size" : 8, - "test_bounds" : "[0.00, 2.25]", - "upload_pgns" : "COMPACT" - }, - - "SMP STC" : { - "both_options" : "Threads=6 Hash=128 Minimal=true", - "both_time_control" : "10.0+0.1", - "workload_size" : 64, - "upload_pgns" : "COMPACT" - }, - - "SMP LTC" 
: { - "both_options" : "Threads=15 Hash=256 Minimal=true", - "both_time_control" : "30.0+0.3", - "workload_size" : 16, - "test_bounds" : "[0.50, 2.50]", - "upload_pgns" : "COMPACT" - }, - - "STC Simplification" : { - "both_options" : "Threads=1 Hash=16 Minimal=true", - "both_time_control" : "10.0+0.10", - "workload_size" : 32, - "test_bounds" : "[-1.75, 0.25]" - }, - - "LTC Simplification" : { - "both_options" : "Threads=1 Hash=64 Minimal=true", - "both_time_control" : "60.0+0.6", - "workload_size" : 8, - "test_bounds" : "[-1.75, 0.25]", - "upload_pgns" : "COMPACT" - }, - - "STC Regression" : { - "both_options" : "Threads=1 Hash=16 Minimal=true", - "both_time_control" : "10.0+0.10", - "workload_size" : 32, - "book_name" : "Pohl.epd", - "test_max_games" : 40000 - }, - - "LTC Regression" : { - "both_options" : "Threads=1 Hash=64 Minimal=true", - "both_time_control" : "60.0+0.6", - "workload_size" : 8, - "book_name" : "Pohl.epd", - "test_max_games" : 40000, - "upload_pgns" : "COMPACT" - }, - - "STC Fixed Games" : { - "both_options" : "Threads=1 Hash=16 Minimal=true", - "both_time_control" : "10.0+0.10", - "workload_size" : 32, - "test_max_games" : 40000 - }, - - "LTC Fixed Games" : { - "both_options" : "Threads=1 Hash=64 Minimal=true", - "both_time_control" : "60.0+0.6", - "workload_size" : 8, - "test_max_games" : 40000 - }, - - "57.5k Fixed Nodes" : { - "both_options" : "Threads=1 Hash=64 Minimal=true", - "both_time_control" : "N=57500", - "workload_size" : 128, - "test_max_games" : 40000 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "UHO_4060_v2.epd", - "win_adj" : "movecount=5 score=300", - "draw_adj" : "movenumber=32 movecount=6 score=8" - }, - - "STC" : { - "dev_options" : "Threads=1 Hash=16 Minimal=true", - "dev_time_control" : "10.0+0.10" - }, - - "MTC" : { - "dev_options" : "Threads=1 Hash=32 Minimal=true", - "dev_time_control" : "30.0+0.30" - }, - - "LTC" : { - "dev_options" : "Threads=1 Hash=64 Minimal=true", - "dev_time_control" : "60.0+0.60" - }, - - "VLTC" : { - "dev_options" : "Threads=1 Hash=128 Minimal=true", - "dev_time_control" : "180.0+1.80" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} \ No newline at end of file diff --git a/Engines/Carp.json b/Engines/Episteme.json similarity index 67% rename from Engines/Carp.json rename to Engines/Episteme.json index 7ad3cb3c..54d83610 100644 --- a/Engines/Carp.json +++ b/Engines/Episteme.json @@ -1,20 +1,20 @@ { "private" : false, - "nps" : 975000, - "source" : "https://github.com/dede1751/carp", + "nps" : 1400000, + "source" : "https://github.com/aletheiaaaaa/episteme", "build" : { "path" : "", - "compilers" : ["cargo>=1.70.0"], - "cpuflags" : [], - "systems" : ["Linux", "Windows", "Darwin"] + "compilers" : ["g++"], + "cpuflags" : ["AVX2"], + "systems" : ["Linux"] }, "test_presets" : { "default" : { - "base_branch" : "dev", - "book_name" : "Pohl.epd", + "base_branch" : "main", + "book_name" : "UHO_Lichess_4852_v1.epd", "test_bounds" : "[0.00, 5.00]", "test_confidence" : "[0.05, 0.05]", "win_adj" : "movecount=3 score=400", @@ -33,28 +33,28 @@ "workload_size" : 8 }, - "STC Reg" : { + "STC regression" : { "both_options" : "Threads=1 Hash=16", "both_time_control" : "8.0+0.08", "workload_size" : 32, - "test_bounds" : "[-4.00, 1.00]" + "test_bounds" : "[-5.00, 0.00]" }, - "LTC Reg" : { + "LTC regression" : { "both_options" : "Threads=1 
Hash=128", "both_time_control" : "40.0+0.4", "workload_size" : 8, - "test_bounds" : "[-4.00, 1.00]" + "test_bounds" : "[-5.00, 0.00]" }, - "STC Prog" : { + "STC progtest" : { "both_options" : "Threads=1 Hash=16", "both_time_control" : "8.0+0.08", "workload_size" : 32, "test_max_games" : 3000 }, - "LTC Prog" : { + "LTC progtest" : { "both_options" : "Threads=1 Hash=128", "both_time_control" : "60.0+0.6", "workload_size" : 8, @@ -77,9 +77,29 @@ "tune_presets" : { "default" : { - "book_name" : "Pohl.epd", + "book_name" : "UHO_4060_v2.epd", "win_adj" : "movecount=3 score=400", "draw_adj" : "movenumber=40 movecount=8 score=10" + }, + + "STC" : { + "dev_options" : "Threads=1 Hash=8", + "dev_time_control" : "10.0+0.10" + }, + + "MTC" : { + "dev_options" : "Threads=1 Hash=32", + "dev_time_control" : "30.0+0.30" + }, + + "LTC" : { + "dev_options" : "Threads=1 Hash=64", + "dev_time_control" : "60.0+0.60" + }, + + "VLTC" : { + "dev_options" : "Threads=1 Hash=128", + "dev_time_control" : "180.0+1.80" } }, diff --git a/Engines/Equisetum.json b/Engines/Equisetum.json deleted file mode 100644 index a1b2db22..00000000 --- a/Engines/Equisetum.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "private" : false, - "nps" : 1100000, - "source" : "https://github.com/justNo4b/Equisetum", - - "build" : { - "path" : "", - "compilers" : ["g++"], - "cpuflags" : ["POPCNT"], - "systems" : ["Windows", "Linux"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "main", - "book_name" : "Pohl.pgn", - "test_bounds" : "[0.00, 4.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=8", - "both_time_control" : "8.0+0.08", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=64", - "both_time_control" : "40.0+0.4", - "workload_size" : 8 - }, - - "SMP STC" : { - "both_options" : "Threads=8 Hash=64", - "both_time_control" : "5.0+0.05", - "workload_size" : 64 - }, - - "SMP LTC" : { - "both_options" : "Threads=8 Hash=256", - "both_time_control" : "20.0+0.2", - "workload_size" : 16 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "Pohl.pgn", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Ethereal.json b/Engines/Ethereal.json index b0471d2f..c91f6252 100644 --- a/Engines/Ethereal.json +++ b/Engines/Ethereal.json @@ -14,7 +14,7 @@ "default" : { "base_branch" : "master", - "book_name" : "UHO_4060_v2.epd", + "book_name" : "UHO_Lichess_4852_v1.epd", "test_bounds" : "[0.00, 3.00]", "test_confidence" : "[0.05, 0.05]", "win_adj" : "movecount=3 score=400", @@ -49,7 +49,7 @@ "tune_presets" : { "default" : { - "book_name" : "UHO_4060_v2.epd", + "book_name" : "UHO_Lichess_4852_v1.epd", "win_adj" : "movecount=3 score=400", "draw_adj" : "movenumber=40 movecount=8 score=10" }, diff --git a/Engines/Zahak.json b/Engines/GrandChess.json similarity index 53% rename from Engines/Zahak.json rename to Engines/GrandChess.json index 8fc0b73b..9a7b1e14 100644 --- a/Engines/Zahak.json +++ b/Engines/GrandChess.json @@ -1,71 +1,56 @@ { "private" : false, - "nps" : 949000, - "source" : "https://github.com/amanjpro/zahak", + "nps" : 643696, + "source" : 
"https://github.com/urisinger/grand-chess-rs", "build" : { "path" : "", - "compilers" : ["go"], - "cpuflags" : ["AVX", "POPCNT"], - "systems" : ["Windows", "Linux"] + "compilers" : ["cargo>=1.65.0-nightly"], + "cpuflags" : [], + "systems" : ["windows", "Linux"] }, "test_presets" : { - "default" : { "base_branch" : "master", - "book_name" : "Pohl.epd", + "book_name" : "4moves_noob.epd", "test_bounds" : "[0.00, 5.00]", - "test_confidence" : "[0.05, 0.05]", + "test_confidence" : "[0.1, 0.05]", "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" + "draw_adj" : "movenumber=40 movecount=8 score=10", + "syzygy_adj" : "Optional" }, "STC" : { "both_options" : "Threads=1 Hash=8", "both_time_control" : "8.0+0.08", - "workload_size" : 32 + "workload_size" : 8 }, "LTC" : { "both_options" : "Threads=1 Hash=64", "both_time_control" : "40.0+0.4", - "workload_size" : 8 + "workload_size" : 4 }, "SMP STC" : { - "both_options" : "Threads=8 Hash=64", + "both_options" : "Threads=1 Hash=64", "both_time_control" : "5.0+0.05", - "workload_size" : 64 + "workload_size" : 8 }, "SMP LTC" : { - "both_options" : "Threads=8 Hash=256", + "both_options" : "Threads=1 Hash=256", "both_time_control" : "20.0+0.2", - "workload_size" : 16 + "workload_size" : 4 } }, "tune_presets" : { - "default" : { - "book_name" : "Pohl.epd", + "book_name" : "4moves_noob.epd", "win_adj" : "movecount=3 score=400", "draw_adj" : "movenumber=40 movecount=8 score=10" } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } } } diff --git a/Engines/Halogen.json b/Engines/Halogen.json deleted file mode 100644 index 3800e003..00000000 --- a/Engines/Halogen.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "private" : false, - "nps" : 1893000, - "source" : "https://github.com/KierenP/Halogen", - - "build" : { - "path" : "src", - "compilers" : ["g++>=9.0.0", "clang++>=10.0.0"], - "cpuflags" : ["AVX2", "FMA", "POPCNT"], - "systems" : ["Windows", "Linux"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "master", - "book_name" : "UHO_4060_v2.epd", - "test_bounds" : "[0.00, 5.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=8 OutputLevel=Minimal", - "both_time_control" : "8.0+0.08", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=64 OutputLevel=Minimal", - "both_time_control" : "40.0+0.4", - "workload_size" : 8 - }, - - "SMP STC" : { - "both_options" : "Threads=8 Hash=64 OutputLevel=Minimal", - "both_time_control" : "5.0+0.05", - "workload_size" : 64 - }, - - "SMP LTC" : { - "both_options" : "Threads=8 Hash=256 OutputLevel=Minimal", - "both_time_control" : "20.0+0.2", - "workload_size" : 16 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "UHO_4060_v2.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Koivisto.json b/Engines/Koivisto.json deleted file mode 100644 index c5137a24..00000000 --- a/Engines/Koivisto.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "private" : false, - "nps" 
: 1327000, - "source" : "https://github.com/Luecx/Koivisto", - - "build" : { - "path" : "src_files", - "compilers" : ["g++"], - "cpuflags" : ["AVX2", "FMA", "POPCNT"], - "systems" : ["Windows", "Linux"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "master", - "book_name" : "Pohl.epd", - "test_bounds" : "[0.00, 2.50]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=8", - "both_time_control" : "8.0+0.08", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=64", - "both_time_control" : "40.0+0.4", - "workload_size" : 8 - }, - - "SMP STC" : { - "both_options" : "Threads=8 Hash=64", - "both_time_control" : "5.0+0.05", - "workload_size" : 64 - }, - - "SMP LTC" : { - "both_options" : "Threads=8 Hash=256", - "both_time_control" : "20.0+0.2", - "workload_size" : 16 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "Pohl.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Laser.json b/Engines/Laser.json deleted file mode 100644 index be393376..00000000 --- a/Engines/Laser.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "private" : false, - "nps" : 933000, - "source" : "https://github.com/jeffreyan11/uci-chess-engine", - - "build" : { - "path" : "src", - "compilers" : ["g++"], - "cpuflags" : ["POPCNT"], - "systems" : ["Windows", "Linux"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "master", - "book_name" : "Pohl.epd", - "test_bounds" : "[0.00, 5.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=8", - "both_time_control" : "8.0+0.08", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=64", - "both_time_control" : "40.0+0.4", - "workload_size" : 8 - }, - - "SMP STC" : { - "both_options" : "Threads=8 Hash=64", - "both_time_control" : "5.0+0.05", - "workload_size" : 64 - }, - - "SMP LTC" : { - "both_options" : "Threads=8 Hash=256", - "both_time_control" : "20.0+0.2", - "workload_size" : 16 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "Pohl.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Midnight.json b/Engines/Midnight.json deleted file mode 100644 index 0bf3ffbb..00000000 --- a/Engines/Midnight.json +++ /dev/null @@ -1,89 +0,0 @@ -{ - "private" : false, - "nps" : 912000, - "source" : "https://github.com/archishou/MidnightChessEngine", - - "build" : { - "path" : "", - "compilers" : ["clang++", "g++"], - "cpuflags" : [], - "systems" : ["Linux", "Windows", "Darwin"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "master", - "book_name" : "Pohl.epd", - "test_bounds" : "[0.00, 5.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - }, 
- - "STC" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "40.0+0.4", - "workload_size" : 8 - }, - - "STC Simple" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", - "workload_size" : 32, - "test_bounds" : "[-5.00, 0.00]" - }, - - "LTC Simple" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "40.0+0.4", - "workload_size" : 8, - "test_bounds" : "[-5.00, 0.00]" - }, - - "STC Prog" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", - "workload_size" : 32, - "book_name" : "8moves_v3.epd", - "test_max_games" : 3000 - }, - - "LTC Prog" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "40.0+0.4", - "workload_size" : 8, - "book_name" : "8moves_v3.epd", - "test_max_games" : 1000 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "Pohl.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Pawnocchio.json b/Engines/Pawnocchio.json new file mode 100644 index 00000000..62e847f4 --- /dev/null +++ b/Engines/Pawnocchio.json @@ -0,0 +1,115 @@ +{ + "private": false, + "nps": 1000000, + "source": "https://github.com/JonathanHallstrom/pawnocchio", + + "build": { + "path": "", + "compilers": ["zig"], + "cpuflags": [], + "systems": ["Linux", "Windows", "Darwin"] + }, + + "test_presets": { + "default": { + "book_name": "UHO_Lichess_4852_v1.epd", + "test_bounds": "[0.00, 4.00]", + "test_confidence": "[0.1, 0.05]", + "win_adj": "movecount=3 score=400", + "draw_adj": "movenumber=40 movecount=8 score=10" + }, + + "STC": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32 + }, + + "LTC": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8 + }, + + "STC regression": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32, + "test_bounds": "[-3.00, 1.00]" + }, + + "LTC regression": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8, + "test_bounds": "[-3.00, 1.00]" + }, + + "STC progtest": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32, + "test_max_games": 3000 + }, + + "LTC progtest": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8, + "test_max_games": 1000 + }, + + "SMP STC": { + "both_options": "Threads=4 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32 + }, + + "SMP LTC": { + "both_options": "Threads=4 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8 + } + }, + + "tune_presets": { + "default": { + "book_name": "UHO_4060_v2.epd", + "win_adj": "movecount=3 score=400", + "draw_adj": "movenumber=40 movecount=8 score=10" + }, + + "STC": { + "dev_options": "Threads=1 Hash=16", + "dev_time_control": "8.0+0.08" + }, + + "MTC": { + "dev_options": "Threads=1 Hash=32", + "dev_time_control": "20.0+0.20" + }, + + "LTC": { + "dev_options": "Threads=1 Hash=128", + "dev_time_control": "40.0+0.40" + }, + + "VLTC": { + "dev_options": "Threads=1 Hash=256", + "dev_time_control": 
"80.0+0.8" + } + }, + + "datagen_presets": { + "default": { + "win_adj": "None", + "draw_adj": "None", + "workload_size": 128 + }, + + "40k Nodes": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "N=40000" + } + } +} diff --git a/Engines/Perseus.json b/Engines/Perseus.json new file mode 100644 index 00000000..c34ef9ca --- /dev/null +++ b/Engines/Perseus.json @@ -0,0 +1,74 @@ +{ + "private" : false, + "nps" : 1500000, + "source" : "https://github.com/TheRealGioviok/Perseus-Engine", + + "build" : { + "path" : "src", + "compilers" : ["clang++"], + "cpuflags" : [], + "systems" : ["Linux", "Windows"] + }, + + "test_presets" : { + + "default" : { + "base_branch" : "main", + "book_name" : "UHO_Lichess_4852_v1.epd", + "test_bounds" : "[0.00, 3.00]", + "test_confidence" : "[0.05, 0.05]", + "win_adj" : "movecount=3 score=400", + "draw_adj" : "movenumber=40 movecount=12 score=5" + }, + + "STC" : { + "both_options" : "Threads=1 Hash=16", + "both_time_control" : "8.0+0.08", + "workload_size" : 32 + }, + + "MTC" : { + "both_options" : "Threads=1 Hash=64", + "both_time_control" : "30.0+0.30", + "workload_size" : 8 + }, + + "LTC" : { + "both_options" : "Threads=1 Hash=64", + "both_time_control" : "60.0+0.60", + "workload_size" : 8 + }, + + "STC regression" : { + "both_options" : "Threads=1 Hash=16", + "both_time_control" : "10.0+0.10", + "workload_size" : 32, + "test_bounds" : "[-3.00, 0.00]" + }, + + "LTC regression" : { + "both_options" : "Threads=1 Hash=64", + "both_time_control" : "60.0+0.60", + "workload_size" : 8, + "test_bounds" : "[-3.00, 1.00]" + } + }, + + "tune_presets" : { + + "default" : { + "book_name" : "UHO_Lichess_4852_v1.epd", + "win_adj" : "movecount=3 score=500", + "draw_adj" : "movenumber=40 movecount=8 score=5" + } + }, + + "datagen_presets" : { + + "default" : { + "win_adj" : "None", + "draw_adj" : "None", + "workload_size" : 128 + } + } +} diff --git a/Engines/Pytteliten.json b/Engines/Pytteliten.json deleted file mode 100644 index 2eef9f96..00000000 --- a/Engines/Pytteliten.json +++ /dev/null @@ -1,89 +0,0 @@ -{ - "private" : false, - "nps" : 5288029, - "source" : "https://github.com/crippa1337/Pytteliten", - - "build" : { - "path" : "", - "compilers" : ["clang++", "g++"], - "cpuflags" : [], - "systems" : ["Linux", "Windows", "Darwin"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "master", - "book_name" : "4moves_noob.epd", - "test_bounds" : "[0.00, 10.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "40.0+0.4", - "workload_size" : 8 - }, - - "STC regression" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", - "workload_size" : 32, - "test_bounds" : "[-10.00, 0.00]" - }, - - "LTC regression" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "40.0+0.4", - "workload_size" : 8, - "test_bounds" : "[-10.00, 0.00]" - }, - - "STC progtest" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "10.0+0.1", - "workload_size" : 32, - "book_name" : "8moves_v3.epd", - "test_max_games" : 4000 - }, - - "LTC progtest" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "60.0+0.6", - "workload_size" : 8, - "book_name" : "8moves_v3.epd", - "test_max_games" : 2000 - } - }, - - "tune_presets" : { - - 
"default" : { - "book_name" : "4moves_noob.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Rose.json b/Engines/Rose.json new file mode 100644 index 00000000..00e263e3 --- /dev/null +++ b/Engines/Rose.json @@ -0,0 +1,116 @@ +{ + "private": false, + "nps": 17901866, + "source": "https://github.com/87flowers/Rose", + + "build": { + "path" : "", + "compilers" : ["clang++"], + "cpuflags" : ["AVX512VNNI"], + "systems" : ["Linux", "Windows", "Darwin"] + }, + + "test_presets": { + "default": { + "base_branch": "main", + "book_name": "8moves_v3.epd", + "test_bounds": "[0, 5]", + "test_confidence": "[0.1, 0.05]", + "win_adj": "None", + "draw_adj": "None" + }, + + "STC": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32 + }, + + "LTC": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8 + }, + + "STC regression": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32, + "test_bounds": "[-5, 0]" + }, + + "LTC regression": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8, + "test_bounds": "[-5, 0]" + }, + + "STC progtest": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32, + "test_max_games": 3000 + }, + + "LTC progtest": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "60.0+0.6", + "workload_size": 8, + "test_max_games": 1000 + }, + + "SMP STC": { + "both_options": "Threads=4 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32 + }, + + "SMP LTC": { + "both_options": "Threads=4 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8 + } + }, + + "tune_presets": { + "default": { + "book_name": "UHO_4060_v2.epd", + "win_adj": "movecount=3 score=400", + "draw_adj": "movenumber=40 movecount=8 score=10" + }, + + "STC": { + "dev_options": "Threads=1 Hash=8", + "dev_time_control": "10.0+0.10" + }, + + "MTC": { + "dev_options": "Threads=1 Hash=32", + "dev_time_control": "30.0+0.30" + }, + + "LTC": { + "dev_options": "Threads=1 Hash=64", + "dev_time_control": "60.0+0.60" + }, + + "VLTC": { + "dev_options": "Threads=1 Hash=128", + "dev_time_control": "180.0+1.80" + } + }, + + "datagen_presets": { + "default": { + "win_adj": "None", + "draw_adj": "None", + "workload_size": 128 + }, + + "40k Nodes": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "N=40000" + } + } +} diff --git a/Engines/RubiChess.json b/Engines/RubiChess.json deleted file mode 100644 index 0245fe7c..00000000 --- a/Engines/RubiChess.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "private" : false, - "nps" : 760000, - "source" : "https://github.com/Matthies/RubiChess", - - "build" : { - "path" : "src", - "compilers" : ["g++"], - "cpuflags" : ["POPCNT"], - "systems" : ["Windows", "Linux"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "master", - "book_name" : "Pohl.epd", - "test_bounds" : "[0.00, 5.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=8", - "both_time_control" : "10.0+0.1", - "workload_size" : 32 - }, - - 
"LTC" : { - "both_options" : "Threads=1 Hash=64", - "both_time_control" : "60.0+0.6", - "workload_size" : 8 - }, - - "SMP STC" : { - "both_options" : "Threads=8 Hash=64", - "both_time_control" : "5.0+0.05", - "workload_size" : 64 - }, - - "SMP LTC" : { - "both_options" : "Threads=8 Hash=256", - "both_time_control" : "20.0+0.2", - "workload_size" : 16 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "Pohl.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Seer.json b/Engines/Seer.json deleted file mode 100644 index 803cbf62..00000000 --- a/Engines/Seer.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "private" : false, - "nps" : 785000, - "source" : "https://github.com/connormcmonigle/seer-nnue", - - "build" : { - "path" : "build", - "compilers" : ["g++>=9.0.0"], - "cpuflags" : ["AVX2", "FMA", "POPCNT"], - "systems" : ["Windows", "Linux"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "main", - "book_name" : "Pohl.epd", - "test_bounds" : "[0.00, 5.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=64", - "both_time_control" : "40.0+0.4", - "workload_size" : 8 - }, - - "SMP STC" : { - "both_options" : "Threads=8 Hash=64", - "both_time_control" : "5.0+0.05", - "workload_size" : 64 - }, - - "SMP LTC" : { - "both_options" : "Threads=8 Hash=256", - "both_time_control" : "20.0+0.2", - "workload_size" : 16 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "Pohl.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Stash.json b/Engines/Stash.json index 37b713e2..bc52daee 100644 --- a/Engines/Stash.json +++ b/Engines/Stash.json @@ -1,71 +1,89 @@ { - "private" : false, - "nps" : 1615000, - "source" : "https://github.com/mhouppin/stash-bot", + "private": false, + "nps": 1615000, + "source": "https://github.com/mhouppin/stash-bot", - "build" : { - "path" : "src", - "compilers" : ["gcc", "clang"], - "cpuflags" : ["POPCNT"], - "systems" : ["Windows", "Linux"] + "build": { + "path": "src", + "compilers": ["gcc", "clang"], + "cpuflags": [], + "systems": ["Windows", "Linux"] }, - "test_presets" : { - - "default" : { - "base_branch" : "master", - "book_name" : "4moves_noob.epd", - "test_bounds" : "[0.00, 5.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" + "test_presets": { + "default": { + "base_branch": "master", + "book_name": "UHO_4060_v2.epd", + "test_bounds": "[0.00, 5.00]", + "test_confidence": "[0.05, 0.05]", + "win_adj": "movecount=3 score=400", + "draw_adj": "movenumber=40 movecount=8 score=10" }, - - "STC" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "8.0+0.08", - "workload_size" : 32 + "STC": { + "both_options": 
"Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32 + }, + "LTC": { + "both_options": "Threads=1 Hash=64", + "both_time_control": "40.0+0.4", + "workload_size": 8 }, - "LTC" : { - "both_options" : "Threads=1 Hash=64", - "both_time_control" : "40.0+0.4", - "workload_size" : 8 + "Nonreg STC": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32, + "test_bounds": "[-4.00, 1.00]" }, - "SMP STC" : { - "both_options" : "Threads=8 Hash=64", - "both_time_control" : "5.0+0.05", - "workload_size" : 64 + "Nonreg LTC": { + "both_options": "Threads=1 Hash=64", + "both_time_control": "40.0+0.4", + "workload_size": 8, + "test_bounds": "[-4.00, 1.00]" }, - "SMP LTC" : { - "both_options" : "Threads=8 Hash=256", - "both_time_control" : "20.0+0.2", - "workload_size" : 16 + "SMP STC": { + "both_options": "Threads=8 Hash=64", + "both_time_control": "5.0+0.05", + "workload_size": 64 + }, + "SMP LTC": { + "both_options": "Threads=8 Hash=256", + "both_time_control": "20.0+0.2", + "workload_size": 16 } }, - "tune_presets" : { + "tune_presets": { + "default": { + "book_name": "UHO_4060_v2.epd", + "win_adj": "movecount=3 score=400", + "draw_adj": "movenumber=40 movecount=8 score=10" + }, + + "STC": { + "dev_options": "Threads=1 Hash=16", + "dev_time_control": "8.0+0.08" + }, - "default" : { - "book_name" : "4moves_noob.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" + "LTC": { + "dev_options": "Threads=1 Hash=64", + "dev_time_control": "40.0+0.4" } }, - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 + "datagen_presets": { + "default": { + "win_adj": "movecount=3 score=1410", + "draw_adj": "movenumber=40 movecount=8 score=15", + "workload_size": 256 }, - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" + "16k Nodes": { + "both_options": "Threads=1 Hash=1 NormalizeScore=false", + "both_time_control": "N=16000" } } } diff --git a/Engines/Stockfish.json b/Engines/Stockfish.json index a09d424a..7f436a61 100644 --- a/Engines/Stockfish.json +++ b/Engines/Stockfish.json @@ -13,8 +13,8 @@ "test_presets" : { "default" : { - "base_branch" : "ob_16.1", - "book_name" : "Pohl.epd", + "base_branch" : "ob_17", + "book_name" : "UHO_Lichess_4852_v1.epd", "test_bounds" : "[0.00, 2.00]", "test_confidence" : "[0.05, 0.05]", "win_adj" : "movecount=5 score=600", @@ -95,7 +95,7 @@ "tune_presets" : { "default" : { - "book_name" : "Pohl.epd", + "book_name" : "UHO_Lichess_4852_v1.epd", "win_adj" : "movecount=5 score=600", "draw_adj" : "movenumber=32 movecount=6 score=15" } diff --git a/Engines/Svart.json b/Engines/Svart.json deleted file mode 100644 index 7e2d947e..00000000 --- a/Engines/Svart.json +++ /dev/null @@ -1,89 +0,0 @@ -{ - "private" : false, - "nps" : 1350000, - "source" : "https://github.com/crippa1337/svart", - - "build" : { - "path" : "", - "compilers" : ["cargo>=1.70.0"], - "cpuflags" : [], - "systems" : ["Linux", "Windows", "Darwin"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "master", - "book_name" : "Pohl.epd", - "test_bounds" : "[0.00, 5.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "40.0+0.4", - 
"workload_size" : 8 - }, - - "STC regression" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", - "workload_size" : 32, - "test_bounds" : "[-5.00, 0.00]" - }, - - "LTC regression" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "40.0+0.4", - "workload_size" : 8, - "test_bounds" : "[-5.00, 0.00]" - }, - - "STC progtest" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "10.0+0.1", - "workload_size" : 32, - "book_name" : "8moves_v3.epd", - "test_max_games" : 4000 - }, - - "LTC progtest" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "60.0+0.6", - "workload_size" : 8, - "book_name" : "8moves_v3.epd", - "test_max_games" : 2000 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "Pohl.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Drofa.json b/Engines/Tantabus.json similarity index 64% rename from Engines/Drofa.json rename to Engines/Tantabus.json index 91501868..47692783 100644 --- a/Engines/Drofa.json +++ b/Engines/Tantabus.json @@ -1,71 +1,56 @@ { "private" : false, - "nps" : 1165000, - "source" : "https://github.com/justNo4b/Drofa", + "nps" : 2850322, + "source" : "https://github.com/analog-hors/tantabus", "build" : { "path" : "", - "compilers" : ["g++"], - "cpuflags" : ["POPCNT"], - "systems" : ["Windows", "Linux"] + "compilers" : ["cargo>=1.62.0"], + "cpuflags" : [], + "systems" : ["windows", "Linux"] }, "test_presets" : { - "default" : { "base_branch" : "master", "book_name" : "4moves_noob.epd", "test_bounds" : "[0.00, 5.00]", - "test_confidence" : "[0.05, 0.05]", + "test_confidence" : "[0.1, 0.05]", "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" + "draw_adj" : "movenumber=40 movecount=8 score=10", + "syzygy_adj" : "Optional" }, "STC" : { "both_options" : "Threads=1 Hash=8", "both_time_control" : "8.0+0.08", - "workload_size" : 32 + "workload_size" : 8 }, "LTC" : { "both_options" : "Threads=1 Hash=64", "both_time_control" : "40.0+0.4", - "workload_size" : 8 + "workload_size" : 4 }, "SMP STC" : { "both_options" : "Threads=8 Hash=64", "both_time_control" : "5.0+0.05", - "workload_size" : 64 + "workload_size" : 8 }, "SMP LTC" : { "both_options" : "Threads=8 Hash=256", "both_time_control" : "20.0+0.2", - "workload_size" : 16 + "workload_size" : 4 } }, "tune_presets" : { - "default" : { "book_name" : "4moves_noob.epd", "win_adj" : "movecount=3 score=400", "draw_adj" : "movenumber=40 movecount=8 score=10" } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } } } diff --git a/Engines/Toad.json b/Engines/Toad.json new file mode 100644 index 00000000..8eacdda0 --- /dev/null +++ b/Engines/Toad.json @@ -0,0 +1,118 @@ +{ + "private": false, + "nps": 1810614, + "source": "https://github.com/dannyhammer/toad", + + "build": { + "path": "", + "compilers": ["cargo>=1.80.1"], + "cpuflags": [], + "systems": ["Linux", "Windows", "Darwin"] + }, + + "test_presets": { + "default": { + "base_branch": "main", + "dev_branch": "dev", + "book_name": "UHO_Lichess_4852_v1.epd", + "test_bounds": "[0.00, 5.00]", 
+ "test_confidence": "[0.10, 0.05]", + "win_adj": "movecount=3 score=400", + "draw_adj": "None", + "priority": "-42" + }, + + "STC": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32 + }, + + "LTC": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8 + }, + + "STC regression": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32, + "test_bounds": "[-5.00, 0.00]" + }, + + "LTC regression": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8, + "test_bounds": "[-5.00, 0.00]" + }, + + "STC progtest": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32, + "test_max_games": 3000 + }, + + "LTC progtest": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "60.0+0.6", + "workload_size": 8, + "test_max_games": 1000 + }, + + "SMP STC": { + "both_options": "Threads=4 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32 + }, + + "SMP LTC": { + "both_options": "Threads=4 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8 + } + }, + + "tune_presets": { + "default": { + "book_name": "UHO_4060_v2.epd", + "win_adj": "movecount=3 score=400", + "draw_adj": "movenumber=40 movecount=8 score=10" + }, + + "STC": { + "dev_options": "Threads=1 Hash=8", + "dev_time_control": "10.0+0.10" + }, + + "MTC": { + "dev_options": "Threads=1 Hash=32", + "dev_time_control": "30.0+0.30" + }, + + "LTC": { + "dev_options": "Threads=1 Hash=64", + "dev_time_control": "60.0+0.60" + }, + + "VLTC": { + "dev_options": "Threads=1 Hash=128", + "dev_time_control": "180.0+1.80" + } + }, + + "datagen_presets": { + "default": { + "win_adj": "None", + "draw_adj": "None", + "workload_size": 128 + }, + + "40k Nodes": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "N=40000" + } + } +} diff --git a/Engines/Torch.json b/Engines/Torch.json index da993d6d..d76146d7 100644 --- a/Engines/Torch.json +++ b/Engines/Torch.json @@ -12,7 +12,7 @@ "default" : { "base_branch" : "master", - "book_name" : "UHO_4060_v2.epd", + "book_name" : "UHO_Lichess_4852_v1.epd", "test_bounds" : "[0.00, 2.00]", "test_confidence" : "[0.05, 0.05]", "win_adj" : "movecount=5 score=300", @@ -33,15 +33,15 @@ "upload_pgns" : "COMPACT" }, - "SMP STC" : { + "STC 6-threads" : { "both_options" : "Threads=6 Hash=128 Minimal=true", "both_time_control" : "10.0+0.1", "workload_size" : 64, "upload_pgns" : "COMPACT" }, - "SMP LTC" : { - "both_options" : "Threads=15 Hash=256 Minimal=true", + "LTC 15-threads" : { + "both_options" : "Threads=15 Hash=512 Minimal=true", "both_time_control" : "30.0+0.3", "workload_size" : 16, "test_bounds" : "[0.50, 2.50]", @@ -63,28 +63,12 @@ "upload_pgns" : "COMPACT" }, - "STC Regression" : { - "both_options" : "Threads=1 Hash=16 Minimal=true", - "both_time_control" : "15.0+0.15", - "workload_size" : 32, - "book_name" : "Pohl.epd", - "test_max_games" : 40000 - }, - - "LTC Regression" : { - "both_options" : "Threads=1 Hash=64 Minimal=true", - "both_time_control" : "60.0+0.6", - "workload_size" : 8, - "book_name" : "Pohl.epd", - "test_max_games" : 40000, - "upload_pgns" : "COMPACT" - }, - - "STC Fixed Games" : { + "Speed Test" : { "both_options" : "Threads=1 Hash=16 Minimal=true", - "both_time_control" : "15.0+0.15", - "workload_size" : 32, - "test_max_games" : 40000 + "both_time_control" : "N=100000", + "workload_size" : 128, + "test_max_games" : 80000, + "upload_pgns" : 
"VERBOSE" }, "LTC Fixed Games" : { @@ -92,30 +76,15 @@ "both_time_control" : "60.0+0.6", "workload_size" : 8, "test_max_games" : 40000 - }, - - "57.5k Fixed Nodes" : { - "both_options" : "Threads=1 Hash=64 Minimal=true", - "both_time_control" : "N=57500", - "workload_size" : 128, - "test_max_games" : 40000 - }, - - "Speed Test" : { - "both_options" : "Threads=1 Hash=16 Minimal=true", - "both_time_control" : "N=100000", - "workload_size" : 128, - "test_max_games" : 80000, - "upload_pgns" : "VERBOSE" } }, "tune_presets" : { "default" : { - "book_name" : "UHO_4060_v2.epd", - "win_adj" : "movecount=5 score=300", - "draw_adj" : "movenumber=32 movecount=6 score=8" + "book_name" : "UHO_Lichess_4852_v1.epd", + "win_adj" : "movecount=5 score=300", + "draw_adj" : "movenumber=32 movecount=6 score=8" }, "STC" : { @@ -123,11 +92,6 @@ "dev_time_control" : "15.0+0.15" }, - "MTC" : { - "dev_options" : "Threads=1 Hash=32 Minimal=true", - "dev_time_control" : "30.0+0.30" - }, - "LTC" : { "dev_options" : "Threads=1 Hash=64 Minimal=true", "dev_time_control" : "60.0+0.60" @@ -136,30 +100,32 @@ "VLTC" : { "dev_options" : "Threads=1 Hash=128 Minimal=true", "dev_time_control" : "180.0+1.80" + }, + + "LTC 6-threads" : { + "dev_options" : "Threads=6 Hash=512 Minimal=true", + "dev_time_control" : "60.0+0.60" } }, "datagen_presets" : { "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 64 - }, - - "57.5k Hard Nodes" : { - "both_options" : "Threads=1 Hash=16 Minimal=true Normalize=false UseSoftNodes=false", - "both_time_control" : "N=57500" + "both_options" : "Threads=1 Hash=16 Minimal=true Normalize=false UseSoftNodes=true NodeFudging=4000", + "both_time_control" : "N=32000", + "datagen_max_games" : 20000000, + "draw_adj" : "None", + "upload_pgns" : "COMPACT", + "win_adj" : "None", + "workload_size" : 64 }, - "36k Soft Nodes" : { - "both_options" : "Threads=1 Hash=16 Minimal=true Normalize=false UseSoftNodes=true", - "both_time_control" : "N=36000" + "Default + UHOLichess" : { + "book_name" : "UHO_Lichess_4852_v1.epd" }, - "36k Soft Nodes w/ 1k Node Fudging" : { - "both_options" : "Threads=1 Hash=16 Minimal=true Normalize=false UseSoftNodes=true NodeFudging=1000", - "both_time_control" : "N=36000" + "Default + No Book" : { + "book_name" : "None" } } } diff --git a/Engines/Wahoo.json b/Engines/Wahoo.json deleted file mode 100644 index 9777215a..00000000 --- a/Engines/Wahoo.json +++ /dev/null @@ -1,89 +0,0 @@ -{ - "private" : false, - "nps" : 2680000, - "source" : "https://github.com/spamdrew128/Wahoo", - - "build" : { - "path" : "", - "compilers" : ["cargo>=1.60.0"], - "cpuflags" : [], - "systems" : ["Linux", "Windows", "Darwin"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "dev", - "book_name" : "noob_4moves.epd", - "test_bounds" : "[0.00, 5.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "None", - "draw_adj" : "None" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "40.0+0.4", - "workload_size" : 8 - }, - - "STC Simple" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", - "workload_size" : 32, - "test_bounds" : "[-5.00, 0.00]" - }, - - "LTC Simple" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "40.0+0.4", - "workload_size" : 8, - "test_bounds" : "[-5.00, 0.00]" - }, - - "STC Prog" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", 
- "workload_size" : 32, - "book_name" : "8moves_v3.epd", - "test_max_games" : 3000 - }, - - "LTC Prog" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "40.0+0.4", - "workload_size" : 8, - "book_name" : "8moves_v3.epd", - "test_max_games" : 1000 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "noob_4moves.epd", - "win_adj" : "None", - "draw_adj" : "None" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Weiss.json b/Engines/Weiss.json index 923b9d5b..73d6e4c5 100644 --- a/Engines/Weiss.json +++ b/Engines/Weiss.json @@ -14,7 +14,7 @@ "default" : { "base_branch" : "master", - "book_name" : "UHO_4060_v2.epd", + "book_name" : "UHO_Lichess_4852_v1.epd", "test_bounds" : "[0.00, 3.00]", "test_confidence" : "[0.05, 0.05]", "win_adj" : "movecount=3 score=400", @@ -22,39 +22,39 @@ }, "STC" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", + "both_options" : "Threads=1 Hash=32 Minimal=true", + "both_time_control" : "10.0+0.10", "workload_size" : 32 }, "LTC" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "40.0+0.4", + "both_options" : "Threads=1 Hash=128 Minimal=true", + "both_time_control" : "60.0+0.6", "workload_size" : 8 }, "STC Simplify" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", + "both_options" : "Threads=1 Hash=32 Minimal=true", + "both_time_control" : "10.0+0.10", "workload_size" : 32, "test_bounds" : "[-3.00, 0.00]" }, "LTC Simplify" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "40.0+0.4", + "both_options" : "Threads=1 Hash=128 Minimal=true", + "both_time_control" : "60.0+0.6", "workload_size" : 8, "test_bounds" : "[-3.00, 0.00]" }, "SMP STC" : { - "both_options" : "Threads=8 Hash=128", + "both_options" : "Threads=8 Hash=128 Minimal=true", "both_time_control" : "5.0+0.05", "workload_size" : 64 }, "SMP LTC" : { - "both_options" : "Threads=8 Hash=512", + "both_options" : "Threads=8 Hash=512 Minimal=true", "both_time_control" : "20.0+0.2", "workload_size" : 16 } @@ -69,13 +69,13 @@ }, "STC" : { - "dev_options" : "Threads=1 Hash=32", - "dev_time_control" : "8.0+0.08" + "dev_options" : "Threads=1 Hash=32 Minimal=true", + "dev_time_control" : "10.0+0.10" }, "LTC" : { - "dev_options" : "Threads=1 Hash=128", - "dev_time_control" : "40.0+0.4" + "dev_options" : "Threads=1 Hash=128 Minimal=true", + "dev_time_control" : "60.0+0.6" } }, @@ -88,7 +88,7 @@ }, "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", + "both_options" : "Threads=1 Hash=16 Minimal=true", "both_time_control" : "N=40000" } } diff --git a/Engines/Willow.json b/Engines/Willow.json deleted file mode 100644 index 3c4e6d31..00000000 --- a/Engines/Willow.json +++ /dev/null @@ -1,59 +0,0 @@ -{ - "private" : false, - "nps" : 850000, - "source" : "https://github.com/Adam-Kulju/Willow", - - "build" : { - "path" : "", - "compilers" : ["clang++", "g++"], - "cpuflags" : [], - "systems" : ["Linux", "Windows", "Darwin"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "main", - "book_name" : "4moves_noob.epd", - "test_bounds" : "[0.00, 5.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=32", - "both_time_control" : "8.0+0.08", - "workload_size" : 
32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=128", - "both_time_control" : "60.0+0.6", - "workload_size" : 8 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "4moves_noob.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Winter.json b/Engines/Winter.json deleted file mode 100644 index cd6aced4..00000000 --- a/Engines/Winter.json +++ /dev/null @@ -1,71 +0,0 @@ -{ - "private" : false, - "nps" : 360000, - "source" : "https://github.com/rosenthj/Winter", - - "build" : { - "path" : "", - "compilers" : ["clang++", "g++"], - "cpuflags" : ["POPCNT"], - "systems" : ["Windows", "Linux"] - }, - - "test_presets" : { - - "default" : { - "base_branch" : "master", - "book_name" : "Pohl.epd", - "test_bounds" : "[0.00, 5.00]", - "test_confidence" : "[0.05, 0.05]", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - }, - - "STC" : { - "both_options" : "Threads=1 Hash=8", - "both_time_control" : "8.0+0.08", - "workload_size" : 32 - }, - - "LTC" : { - "both_options" : "Threads=1 Hash=64", - "both_time_control" : "40.0+0.4", - "workload_size" : 8 - }, - - "SMP STC" : { - "both_options" : "Threads=8 Hash=64", - "both_time_control" : "5.0+0.05", - "workload_size" : 64 - }, - - "SMP LTC" : { - "both_options" : "Threads=8 Hash=256", - "both_time_control" : "20.0+0.2", - "workload_size" : 16 - } - }, - - "tune_presets" : { - - "default" : { - "book_name" : "Pohl.epd", - "win_adj" : "movecount=3 score=400", - "draw_adj" : "movenumber=40 movecount=8 score=10" - } - }, - - "datagen_presets" : { - - "default" : { - "win_adj" : "None", - "draw_adj" : "None", - "workload_size" : 128 - }, - - "40k Nodes" : { - "both_options" : "Threads=1 Hash=16", - "both_time_control" : "N=40000" - } - } -} diff --git a/Engines/Yukari.json b/Engines/Yukari.json new file mode 100644 index 00000000..47f205d4 --- /dev/null +++ b/Engines/Yukari.json @@ -0,0 +1,110 @@ +{ + "private": false, + "nps": 759223, + "source": "https://github.com/yukarichess/yukari", + "protocols": ["xboard", "uci"], + + "build": { + "path": "", + "compilers": ["cargo>=1.82.0-nightly"], + "cpuflags": [], + "systems": ["Linux", "Windows", "Darwin"] + }, + + "test_presets": { + "default": { + "base_branch": "trunk", + "book_name": "UHO_Lichess_4852_v1.epd", + "test_bounds": "[0.00, 5.00]", + "test_confidence": "[0.10, 0.05]", + "win_adj": "movecount=3 score=400", + "draw_adj": "movenumber=40 movecount=8 score=10" + }, + + "STC": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32 + }, + + "LTC": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8 + }, + + "STC regression": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32, + "test_bounds": "[-5.00, 0.00]" + }, + + "LTC regression": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8, + "test_bounds": "[-5.00, 0.00]" + }, + + "STC progtest": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32, + "test_max_games": 3000 + }, + + "LTC progtest": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "60.0+0.6", + 
"workload_size": 8, + "test_max_games": 1000 + }, + + "NNUE validation": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "N=25000" + } + }, + + "tune_presets": { + "default": { + "book_name": "UHO_4060_v2.epd", + "win_adj": "movecount=3 score=400", + "draw_adj": "movenumber=40 movecount=8 score=10" + }, + + "STC": { + "dev_options": "Threads=1 Hash=8", + "dev_time_control": "10.0+0.10" + }, + + "MTC": { + "dev_options": "Threads=1 Hash=32", + "dev_time_control": "30.0+0.30" + }, + + "LTC": { + "dev_options": "Threads=1 Hash=64", + "dev_time_control": "60.0+0.60" + }, + + "VLTC": { + "dev_options": "Threads=1 Hash=128", + "dev_time_control": "180.0+1.80" + } + }, + + "datagen_presets": { + "default": { + "win_adj": "None", + "draw_adj": "None", + "workload_size": 128 + }, + + "40k Nodes": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "N=40000" + } + } +} diff --git a/Engines/bannou.json b/Engines/bannou.json new file mode 100644 index 00000000..c2d156b0 --- /dev/null +++ b/Engines/bannou.json @@ -0,0 +1,116 @@ +{ + "private": false, + "nps": 2791491, + "source": "https://github.com/87flowers/bannou", + + "build": { + "path": "", + "compilers": ["zig"], + "cpuflags": [], + "systems": ["Linux", "Windows", "Darwin"] + }, + + "test_presets": { + "default": { + "base_branch": "main", + "book_name": "8moves_v3.epd", + "test_bounds": "[0, 5]", + "test_confidence": "[0.1, 0.05]", + "win_adj": "None", + "draw_adj": "None" + }, + + "STC": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32 + }, + + "LTC": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8 + }, + + "STC regression": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32, + "test_bounds": "[-5, 0]" + }, + + "LTC regression": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8, + "test_bounds": "[-5, 0]" + }, + + "STC progtest": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32, + "test_max_games": 3000 + }, + + "LTC progtest": { + "both_options": "Threads=1 Hash=128", + "both_time_control": "60.0+0.6", + "workload_size": 8, + "test_max_games": 1000 + }, + + "SMP STC": { + "both_options": "Threads=4 Hash=16", + "both_time_control": "8.0+0.08", + "workload_size": 32 + }, + + "SMP LTC": { + "both_options": "Threads=4 Hash=128", + "both_time_control": "40.0+0.4", + "workload_size": 8 + } + }, + + "tune_presets": { + "default": { + "book_name": "UHO_4060_v2.epd", + "win_adj": "movecount=3 score=400", + "draw_adj": "movenumber=40 movecount=8 score=10" + }, + + "STC": { + "dev_options": "Threads=1 Hash=8", + "dev_time_control": "10.0+0.10" + }, + + "MTC": { + "dev_options": "Threads=1 Hash=32", + "dev_time_control": "30.0+0.30" + }, + + "LTC": { + "dev_options": "Threads=1 Hash=64", + "dev_time_control": "60.0+0.60" + }, + + "VLTC": { + "dev_options": "Threads=1 Hash=128", + "dev_time_control": "180.0+1.80" + } + }, + + "datagen_presets": { + "default": { + "win_adj": "None", + "draw_adj": "None", + "workload_size": 128 + }, + + "40k Nodes": { + "both_options": "Threads=1 Hash=16", + "both_time_control": "N=40000" + } + } +} diff --git a/Engines/ice4.json b/Engines/ice4.json new file mode 100644 index 00000000..95d5392a --- /dev/null +++ b/Engines/ice4.json @@ -0,0 +1,89 @@ +{ + "private" : false, + "nps" : 2200000, + "source" : "https://github.com/MinusKelvin/ice4", + + "build" : { 
+ "path" : "", + "compilers" : ["g++"], + "cpuflags" : [], + "systems" : ["Linux"] + }, + + "test_presets" : { + "default" : { + "base_branch" : "main", + "book_name" : "4moves_noob.epd", + "test_bounds" : "[0.00, 5.00]", + "test_confidence" : "[0.1, 0.05]", + "win_adj" : "None", + "draw_adj" : "None", + "syzygy_adj" : "Disabled" + }, + + "STC" : { + "both_options" : "Threads=1 Hash=8", + "both_time_control" : "8.0+0.08", + "workload_size" : 8 + }, + + "LTC" : { + "both_options" : "Threads=1 Hash=64", + "both_time_control" : "60.0+0.6", + "workload_size" : 4 + }, + + "SMP STC" : { + "both_options" : "Threads=4 Hash=32", + "both_time_control" : "6.0+0.06", + "workload_size" : 8 + }, + + "SMP LTC" : { + "both_options" : "Threads=8 Hash=128", + "both_time_control" : "25.0+0.25", + "workload_size" : 4 + }, + + "STC Regression" : { + "both_options" : "Threads=1 Hash=8", + "both_time_control" : "8.0+0.08", + "test_bounds" : "[-5.00, 0.00]", + "test_confidence" : "[0.05, 0.05]", + "workload_size" : 8 + }, + + "LTC Regression" : { + "both_options" : "Threads=1 Hash=64", + "both_time_control" : "60.0+0.6", + "test_bounds" : "[-5.00, 0.00]", + "test_confidence" : "[0.05, 0.05]", + "workload_size" : 4 + }, + + "STC Elo" : { + "both_options" : "Threads=1 Hash=8", + "both_time_control" : "8.0+0.08", + "test_max_games" : 10000, + "workload_size" : 8 + }, + + "LTC Elo" : { + "both_options" : "Threads=1 Hash=64", + "both_time_control" : "60.0+0.6", + "test_max_games" : 10000, + "workload_size" : 4 + } + }, + + "tune_presets" : { + "default" : { + "dev_options" : "Threads=1 Hash=64", + "dev_time_control" : "60.0+0.6", + "book_name" : "UHO_Lichess_4852_v1.epd", + "win_adj" : "None", + "draw_adj" : "None", + "syzygy_adj" : "Disabled" + } + } +} diff --git a/OpenBench/apps.py b/OpenBench/apps.py index 5f23ad98..b523716c 100644 --- a/OpenBench/apps.py +++ b/OpenBench/apps.py @@ -18,8 +18,42 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +import atexit +import os +import threading +import platform + import django.apps +# No imports of OpenBench.* are allowed here + +LOCKFILE_PATH = 'openbench_watchers.lock' +CONFIG_LOCK = threading.Lock() +IS_WINDOWS = platform.system() == 'Windows' + +def acquire_watcher_lockfile(): + + lockfile = None + + try: # Failed to open the file entirely + lockfile = open(LOCKFILE_PATH, 'w') + except: return None + + try: + + if IS_WINDOWS: + import msvcrt + msvcrt.locking(lockfile.fileno(), msvcrt.LK_NBLCK, 1) + + else: + import fcntl + fcntl.lockf(lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB) + + except: # Failed to acquire the lock, but must still close the file + lockfile.close() + return None + + return lockfile class OpenBenchConfig(django.apps.AppConfig): @@ -27,7 +61,53 @@ class OpenBenchConfig(django.apps.AppConfig): def ready(self): + # Load all of the .json config files, only once per PROCESS. + # This must be done before ANY other OpenBench includes are used. 
+ from OpenBench import config - if config.OPENBENCH_CONFIG is None: - config.OPENBENCH_CONFIG = config.create_openbench_config() + with CONFIG_LOCK: + if config.OPENBENCH_CONFIG is None: + config.OPENBENCH_CONFIG, config.OPENBENCH_CONFIG_CHECKSUM = config.create_openbench_config() + + # Attempt to spawn the Artifact and PGN Watchers, globally once + + from OpenBench.watcher import ArtifactWatcher + from OpenBench.pgn_watcher import PGNWatcher + + # Result of fopen(LOCKFILE_PATH) after obtaining the lock, otherwise None + self.lockfile = acquire_watcher_lockfile() + + if self.lockfile: + + # Signals to stop the watchers + self.stop_artifact_watcher = threading.Event() + self.stop_pgn_watcher = threading.Event() + + # Each watcher is a threading.Thread + self.artifact_watcher = ArtifactWatcher(self.stop_artifact_watcher, daemon=True) + self.pgn_watcher = PGNWatcher(self.stop_pgn_watcher, daemon=True) + + # Start everything + self.artifact_watcher.start() + self.pgn_watcher.start() + + # Ensure we cleanup upon exit + atexit.register(self.shutdown) + + def shutdown(self): + + # Signal the Artifact Watcher to shutdown + if hasattr(self, 'artifact_watcher') and self.artifact_watcher.is_alive(): + self.stop_artifact_watcher.set() + self.artifact_watcher.join() + + # Signal the PGN Watcher to shutdown + if hasattr(self, 'pgn_watcher') and self.pgn_watcher.is_alive(): + self.stop_pgn_watcher.set() + self.pgn_watcher.join() + + # Cleanup Lockfile if we hold it + if self.lockfile: + self.lockfile.close() + os.remove(LOCKFILE_PATH) diff --git a/OpenBench/config.py b/OpenBench/config.py index 69f86933..d16e752d 100644 --- a/OpenBench/config.py +++ b/OpenBench/config.py @@ -18,6 +18,7 @@ # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +import hashlib import json import os import sys @@ -25,9 +26,10 @@ from OpenSite.settings import PROJECT_PATH -OPENBENCH_STATIC_VERSION = 'v4' +OPENBENCH_STATIC_VERSION = 'v6' -OPENBENCH_CONFIG = None # Initialized by OpenBench/apps.py +OPENBENCH_CONFIG = None # Initialized by OpenBench/apps.py +OPENBENCH_CONFIG_CHECKSUM = None # Initialized by OpenBench/apps.py def create_openbench_config(): @@ -35,7 +37,6 @@ def create_openbench_config(): config_dict = json.load(fin) verify_general_config(config_dict) - config_dict['books'] = { book : load_book_config(book) for book in config_dict['books'] } @@ -44,7 +45,14 @@ def create_openbench_config(): engine : load_engine_config(engine) for engine in config_dict['engines'] } - return config_dict + # Rolling sha256sum of the engine's build configs + checksum = hashlib.sha256(b'').digest() + for engine, engine_config in config_dict['engines'].items(): + serialized = json.dumps(engine_config['build'], sort_keys=True) + partial_sum = hashlib.sha256(serialized.encode('utf-8')).digest() + checksum = bytes(a ^ b for a, b in zip(checksum, partial_sum)) + + return config_dict, checksum.hex() def load_book_config(book_name): @@ -62,6 +70,10 @@ def load_engine_config(engine_name): with open(os.path.join(PROJECT_PATH, 'Engines', '%s.json' % (engine_name))) as fin: conf = json.load(fin) + # Legacy- default to UCI protocol if none provided. 
+ if 'protocols' not in conf: + conf['protocols'] = ['uci'] + verify_engine_basics(conf) verify_engine_build(engine_name, conf) @@ -107,6 +119,7 @@ def verify_engine_basics(conf): assert type(conf.get('nps')) == int and conf['nps'] > 0 assert type(conf.get('source')) == str assert type(conf.get('build')) == dict + assert type(conf.get('protocols')) == list def verify_engine_build(engine_name, conf): @@ -140,12 +153,14 @@ def verify_engine_test_preset(test_preset): 'dev_network', 'dev_options', 'dev_time_control', + 'dev_protocol', 'base_branch', 'base_bench', 'base_network', 'base_options', 'base_time_control', + 'base_protocol', 'test_bounds', 'test_confidence', @@ -176,6 +191,7 @@ def verify_engine_tune_preset(tune_preset): 'dev_network', 'dev_options', 'dev_time_control', + 'dev_protocol', 'spsa_reporting_type', 'spsa_distribution_type', @@ -215,12 +231,15 @@ def verify_engine_datagen_preset(datagen_preset): 'dev_network', 'dev_options', 'dev_time_control', + 'dev_protocol', 'base_branch', 'base_bench', 'base_network', 'base_options', 'base_time_control', + 'base_protocol', + 'book_name', 'upload_pgns', diff --git a/OpenBench/management/__init__.py b/OpenBench/management/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/OpenBench/management/commands/__init__.py b/OpenBench/management/commands/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/OpenBench/management/commands/runserver.py b/OpenBench/management/commands/runserver.py deleted file mode 100644 index 2f0aad61..00000000 --- a/OpenBench/management/commands/runserver.py +++ /dev/null @@ -1,39 +0,0 @@ -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# # -# OpenBench is a chess engine testing framework authored by Andrew Grant. # -# # -# # -# OpenBench is free software: you can redistribute it and/or modify # -# it under the terms of the GNU General Public License as published by # -# the Free Software Foundation, either version 3 of the License, or # -# (at your option) any later version. # -# # -# OpenBench is distributed in the hope that it will be useful, # -# but WITHOUT ANY WARRANTY; without even the implied warranty of # -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # -# GNU General Public License for more details. # -# # -# You should have received a copy of the GNU General Public License # -# along with this program. If not, see . 
# -# # -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # - -from OpenBench.watcher import ArtifactWatcher -from OpenBench.pgn_watcher import PGNWatcher - -from django.core.management.commands.runserver import Command as BaseRunserverCommand - -class Command(BaseRunserverCommand): - - def inner_run(self, *args, **options): - self.pre_start() - super().inner_run(*args, **options) - self.pre_quit() - - def pre_start(self): - self.watcher = ArtifactWatcher().start() - self.pgn_watcher = PGNWatcher().start() - - def pre_quit(self): - self.watcher.kill() - self.pgn_watcher.kill() \ No newline at end of file diff --git a/OpenBench/model_utils.py b/OpenBench/model_utils.py new file mode 100644 index 00000000..4f1c563d --- /dev/null +++ b/OpenBench/model_utils.py @@ -0,0 +1,24 @@ +from OpenBench.models import * + +from django.core.files.storage import FileSystemStorage +from django.forms.models import model_to_dict + +def network_to_dict(network): + return { **model_to_dict(network, exclude=['id']), 'created': str(network.created) } + + +def network_delete(network) -> (str, bool): + + # Don't allow deletion of important networks + if network.default or network.was_default: + return 'You may not delete Default, or previous Default networks', False + + # Save information before deleting the Network Model + status = 'Deleted %s for %s' % (network.name, network.engine) + sha256 = network.sha256; network.delete() + + # Only delete the actual file if no other engines use it + if not Network.objects.filter(sha256=sha256): + FileSystemStorage().delete(sha256) + + return status, True \ No newline at end of file diff --git a/OpenBench/models.py b/OpenBench/models.py index 2480d87a..a8ded2b6 100644 --- a/OpenBench/models.py +++ b/OpenBench/models.py @@ -20,7 +20,7 @@ from django.db.models import CharField, IntegerField, BooleanField, FloatField from django.db.models import JSONField, ForeignKey, DateTimeField, OneToOneField -from django.db.models import CASCADE, PROTECT, Model +from django.db.models import CASCADE, PROTECT, Model, TextChoices from django.contrib.auth.models import User class Engine(Model): @@ -29,6 +29,7 @@ class Engine(Model): source = CharField(max_length=1024) sha = CharField(max_length=64) bench = IntegerField(default=0) + protocols= JSONField(default=list) def __str__(self): return '{0} ({1})'.format(self.name, self.bench) @@ -89,6 +90,11 @@ def __str__(self): class Test(Model): + class ScaleMethod(TextChoices): + DEV = 'DEV' , 'DEV' + BASE = 'BASE', 'BASE' + BOTH = 'BOTH', 'BOTH' + # Misc information author = CharField(max_length=64) upload_pgns = CharField(max_length=16, default='FALSE') @@ -105,6 +111,7 @@ class Test(Model): dev_network = CharField(max_length=256, blank=True) dev_netname = CharField(max_length=256, blank=True) dev_time_control = CharField(max_length=32) + dev_protocol = CharField(max_length=32, default='uci') # Base Engine, and all of its settings base = ForeignKey('Engine', PROTECT, related_name='base') @@ -114,12 +121,17 @@ class Test(Model): base_network = CharField(max_length=256, blank=True) base_netname = CharField(max_length=256, blank=True) base_time_control = CharField(max_length=32) + base_protocol = CharField(max_length=32, default='uci') # Changable Test Parameters workload_size = IntegerField(default=32) priority = IntegerField(default=0) throughput = IntegerField(default=0) + # Scaling Mechanisms + scale_method = CharField(max_length=16, choices=ScaleMethod.choices, default=ScaleMethod.BASE) + scale_nps = 
IntegerField(default=0) + # Tablebases and Cutechess adjudicatoins syzygy_wdl = CharField(max_length=16, default='OPTIONAL') syzygy_adj = CharField(max_length=16, default='OPTIONAL') @@ -183,6 +195,9 @@ def as_penta(self): def as_nwld(self): return (self.games, self.wins, self.losses, self.draws) + def workload_type_str(self): + return {'SPSA' : 'tune', 'DATAGEN' : 'datagen'}.get(self.test_mode, 'test') + class LogEvent(Model): author = CharField(max_length=128) # Username for the OpenBench Profile diff --git a/OpenBench/pgn_watcher.py b/OpenBench/pgn_watcher.py index 7ec53928..e132944d 100644 --- a/OpenBench/pgn_watcher.py +++ b/OpenBench/pgn_watcher.py @@ -27,12 +27,16 @@ from OpenBench.models import PGN -from django.db import transaction +from django.db import transaction, OperationalError from django.core.files.base import ContentFile from django.core.files.storage import FileSystemStorage class PGNWatcher(threading.Thread): + def __init__(self, stop_event, *args, **kwargs): + self.stop_event = stop_event + super().__init__(*args, **kwargs) + def process_pgn(self, pgn): tar_path = FileSystemStorage('Media/PGNs').path('%d.pgn.tar' % (pgn.test_id)) @@ -56,11 +60,18 @@ def process_pgn(self, pgn): pgn.save() def run(self): - while True: - for pgn in PGN.objects.filter(processed=False): - try: + while not self.stop_event.wait(timeout=15): + + try: # Never exit on errors, to keep the watcher alive + for pgn in PGN.objects.filter(processed=False): self.process_pgn(pgn) - except: + + # Expect the database to be locked sometimes + except OperationalError as error: + if 'database is locked' not in str(error).lower(): traceback.print_exc() sys.stdout.flush() - time.sleep(15) + + except: # Totally unknown error + traceback.print_exc() + sys.stdout.flush() \ No newline at end of file diff --git a/OpenBench/static/create_workload.js b/OpenBench/static/create_workload.js index c2916825..de1c3d69 100644 --- a/OpenBench/static/create_workload.js +++ b/OpenBench/static/create_workload.js @@ -36,6 +36,27 @@ function create_network_options(field_id, engine) { } } +function create_protocol_options(field_id, engine) { + + var has_default = false; + var protocol_options = document.getElementById(field_id); + + // Delete all existing Protocols + while (protocol_options.length) + protocol_options.remove(0); + + // Add each Protocol that matches the given engine + for (const protocol of config.engines[engine].protocols) { + + var opt = document.createElement('option'); + opt.text = protocol; + opt.selected = false; + protocol_options.add(opt) + + has_default = has_default || protocol.default; + } +} + function create_preset_buttons(engine, workload_type) { // Clear out all of the existing buttons @@ -123,6 +144,7 @@ function set_engine(engine, target) { document.getElementById(target + '_repo' ).value = repos[engine] || config.engines[engine].source create_network_options(target + '_network', engine); + create_protocol_options(target + '_protocol', engine); } function set_option(option_name, option_value) { @@ -231,6 +253,9 @@ function change_engine(engine, target, workload_type) { if (target == 'dev' && (workload_type == 'TEST' || workload_type == 'DATAGEN')) set_engine(engine, 'base'); + set_option('scale_nps', config.engines[engine].nps); + set_option('scale_method', workload_type == 'TUNE' ? 
'DEV' : 'BASE'); + apply_preset('STC', workload_type); } diff --git a/OpenBench/static/form.css b/OpenBench/static/form.css index 0b469bac..c8516afa 100644 --- a/OpenBench/static/form.css +++ b/OpenBench/static/form.css @@ -68,7 +68,7 @@ } #content .row input[type="file"]{ - padding: 2px 2px; + padding: 2px 2px; } #content .row input:focus, #content .row select:focus, #content .row textarea:focus { diff --git a/OpenBench/static/logo.svg b/OpenBench/static/logo.svg index 377b0d2c..bfa68d60 100644 --- a/OpenBench/static/logo.svg +++ b/OpenBench/static/logo.svg @@ -90,7 +90,7 @@ id="layer1" transform="translate(16.1122,-26.054426)"> + style="fill:#523172;fill-opacity:1;stroke-width:0.264583" /> /', OpenBench.views.index), - django.urls.path(r'user//', OpenBench.views.user), - django.urls.path(r'user///', OpenBench.views.user), - django.urls.path(r'greens/', OpenBench.views.greens), - django.urls.path(r'greens//', OpenBench.views.greens), + django.urls.re_path(r'^index(?:/(?P\d+))?/$', OpenBench.views.index), + django.urls.re_path(r'^user/(?P[^/]+)(?:/(?P\d+))?/$', OpenBench.views.user), + django.urls.re_path(r'^greens(?:/(?P\d+))?/$', OpenBench.views.greens), + django.urls.path(r'search/', OpenBench.views.search), # Links for viewing general information tables django.urls.path(r'users/', OpenBench.views.users), - django.urls.path(r'event//', OpenBench.views.event), - django.urls.path(r'events/', OpenBench.views.events_actions), - django.urls.path(r'events//', OpenBench.views.events_actions), - django.urls.path(r'errors/', OpenBench.views.events_errors), - django.urls.path(r'errors//', OpenBench.views.events_errors), - django.urls.path(r'machines/', OpenBench.views.machines), - django.urls.path(r'machines//', OpenBench.views.machines), - - # Links for viewing and managing tests - django.urls.path(r'test//', OpenBench.views.test), - django.urls.path(r'test//', OpenBench.views.test), - django.urls.path(r'newTest/', OpenBench.views.create_test), - - # Links for viewing and managing tunes - django.urls.path(r'tune//', OpenBench.views.tune), - django.urls.path(r'tune//', OpenBench.views.tune), - django.urls.path(r'newTune/', OpenBench.views.create_tune), + django.urls.path(r'event//', OpenBench.views.event), + django.urls.re_path(r'^events(?:/(?P\d+))?/$', OpenBench.views.events_actions), + django.urls.re_path(r'^errors(?:/(?P\d+))?/$', OpenBench.views.events_errors), + django.urls.re_path(r'^machines(?:/(?P\d+))?/$', OpenBench.views.machines), - # Links for viewing and managing datagen - django.urls.path(r'datagen//', OpenBench.views.datagen), - django.urls.path(r'datagen//', OpenBench.views.datagen), - django.urls.path(r'newDatagen/', OpenBench.views.create_datagen), + # Links to create, view or manage Workloads (Tests, Tunes, Datagen) + django.urls.re_path(r'^(?Ptune|test|datagen)/new/$', OpenBench.views.new_workload), + django.urls.re_path(r'^(?Ptune|test|datagen)/(?P\d+)(?:/(?P\w+))?/$', OpenBench.views.workload), # Links for viewing and managing Networks django.urls.path(r'networks/', OpenBench.views.networks), @@ -91,6 +75,7 @@ django.urls.path(r'api/config//', OpenBench.views.api_configs), django.urls.path(r'api/networks//', OpenBench.views.api_networks), django.urls.path(r'api/networks///', OpenBench.views.api_network_download), + django.urls.path(r'api/networks///delete/', OpenBench.views.api_network_delete), django.urls.path(r'api/buildinfo/', OpenBench.views.api_build_info), django.urls.path(r'api/pgns//', OpenBench.views.api_pgns), diff --git a/OpenBench/utils.py b/OpenBench/utils.py 
index 7a9eed44..ec580f03 100644 --- a/OpenBench/utils.py +++ b/OpenBench/utils.py @@ -23,7 +23,6 @@ import json import math import os -import random import re import requests @@ -36,14 +35,16 @@ from django.utils import timezone from wsgiref.util import FileWrapper -from OpenSite.settings import MEDIA_ROOT +from OpenSite.settings import MEDIA_ROOT, PROJECT_PATH from OpenBench.config import OPENBENCH_CONFIG from OpenBench.models import * from OpenBench.stats import TrinomialSPRT, PentanomialSPRT +from OpenBench.templatetags.mytags import longStatBlock import OpenBench.views +import OpenBench.model_utils class TimeControl(object): @@ -124,10 +125,21 @@ def control_base(time_str): +def workload_uses_time_based_tc(workload): + + dev_type = TimeControl.control_type(workload.dev_time_control) + base_type = TimeControl.control_type(workload.base_time_control) + + return workload.upload_pgns == 'VERBOSE' \ + or (dev_type != TimeControl.FIXED_NODES and dev_type != TimeControl.FIXED_DEPTH) \ + or (base_type != TimeControl.FIXED_NODES and base_type != TimeControl.FIXED_DEPTH) + + def read_git_credentials(engine): - fname = 'Config/credentials.%s' % (engine.replace(' ', '').lower()) - if os.path.exists(fname): - with open(fname) as fin: + fname = 'credentials.%s' % (engine.replace(' ', '').lower()) + fpath = os.path.join(PROJECT_PATH, 'Config', fname) + if os.path.exists(fpath): + with open(fpath) as fin: return { 'Authorization' : 'token %s' % fin.readlines()[0].rstrip() } def path_join(*args): @@ -329,21 +341,12 @@ def network_default(request, engine, network): def network_delete(request, engine, network): - # Don't allow deletion of important networks - if network.default or network.was_default: - error = 'You may not delete Default, or previous Default networks.' - return OpenBench.views.redirect(request, '/networks/%s/' % (engine), error=error) + message, success = OpenBench.model_utils.network_delete(network) - # Save information before deleting the Network Model - status = 'Deleted %s for %s' % (network.name, network.engine) - sha256 = network.sha256; network.delete() - - # Only delete the actual file if no other engines use it - if not Network.objects.filter(sha256=sha256): - FileSystemStorage().delete(sha256) - - # Report this, and refer to the Engine specific view - return OpenBench.views.redirect(request, '/networks/%s/' % (engine), status=status) + if success: + return OpenBench.views.redirect(request, '/networks/%s/' % (engine), status=message) + else: + return OpenBench.views.redirect(request, '/networks/%s/' % (engine), error=message) def network_download(request, engine, network): @@ -396,6 +399,41 @@ def network_edit(request, engine, network): return OpenBench.views.redirect(request, '/networks/%s' % (network.engine), status='Applied changes') +def notify_webhook(request, test_id): + test = Test.objects.get(id=test_id) + with open('webhooks.json') as webhooks: + webhooks = json.load(webhooks) + # If the test author does not have a webhook, exit now + if test.author.lower() not in webhooks: + return + + # Fetch the specific webhook for this test author + webhook = webhooks[test.author.lower()] + + # Compute stats + lower, elo, upper = OpenBench.stats.Elo(test.results()) + error = max(upper - elo, elo - lower) + elo = OpenBench.templatetags.mytags.twoDigitPrecision(elo) + error = OpenBench.templatetags.mytags.twoDigitPrecision(error) + outcome = 'passed' if test.passed else 'failed' + + # Green if passing, red if failing. 
+ color = 0xFEFF58 + if test.passed: + color = 0x37F769 + elif test.wins < test.losses: + color = 0xFA4E4E + + return requests.post(webhook, json={ + 'username': test.dev_engine, + 'embeds': [{ + 'author': { 'name': test.author }, + 'title': f'Test `{test.dev.name}` vs `{test.base.name}` {outcome}', + 'url': request.build_absolute_uri(f'/test/{test_id}'), + 'color': color, + 'description': f'```\n{longStatBlock(test)}\n```', + }] + }) def update_test(request, machine): @@ -502,5 +540,9 @@ def update_test(request, machine): Machine.objects.filter(id=machine_id).update( updated=timezone.now() ) + + # Send update to webhook, if it exists + if test.finished and os.path.exists('webhooks.json'): + notify_webhook(request, test_id) return [{}, { 'stop' : True }][test.finished] diff --git a/OpenBench/views.py b/OpenBench/views.py index 838feb72..9151f13d 100644 --- a/OpenBench/views.py +++ b/OpenBench/views.py @@ -26,6 +26,7 @@ import OpenBench.config import OpenBench.utils +import OpenBench.model_utils from OpenBench.workloads.create_workload import create_workload from OpenBench.workloads.get_workload import get_workload @@ -33,7 +34,7 @@ from OpenBench.workloads.verify_workload import verify_workload from OpenBench.workloads.view_workload import view_workload -from OpenBench.config import OPENBENCH_CONFIG, OPENBENCH_STATIC_VERSION +from OpenBench.config import OPENBENCH_CONFIG, OPENBENCH_CONFIG_CHECKSUM, OPENBENCH_STATIC_VERSION from OpenSite.settings import PROJECT_PATH from OpenBench.models import * @@ -268,7 +269,7 @@ def index(request, page=1): completed = OpenBench.utils.get_completed_tests() awaiting = OpenBench.utils.get_awaiting_tests() - start, end, paging = OpenBench.utils.getPaging(completed, page, 'index') + start, end, paging = OpenBench.utils.getPaging(completed, int(page), 'index') data = { 'pending' : pending, @@ -288,7 +289,7 @@ def user(request, username, page=1): completed = OpenBench.utils.get_completed_tests().filter(author=username) awaiting = OpenBench.utils.get_awaiting_tests().filter(author=username) - start, end, paging = OpenBench.utils.getPaging(completed, page, 'user/%s' % (username)) + start, end, paging = OpenBench.utils.getPaging(completed, int(page), 'user/%s' % (username)) data = { 'pending' : pending, @@ -304,7 +305,7 @@ def user(request, username, page=1): def greens(request, page=1): completed = OpenBench.utils.get_completed_tests().filter(passed=True) - start, end, paging = OpenBench.utils.getPaging(completed, page, 'greens') + start, end, paging = OpenBench.utils.getPaging(completed, int(page), 'greens') data = { 'completed' : completed[start:end], 'paging' : paging } return render(request, 'index.html', data) @@ -420,10 +421,10 @@ def users(request): data = { 'profiles' : Profile.objects.order_by('-games', '-tests') } return render(request, 'users.html', data) -def event(request, id): +def event(request, pk): try: - with open(os.path.join(MEDIA_ROOT, LogEvent.objects.get(id=id).log_file)) as fin: + with open(os.path.join(MEDIA_ROOT, LogEvent.objects.get(id=pk).log_file)) as fin: return render(request, 'event.html', { 'content' : fin.read() }) except: return redirect(request, '/index/', error='No logs for event exist') @@ -431,7 +432,7 @@ def event(request, id): def events_actions(request, page=1): events = LogEvent.objects.all().filter(machine_id=0).order_by('-id') - start, end, paging = OpenBench.utils.getPaging(events, page, 'events') + start, end, paging = OpenBench.utils.getPaging(events, int(page), 'events') data = { 'events' : events[start:end], 
'paging' : paging }; return render(request, 'events.html', data) @@ -439,19 +440,19 @@ def events_actions(request, page=1): def events_errors(request, page=1): events = LogEvent.objects.all().exclude(machine_id=0).order_by('-id') - start, end, paging = OpenBench.utils.getPaging(events, page, 'errors') + start, end, paging = OpenBench.utils.getPaging(events, int(page), 'errors') data = { 'events' : events[start:end], 'paging' : paging }; return render(request, 'errors.html', data) -def machines(request, machineid=None): +def machines(request, pk=None): - if machineid == None: + if pk == None: data = { 'machines' : OpenBench.utils.getRecentMachines() } return render(request, 'machines.html', data) try: - data = { 'machine' : OpenBench.models.Machine.objects.get(id=machineid) } + data = { 'machine' : OpenBench.models.Machine.objects.get(id=int(pk)) } return render(request, 'machine.html', data) except: @@ -462,74 +463,26 @@ def machines(request, machineid=None): # TEST MANAGEMENT VIEWS # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -def test(request, id, action=None): +def workload(request, workload_type, pk, action=None): - # Request is to modify or interact with the Test if action != None: - return modify_workload(request, id, action) + return modify_workload(request, pk, action) - # Verify that the Test id exists - if not (test := Test.objects.filter(id=id).first()): - return redirect(request, '/index/', error='No such Test exists') + if not (workload := Test.objects.filter(id=int(pk)).first()): + return redirect(request, '/index/', error='No such Workload exists') - # Verify that it is indeed a Test and not a Tune - if test.test_mode == 'TUNE': - return redirect(request, '/tune/%d' % (id)) + # Trying to view a Tune as a Test, for example + if workload.workload_type_str() != workload_type: + return django.http.HttpResponseRedirect('/%s/%d/' % (workload.workload_type_str(), int(pk))) - # Verify that it is indeed a Test and not Datagen - if test.test_mode == 'DATAGEN': - return redirect(request, '/datagen/%d' % (id)) + return view_workload(request, workload, workload_type.upper()) - return view_workload(request, test, 'TEST') +def new_workload(request, workload_type): -def tune(request, id, action=None): + if workload_type.upper() not in [ 'TEST', 'TUNE', 'DATAGEN' ]: + return redirect(request, '/index/', error='Unknown Workload type') - # Request is to modify or interact with the Tune - if action != None: - return modify_workload(request, id, action) - - # Verify that the Tune id exists - if not (tune := Test.objects.filter(id=id).first()): - return redirect(request, '/index/', error='No such Tune exists') - - # Verify that it is indeed a Tune and not a Test - if tune.test_mode == 'SPRT' or tune.test_mode == 'GAMES': - return redirect(request, '/test/%d' % (id)) - - # Verify that it is indeed a Tune and not Datagen - if tune.test_mode == 'DATAGEN': - return redirect(request, '/datagen/%d' % (id)) - - return view_workload(request, tune, 'TUNE') - -def datagen(request, id, action=None): - - # Request is to modify or interact with the Datagen - if action != None: - return modify_workload(request, id, action) - - # Verify that the Datagen id exists - if not (datagen := Test.objects.filter(id=id).first()): - return redirect(request, '/index/', error='No such Datagen exists') - - # Verify that it is indeed a Datagen and not a Tune - if datagen.test_mode == 'TUNE': - return redirect(request, '/tune/%d' % (id)) - - # Verify that it is indeed a Datagen and not a 
Test - if datagen.test_mode == 'SPRT' or datagen.test_mode == 'GAMES': - return redirect(request, '/test/%d' % (id)) - - return view_workload(request, datagen, 'DATAGEN') - -def create_test(request): - return create_workload(request, 'TEST') - -def create_tune(request): - return create_workload(request, 'TUNE') - -def create_datagen(request): - return create_workload(request, 'DATAGEN') + return create_workload(request, workload_type.upper()) # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # NETWORK MANAGEMENT VIEWS # @@ -600,7 +553,7 @@ def scripts(request): return networks(request, engine, 'upload', name) if request.POST['action'] == 'CREATE_TEST': - return create_test(request) + return new_workload(request, "TEST") # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # CLIENT HOOK VIEWS # @@ -623,6 +576,10 @@ def wrapped_verify_worker(*args, **kwargs): if machine.secret != args[0].POST['secret']: return JsonResponse({ 'error' : 'Invalid Secret Token' }) + # Prompt the worker to soft-restart if its config is out of date + if machine.info.get('OPENBENCH_CONFIG_CHECKSUM') != OPENBENCH_CONFIG_CHECKSUM: + return JsonResponse({ 'error' : 'Server Configuration Changed' }) + # Otherwise, carry on, and pass along the machine return function(*args, machine) @@ -667,14 +624,17 @@ def client_worker_info(request): info = json.loads(request.POST['system_info']) machine = OpenBench.utils.get_machine(info['machine_id'], user, info) - # Indicate invalid request + # Provided an invalid machine_id, but just create a new machine if not machine: - return JsonResponse({ 'error' : 'Bad Machine Id' }) + machine = OpenBench.utils.get_machine('None', user, info) # Save the machine's latest information and Secret Token for this session machine.info = info machine.secret = secrets.token_hex(32) + # Note the Config checksum at the time of init, in case it changes + machine.info['OPENBENCH_CONFIG_CHECKSUM'] = OPENBENCH_CONFIG_CHECKSUM + # Tag engines that the Machine can build and/or run with binaries machine.info['supported'] = [] for engine, data in OPENBENCH_CONFIG['engines'].items(): @@ -867,17 +827,14 @@ def api_networks(request, engine): if engine in OPENBENCH_CONFIG['engines'].keys(): - if not (network := Network.objects.filter(engine=engine, default=True).first()): - return api_response({ 'error' : 'Engine does not have a default Network' }) - - default = { - 'sha' : network.sha256, 'name' : network.name, - 'author' : network.author, 'created' : str(network.created) } + default = None + if (network := Network.objects.filter(engine=engine, default=True).first()): + default = OpenBench.model_utils.network_to_dict(network) networks = [ - { 'sha' : network.sha256, 'name' : network.name, - 'author' : network.author, 'created' : str(network.created) } - for network in Network.objects.filter(engine=engine) ] + OpenBench.model_utils.network_to_dict(network) + for network in Network.objects.filter(engine=engine) + ] return api_response({ 'default' : default, 'networks' : networks }) @@ -901,6 +858,21 @@ def api_network_download(request, engine, identifier): return api_response({ 'error' : 'Engine not found. 
Check /api/config/ for a full list' }) +@csrf_exempt +def api_network_delete(request, engine, identifier): + + if not api_authenticate(request): + return api_response({ 'error' : 'API requires authentication for this server' }) + + if not api_authenticate(request, require_enabled=True): + return api_response({ 'error' : 'API requires authentication for this endpoint' }) + + if not (network := OpenBench.utils.network_disambiguate(engine, identifier)): + return api_response({ 'error' : 'Network %s for Engine %s not found' % (identifier, engine) }) + + message, success = OpenBench.model_utils.network_delete(network) + return api_response({ 'success' if success else 'error' : message }) + @csrf_exempt def api_build_info(request): @@ -928,14 +900,31 @@ def api_build_info(request): @csrf_exempt def api_pgns(request, pgn_id): + # 0. Make sure the request has the correct permissions if not api_authenticate(request): return api_response({ 'error' : 'API requires authentication for this server' }) - # Possible to request a PGN that does not exist + # 1. Make sure the workload actually exists for the requested PGN + try: workload = Test.objects.get(pk=pgn_id) + except: return api_response({ 'error' : 'Requested Workload Id does not exist' }) + + # 2. Make sure there actually is a PGN attached to the Workload pgn_path = FileSystemStorage('Media/PGNs').path('%d.pgn.tar' % (pgn_id)) if not os.path.exists(pgn_path): return api_response({ 'error' : 'Unable to find PGN for Workload #%d' % (pgn_id) }) + # 3. Make sure the workload is not currently running + if not workload.finished: + return api_response({ 'error' : 'PGNs cannot be downloaded while the Workload is active' }) + + # 4. Make sure no active workers are still on this workload + if OpenBench.utils.getRecentMachines().filter(workload=pgn_id): + return api_response({ 'error' : 'Some machines are still on this Workload. Try again shortly' }) + + # 5. Make sure there are no pending .pgn.bz2 files to be processed + if PGN.objects.filter(test_id=pgn_id).filter(processed=False): + return api_response({ 'error' : 'Still processing individual PGNs into the archive. Try again shortly' }) + # Craft the download HTML response fwrapper = FileWrapper(open(pgn_path, 'rb'), 8192) response = FileResponse(fwrapper, content_type='application/octet-stream') @@ -951,4 +940,4 @@ def api_pgns(request, pgn_id): # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # def buyEthereal(request): - return render(request, 'buyEthereal.html') \ No newline at end of file + return render(request, 'buyEthereal.html') diff --git a/OpenBench/watcher.py b/OpenBench/watcher.py index 5f07b72e..99420575 100644 --- a/OpenBench/watcher.py +++ b/OpenBench/watcher.py @@ -28,8 +28,14 @@ from OpenBench.utils import read_git_credentials from OpenBench.workloads.verify_workload import fetch_artifact_url +from django.db import OperationalError + class ArtifactWatcher(threading.Thread): + def __init__(self, stop_event, *args, **kwargs): + self.stop_event = stop_event + super().__init__(*args, **kwargs) + def update_test(self, test): # Public engines end their source with .zip. 
Private engines end @@ -59,10 +65,19 @@ def update_test(self, test): test.save() def run(self): - while True: - for test in get_awaiting_tests(): - try: self.update_test(test) - except: + + while not self.stop_event.wait(timeout=15): + + try: # Never exit on errors, to keep the watcher alive + for test in get_awaiting_tests(): + self.update_test(test) + + # Expect the database to be locked sometimes + except OperationalError as error: + if 'database is locked' not in str(error).lower(): traceback.print_exc() sys.stdout.flush() - time.sleep(15) + + except: # Totally unknown error + traceback.print_exc() + sys.stdout.flush() \ No newline at end of file diff --git a/OpenBench/workloads/create_workload.py b/OpenBench/workloads/create_workload.py index 7b194980..65f13a3e 100644 --- a/OpenBench/workloads/create_workload.py +++ b/OpenBench/workloads/create_workload.py @@ -57,21 +57,21 @@ def create_workload(request, workload_type): data['dev_text'] = 'Dev' data['dev_title_text'] = 'Dev' data['submit_text'] = 'Create Engine Test' - data['submit_endpoint'] = '/newTest/' + data['submit_endpoint'] = '/test/new/' if workload_type == 'TUNE': data['workload'] = workload_type data['dev_text'] = '' data['dev_title_text'] = 'Engine' data['submit_text'] = 'Create SPSA Tune' - data['submit_endpoint'] = '/newTune/' + data['submit_endpoint'] = '/tune/new/' if workload_type == 'DATAGEN': data['workload'] = workload_type data['dev_text'] = 'Dev' data['dev_title_text'] = 'Dev' data['submit_text'] = 'Create Datagen' - data['submit_endpoint'] = '/newDatagen/' + data['submit_endpoint'] = '/datagen/new/' return OpenBench.views.render(request, 'create_workload.html', data) @@ -85,7 +85,7 @@ def create_workload(request, workload_type): workload, errors = create_new_datagen(request) if errors != [] and errors != None: - paths = { 'TEST' : '/newTest/', 'TUNE' : '/newTune/', 'DATAGEN' : '/newDatagen/' } + paths = { 'TEST' : '/test/new/', 'TUNE' : '/tune/new/', 'DATAGEN' : '/datagen/new/' } return OpenBench.views.redirect(request, paths[workload_type], error='\n'.join(errors)) if warning := OpenBench.utils.branch_is_out_of_date(workload): @@ -122,6 +122,7 @@ def create_new_test(request): test.dev_options = request.POST['dev_options'] test.dev_network = request.POST['dev_network'] test.dev_time_control = OpenBench.utils.TimeControl.parse(request.POST['dev_time_control']) + test.dev_protocol = request.POST['dev_protocol'] test.base = get_engine(*base_ingo) test.base_repo = request.POST['base_repo'] @@ -129,6 +130,7 @@ def create_new_test(request): test.base_options = request.POST['base_options'] test.base_network = request.POST['base_network'] test.base_time_control = OpenBench.utils.TimeControl.parse(request.POST['base_time_control']) + test.base_protocol = request.POST['base_protocol'] test.workload_size = int(request.POST['workload_size']) test.priority = int(request.POST['priority']) @@ -139,6 +141,9 @@ def create_new_test(request): test.win_adj = request.POST['win_adj'] test.draw_adj = request.POST['draw_adj'] + test.scale_method = request.POST['scale_method'] + test.scale_nps = int(request.POST['scale_nps']) + test.test_mode = request.POST['test_mode'] test.awaiting = not (dev_has_all and base_has_all) @@ -187,6 +192,7 @@ def create_new_tune(request): test.dev_options = test.base_options = request.POST['dev_options'] test.dev_network = test.base_network = request.POST['dev_network'] test.dev_time_control = test.base_time_control = OpenBench.utils.TimeControl.parse(request.POST['dev_time_control']) + test.dev_protocol = 
test.base_protocol = request.POST['dev_protocol'] test.workload_size = int(request.POST['spsa_pairs_per']) test.priority = int(request.POST['priority']) @@ -197,6 +203,9 @@ def create_new_tune(request): test.win_adj = request.POST['win_adj'] test.draw_adj = request.POST['draw_adj'] + test.scale_method = request.POST['scale_method'] + test.scale_nps = int(request.POST['scale_nps']) + test.test_mode = 'SPSA' test.spsa = extract_spas_params(request) @@ -235,6 +244,7 @@ def create_new_datagen(request): test.dev_options = request.POST['dev_options'] test.dev_network = request.POST['dev_network'] test.dev_time_control = OpenBench.utils.TimeControl.parse(request.POST['dev_time_control']) + test.dev_protocol = request.POST['dev_protocol'] test.base = get_engine(*base_ingo) test.base_repo = request.POST['base_repo'] @@ -242,6 +252,7 @@ def create_new_datagen(request): test.base_options = request.POST['base_options'] test.base_network = request.POST['base_network'] test.base_time_control = OpenBench.utils.TimeControl.parse(request.POST['base_time_control']) + test.base_protocol = request.POST['base_protocol'] test.max_games = int(request.POST['datagen_max_games']) test.genfens_args = request.POST['datagen_custom_genfens'] @@ -256,6 +267,9 @@ def create_new_datagen(request): test.win_adj = request.POST['win_adj'] test.draw_adj = request.POST['draw_adj'] + test.scale_method = request.POST['scale_method'] + test.scale_nps = int(request.POST['scale_nps']) + test.test_mode = 'DATAGEN' test.awaiting = not (dev_has_all and base_has_all) diff --git a/OpenBench/workloads/get_workload.py b/OpenBench/workloads/get_workload.py index 8d3a49e5..b1dafba1 100644 --- a/OpenBench/workloads/get_workload.py +++ b/OpenBench/workloads/get_workload.py @@ -106,6 +106,10 @@ def filter_valid_workloads(request, machine): workloads = workloads.exclude(syzygy_adj='%d-MAN' % (K)) workloads = workloads.exclude(syzygy_wdl='%d-MAN' % (K)) + # Skip any workload using, or measuring, Time, for --noisy workers + if machine.info.get('noisy'): + workloads = [x for x in workloads if not OpenBench.utils.workload_uses_time_based_tc(x)] + # Skip workloads that we have insufficient threads to play options = [x for x in workloads if valid_hardware_assignment(x, machine)] @@ -179,6 +183,11 @@ def compute_resource_distribution(workloads, machine, has_focus): def workload_to_dictionary(test, result, machine): + # HACK: Remove this after a while, to avoid a complex DB migration + if test.scale_nps == 0: + test.scale_nps = OPENBENCH_CONFIG['engines'][test.base_engine]['nps'] + test.save() + workload = {} workload['result'] = { @@ -196,6 +205,8 @@ def workload_to_dictionary(test, result, machine): 'upload_pgns' : test.upload_pgns, 'genfens_args' : test.genfens_args, 'play_reverses' : test.play_reverses, + 'scale_method' : test.scale_method, + 'scale_nps' : test.scale_nps, } workload['test']['book'] = { @@ -215,9 +226,9 @@ def workload_to_dictionary(test, result, machine): 'network' : test.dev_network, 'netname' : test.dev_netname, 'time_control' : test.dev_time_control, - 'nps' : OPENBENCH_CONFIG['engines'][test.dev_engine]['nps'], 'build' : OPENBENCH_CONFIG['engines'][test.dev_engine]['build'], 'private' : OPENBENCH_CONFIG['engines'][test.dev_engine]['private'], + 'protocol' : test.dev_protocol, } workload['test']['base'] = { @@ -231,9 +242,9 @@ def workload_to_dictionary(test, result, machine): 'network' : test.base_network, 'netname' : test.base_netname, 'time_control' : test.base_time_control, - 'nps' : 
OPENBENCH_CONFIG['engines'][test.base_engine]['nps'], 'build' : OPENBENCH_CONFIG['engines'][test.base_engine]['build'], 'private' : OPENBENCH_CONFIG['engines'][test.base_engine]['private'], + 'protocol' : test.base_protocol, } workload['distribution'] = game_distribution(test, machine) diff --git a/OpenBench/workloads/verify_workload.py b/OpenBench/workloads/verify_workload.py index 16b8944a..4c9255af 100644 --- a/OpenBench/workloads/verify_workload.py +++ b/OpenBench/workloads/verify_workload.py @@ -80,6 +80,7 @@ def verify_test_creation(errors, request): (verify_options , 'dev_options', 'Threads', 'Dev Options'), (verify_options , 'dev_options', 'Hash', 'Dev Options'), (verify_time_control , 'dev_time_control', 'Dev Time Control'), + (verify_protocol , 'dev_protocol', 'Dev Protocol'), # Verify everything about the Base Engine (verify_configuration , 'base_engine', 'Base Engine', 'engines'), @@ -88,6 +89,7 @@ def verify_test_creation(errors, request): (verify_options , 'base_options', 'Threads', 'Base Options'), (verify_options , 'base_options', 'Hash', 'Base Options'), (verify_time_control , 'base_time_control', 'Base Time Control'), + (verify_protocol , 'base_protocol', 'Base Protocol'), # Verify everything about the Test Settings (verify_configuration , 'book_name', 'Book', 'books'), @@ -106,6 +108,11 @@ def verify_test_creation(errors, request): (verify_integer , 'workload_size', 'Workload Size'), (verify_greater_than , 'workload_size', 'Workload Size', 0), + # Verify the Scaling Mechanisms + (verify_scale_method , 'scale_method'), + (verify_integer , 'scale_nps', 'Scale NPS'), + (verify_greater_than , 'scale_nps', 'Scale NPS', 0), + # Verify everything about the Adjudicaton Settings (verify_syzygy_field , 'syzygy_adj', 'Syzygy Adjudication'), (verify_win_adj , 'win_adj'), @@ -141,6 +148,11 @@ def verify_tune_creation(errors, request): (verify_greater_than , 'throughput', 'Throughput', 0), (verify_syzygy_field , 'syzygy_wdl', 'Syzygy WDL'), + # Verify the Scaling Mechanisms + (verify_scale_method , 'scale_method'), + (verify_integer , 'scale_nps', 'Scale NPS'), + (verify_greater_than , 'scale_nps', 'Scale NPS', 0), + # Verify everything about the Adjudicaton Settings (verify_syzygy_field , 'syzygy_adj', 'Syzygy Adjudication'), (verify_win_adj , 'win_adj'), @@ -198,6 +210,11 @@ def verify_datagen_creation(errors, request): (verify_integer , 'workload_size', 'Workload Size'), (verify_greater_than , 'workload_size', 'Workload Size', 0), + # Verify the Scaling Mechanisms + (verify_scale_method , 'scale_method'), + (verify_integer , 'scale_nps', 'Scale NPS'), + (verify_greater_than , 'scale_nps', 'Scale NPS', 0), + # Verify everything about the Adjudicaton Settings (verify_syzygy_field , 'syzygy_adj', 'Syzygy Adjudication'), (verify_win_adj , 'win_adj'), @@ -232,6 +249,10 @@ def verify_time_control(errors, request, field, field_name): try: OpenBench.utils.TimeControl.parse(request.POST[field]) except: errors.append('{0} is not parsable'.format(field_name)) +def verify_protocol(errors, request, field, field_name): + try: assert request.POST[field].lower() == 'uci' or request.POST[field].lower() == 'xboard' + except: errors.append('{0} is not a valid protocol'.format(field_name)) + def verify_win_adj(errors, request, field): try: if (content := request.POST[field]) == 'None': return @@ -348,6 +369,12 @@ def verify_datagen_book(errors, request, field, field_name, parent): assert request.POST[field] in valid except: errors.append('{0} was neither NONE nor found in the 
configuration'.format(field_name)) +def verify_scale_method(errors, request, field): + try: assert(request.POST[field] in Test.ScaleMethod) + except: + choices = [f[0] for f in Test.ScaleMethod.choices] + errors.append('Unknown Scale Method. Expected one of {%s}.' % (', '.join(choices))) + def collect_github_info(errors, request, field): diff --git a/Scripts/archive2nps.py b/Scripts/archive2nps.py old mode 100644 new mode 100755 index 082fe216..b2e2434f --- a/Scripts/archive2nps.py +++ b/Scripts/archive2nps.py @@ -44,49 +44,100 @@ def pgn_header_list(lines): yield (headers, move_text) -def process_content(content, data, use_scale): +def process_content(content, data, result_id, use_scale): comment_regex = r'{(book|[+-]?M?\d+(?:\.\d+)? \d+/\d+ \d+ \d+)[^}]*}' for (headers, move_text) in pgn_iterator(content): - factor = float(headers['ScaleFactor']) if use_scale else 1.00 - - white = headers['White'].split('-')[-1] - black = headers['Black'].split('-')[-1] + factor = float(headers['ScaleFactor']) if use_scale else 1.00 + white = headers['White'].split('-')[-1] + black = headers['Black'].split('-')[-1] white_stm = 'FEN' not in headers or headers['FEN'].split()[1] == 'w' - data['games'] = data['games'] + 1 + # Setup to track stats per result-id + if result_id not in data: + data[result_id] = {} + # Setup to track stats for this result-id for engine in (white, black): - if engine not in data: - data[engine] = { 'nodes' : 0, 'time' : 0 } + if engine not in data[result_id]: + data[result_id][engine] = { 'nodes' : 0, 'time' : 0, 'games' : 0, 'ply' : 0 } + data[result_id][engine]['games'] += 1 for x in re.compile(comment_regex).findall(move_text): - if len(tokens := x.split()) == 4: - data[white if white_stm else black]['time'] += int(tokens[2]) / factor - data[white if white_stm else black]['nodes'] += int(tokens[3]) - + data[result_id][white if white_stm else black]['time'] += int(tokens[2]) / factor + data[result_id][white if white_stm else black]['nodes'] += int(tokens[3]) + data[result_id][white if white_stm else black]['ply'] += 1 white_stm = not white_stm +def report_verbose_stats(data): + + header = 'Result ID Games Dev Base ' + print (header) + print ('-' * len(header)) + + for result_id, stats in data.items(): + + games = stats['dev']['games'] + + dev_knps = stats['dev']['nodes'] / stats['dev']['time'] + base_knps = stats['base']['nodes'] / stats['base']['time'] + + print ('%5s %9d %7d knps %7d knps' % (result_id, games, dev_knps, base_knps)) + +def report_general_stats(data): + + games = 0 + dev_nodes = dev_time = dev_ply = 0 + base_nodes = base_time = base_ply = 0 + + for result_id, stats in data.items(): + + dev_nodes += stats['dev']['nodes'] + dev_time += stats['dev']['time'] + dev_ply += stats['dev']['ply'] + + base_nodes += stats['base']['nodes'] + base_time += stats['base']['time'] + base_ply += stats['base']['ply'] + + games += stats['dev']['games'] + assert stats['dev']['games'] == stats['base']['games'] + + dev_nps = dev_nodes / dev_time + base_nps = base_nodes / base_time + + dev_avg = dev_nodes // dev_ply + base_avg = base_nodes // base_ply + + print ('\nStats for Dev') + print ('-- Average KNPS | %.3f' % (dev_nps)) + print ('-- Average Nodes | %d' % (dev_avg)) + + print ('\nStats for Base') + print ('-- Average KNPS | %.3f' % (base_nps)) + print ('-- Average Nodes | %d' % (base_avg)) + if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('filename', help='Path to the OpenBench pgn archive') - parser.add_argument('--scale', help='Adjust based on 
ScaleFactor', action='store_true') + parser.add_argument('--scale' , help='Adjust based on ScaleFactor', action='store_true') + parser.add_argument('-v', '--verbose', help='Verbose reporting per machine', action='store_true') args = parser.parse_args() - data = { 'games' : 0 } + data = {} with tarfile.open(args.filename, 'r') as tar: for member in filter(lambda x: x.isfile(), tar.getmembers()): if file := tar.extractfile(member): - process_content(bz2.decompress(file.read()), data, args.scale) + test_id, result_id, seed, _, _ = member.name.split('.') + process_content(bz2.decompress(file.read()), data, result_id, args.scale) + + if args.verbose: + report_verbose_stats(data) + + report_general_stats(data) - dev_nps = 1000 * data['dev' ]['nodes'] / data['dev' ]['time'] - base_nps = 1000 * data['base']['nodes'] / data['base']['time'] - print ('Dev %d nps' % (int(dev_nps))) - print ('Base %d nps' % (int(base_nps))) - print ('Gain %.3f%%' % (100.0 * dev_nps / base_nps - 100.0)) - print ('%d games' % (data['games'])) diff --git a/Scripts/archive2pgns.py b/Scripts/archive2pgns.py old mode 100644 new mode 100755 diff --git a/Scripts/bench_all.py b/Scripts/bench_all.py old mode 100644 new mode 100755 diff --git a/Scripts/bench_engine.py b/Scripts/bench_engine.py index e27c3557..03da208a 100755 --- a/Scripts/bench_engine.py +++ b/Scripts/bench_engine.py @@ -33,10 +33,10 @@ if __name__ == '__main__': p = argparse.ArgumentParser() - p.add_argument('-E', '--engine' , help='Binary Name', required=True) - p.add_argument('-N', '--network' , help='Networks for Private Engines', required=False) - p.add_argument('-T', '--threads' , help='Concurrent Benchmarks', required=True, type=int) - p.add_argument('-S', '--sets' , help='Benchmark Sample Count', required=True, type=int) + p.add_argument('-E', '--engine' , help='Relative path to Binary', required=True) + p.add_argument('-N', '--network' , help='Relative path to Network for Private Engines', required=False) + p.add_argument('-T', '--threads' , help='Concurrent Benchmarks', required=True, type=int) + p.add_argument('-S', '--sets' , help='Benchmark Sample Count', required=True, type=int) args = p.parse_args() private = args.network != None diff --git a/Scripts/delete_networks.py b/Scripts/delete_networks.py new file mode 100755 index 00000000..2bf4068a --- /dev/null +++ b/Scripts/delete_networks.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python3 + +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# # +# OpenBench is a chess engine testing framework by Andrew Grant. # +# # +# # +# OpenBench is free software: you can redistribute it and/or modify # +# it under the terms of the GNU General Public License as published by # +# the Free Software Foundation, either version 3 of the License, or # +# (at your option) any later version. # +# # +# OpenBench is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU General Public License for more details. # +# # +# You should have received a copy of the GNU General Public License # +# along with this program. If not, see . 
# +# # +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + +import argparse +import datetime +import os +import requests + +def url_join(*args): + # Join a set of URL paths while maintaining the correct format + return '/'.join([f.lstrip('/').rstrip('/') for f in args]) + '/' + + +def delete_network(args, network): + + # Not from the requested author + if network['author'] != args.author: + return + + # Network name does not contain the critical text + if args.contains and args.contains not in network['name']: + return + + # Server won't let us delete such networks + if network['default'] or network['was_default']: + return + + # Network is more recent than we are willing to erase + dt = datetime.datetime.fromisoformat(network['created']) + now = datetime.datetime.now(datetime.timezone.utc) + if (now - dt).days < int(args.days): + return + + if args.dry: + print ('Dry run... deleting %s' % (network['name'])) + + else: + url = url_join(args.server, 'api', 'networks', args.engine, network['name'], 'delete') + data = { 'username' : args.username, 'password' : args.password } + print (requests.post(url, data=data).json()) + +def delete_networks(): + + # We can use ENV variables for Username, Password, and Server + req_user = required=('OPENBENCH_USERNAME' not in os.environ) + req_pass = required=('OPENBENCH_PASSWORD' not in os.environ) + req_server = required=('OPENBENCH_SERVER' not in os.environ) + + help_user = 'Username. May also be passed as OPENBENCH_USERNAME environment variable' + help_pass = 'Password. May also be passed as OPENBENCH_PASSWORD environment variable' + help_server = ' Server. May also be passed as OPENBENCH_SERVER environment variable' + + p = argparse.ArgumentParser() + p.add_argument('-U', '--username', help=help_user , required=req_user ) + p.add_argument('-P', '--password', help=help_pass , required=req_pass ) + p.add_argument('-S', '--server' , help=help_server , required=req_server) + p.add_argument('-E', '--engine' , help='Engine' , required=True ) + p.add_argument('-A', '--author' , help='Network Author' , required=True ) + p.add_argument( '--days' , help='Delete iff N+ days old', required=True ) + p.add_argument( '--contains', help='Delete iif in name' , required=False ) + p.add_argument( '--dry' , help='Mock run' , action='store_true') + args = p.parse_args() + + # Fallback on ENV variables for Username, Password, and Server + args.username = args.username if args.username else os.environ['OPENBENCH_USERNAME'] + args.password = args.password if args.password else os.environ['OPENBENCH_PASSWORD'] + args.server = args.server if args.server else os.environ['OPENBENCH_SERVER' ] + + url = url_join(args.server, 'api', 'networks', args.engine) + data = { 'username' : args.username, 'password' : args.password } + nets = requests.post(url, data=data).json()['networks'] + + for network in nets: + delete_network(args, network) + +if __name__ == '__main__': + delete_networks() \ No newline at end of file diff --git a/Scripts/genfens_engine.py b/Scripts/genfens_engine.py new file mode 100755 index 00000000..8d6c2810 --- /dev/null +++ b/Scripts/genfens_engine.py @@ -0,0 +1,62 @@ +#!/bin/python3 + +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +# # +# OpenBench is a chess engine testing framework by Andrew Grant. 
# +# # +# # +# OpenBench is free software: you can redistribute it and/or modify # +# it under the terms of the GNU General Public License as published by # +# the Free Software Foundation, either version 3 of the License, or # +# (at your option) any later version. # +# # +# OpenBench is distributed in the hope that it will be useful, # +# but WITHOUT ANY WARRANTY; without even the implied warranty of # +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # +# GNU General Public License for more details. # +# # +# You should have received a copy of the GNU General Public License # +# along with this program. If not, see . # +# # +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # + +import argparse +import os +import sys +import random + +# Needed to include from ../Client/*.py +PARENT = os.path.join(os.path.dirname(__file__), os.path.pardir) +sys.path.append(os.path.abspath(os.path.join(PARENT, 'Client'))) + +import genfens + +if __name__ == '__main__': + + p = argparse.ArgumentParser() + p.add_argument('--engine' , help='Binary' , required=True ) + p.add_argument('--threads' , help='Threads to generate with' , required=True ) + p.add_argument('--count-per' , help='Openings to generate per thread', required=True ) + p.add_argument('--book-path' , help='Path to base Book, if any' , default='None') + p.add_argument('--extra' , help='Extra genfens arguments' , default='' ) + p.add_argument('--network' , help='Network, for Private Engines' , default=None ) + args = p.parse_args() + + # Same way that get_workload.py generates seeds + seeds = [random.randint(0, 2**31 - 1) for x in range(int(args.threads))] + + with open('example_genfens.epd', 'w') as fout: + + genfen_args = { + 'N' : int(args.count_per), + 'book' : args.book_path, + 'seeds' : seeds, + 'extra' : args.extra, + 'private' : args.network != None, + 'engine' : args.engine, + 'network' : args.network, + 'threads' : int(args.threads), + 'output' : fout, + } + + genfens.create_genfens_opening_book(genfen_args) diff --git a/Templates/OpenBench/base.html b/Templates/OpenBench/base.html index 34cbac90..c1fe6579 100644 --- a/Templates/OpenBench/base.html +++ b/Templates/OpenBench/base.html @@ -37,8 +37,8 @@ }; for (var i = 0; i < timestamps.length; i++) { - var date = new Date(1000 * timestamps[i].innerHTML); - timestamps[i].innerHTML = date.toLocaleString(undefined, options); + var date = new Date(1000 * timestamps[i].textContent); + timestamps[i].textContent = date.toLocaleString(undefined, options); } } @@ -51,11 +51,36 @@ }; for (var i = 0; i < datestamps.length; i++) { - var date = new Date(1000 * datestamps[i].innerHTML); - datestamps[i].innerHTML = date.toLocaleString(undefined, options); + var date = new Date(1000 * datestamps[i].textContent); + datestamps[i].textContent = date.toLocaleString(undefined, options); } } + { + const cells = document.querySelectorAll('.engine-options'); + + cells.forEach(cell => { + + // Only have Threads= and Hash= settings + if (cell.textContent.split(/\s+/).length <= 2) + return; + + // Only display Threads=X Hash=X ... + const re_threads = cell.textContent.match(/Threads=\d+/)[0]; + const re_hash = cell.textContent.match(/Hash=\d+/)[0]; + const new_text = re_threads + ' ' + re_hash + ' ...' + + // Add all options into the popup text + const popup = document.createElement('div'); + popup.classList.add('engine-options-popup'); + popup.innerHTML = cell.textContent.trim().replace(/\s+/g, '
'); + + // Prune text, and append popup + cell.textContent = new_text; + cell.appendChild(popup); + }); + } + }, false); function toggle_sidebar() { @@ -92,9 +117,9 @@ diff --git a/Templates/OpenBench/create_workload.html b/Templates/OpenBench/create_workload.html index bc439e1b..cdc4b271 100644 --- a/Templates/OpenBench/create_workload.html +++ b/Templates/OpenBench/create_workload.html @@ -8,7 +8,8 @@ {{ networks|json_script:"json-networks" }} {{ profile.repos|json_script:"json-repos" }} - + +
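
Note on the new webhook hook: notify_webhook() in OpenBench/utils.py reads a webhooks.json file from the server's working directory (now listed in .gitignore), looks the test author up by lower-cased username, and POSTs a Discord-style embed to the stored URL once the test finishes. As an illustrative sketch only, with made-up author names and placeholder URLs, the file is assumed to look roughly like:

{
    "andrewgrant" : "https://discord.com/api/webhooks/1234/abcd",
    "someauthor"  : "https://discord.com/api/webhooks/5678/efgh"
}

Authors without an entry are simply skipped, so the file only needs to list users who want notifications.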