From 1986120edf338a5249e45d95740d19be73e3d1f6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=D0=A1=D0=BC=D0=B5=D1=80=D0=B4=D0=BE=D0=BA=D1=80=D1=8B?=
 =?UTF-8?q?=D0=BB?=
Date: Sat, 11 Feb 2023 10:07:50 +0200
Subject: [PATCH 1/4] DBD to SQL converter script

---
 code/Python/.gitignore    |   2 +
 code/Python/dbd_to_sql.py | 138 ++++++++++++++++++++++++++++++++++++++
 2 files changed, 140 insertions(+)
 create mode 100644 code/Python/.gitignore
 create mode 100755 code/Python/dbd_to_sql.py

diff --git a/code/Python/.gitignore b/code/Python/.gitignore
new file mode 100644
index 00000000000..28fe4efff99
--- /dev/null
+++ b/code/Python/.gitignore
@@ -0,0 +1,2 @@
+__pycache__
+dbds.sql
diff --git a/code/Python/dbd_to_sql.py b/code/Python/dbd_to_sql.py
new file mode 100755
index 00000000000..0d81d8f2a6a
--- /dev/null
+++ b/code/Python/dbd_to_sql.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python3
+
+import dbd
+import os
+from argparse import ArgumentParser
+from glob import glob
+
+script_dir:str = os.path.dirname(os.path.abspath(__file__));
+
+parser = ArgumentParser();
+group = parser.add_mutually_exclusive_group();
+group.add_argument('--layout', type=str, help="target layout, e.g. '90747013'");
+group.add_argument('--build', type=str, help="target build, e.g. '10.0.0.43342'");
+parser.add_argument('dbds', type=str, nargs='*', help='directory with / list of for dbd files to process');
+parser.add_argument('--output', type=str, default=os.path.join(script_dir, 'dbds.sql'), help='file or directory to dump sql to');
+args = parser.parse_args();
+
+dbds:list[str] = args.dbds or os.path.join(
+    os.path.dirname(        # WoWDBDefs/
+        os.path.dirname(    # code/
+            script_dir      # Python/
+        )),
+    'definitions'
+);
+if not dbds[0].endswith(dbd.file_suffix):
+    dbds = glob(os.path.join(dbds[0], '*.dbd'));
+
+print(f"Found {len(dbds)} definitions to process");
+
+outfile:str = args.output;
+outdir:str = '';
+if outfile.endswith('.sql'):
+    with open(outfile, 'w') as file:
+        file.write("SET SESSION FOREIGN_KEY_CHECKS=0;\n");
+else:
+    if not os.path.isdir(outfile):
+        os.makedirs(outfile);
+    outdir = outfile;
+    outfile = None;
+
+print(f"Outputting to {outdir or outfile}");
+
+def get_sql_type(type:str, int_width:int=0, is_unsigned:bool=False)->str:
+    type = {
+        'uint'      : 'int',
+        #'int'      : 'int',
+        'locstring' : 'text',
+        'string'    : 'text',
+        #'float'    : 'float'
+    }.get(type, type);
+
+    default = {
+        'int'   : '0',
+        'text'  : "''",
+        'float' : '0.0'
+    }.get(type, 'NULL');
+
+    type = {
+        8  : 'tinyint',
+        16 : 'smallint',
+        32 : 'mediumint',
+        64 : 'bigint'
+    }.get(int_width, type);
+
+    if is_unsigned:
+        type += ' unsigned';
+
+    return f"{type} DEFAULT {default}";
+
+file:str
+for file in dbds:
+    parsed:dbd.dbd_file = dbd.parse_dbd_file(file);
+    if not len(parsed.definitions):
+        print(f"No definitions found in {file}! Skipping");
+        continue;
+
+    types:dict[str,str] = {};
+    foreigns:dict[str,str] = {};
+    column:dbd.column_definition
+    for column in parsed.columns:
+        types[column.name] = column.type;
+        if column.foreign:
+            foreigns[column.name] = f"FOREIGN KEY (`{column.name}`) REFERENCES `{column.foreign.table}` (`{column.foreign.column}`) ON DELETE NO ACTION ON UPDATE NO ACTION";
+
+    definition:dbd.definitions = None;
+    if args.layout:
+        definition = next(defn for defn in parsed.definitions if args.layout in defn.layouts);
+        if not definition:
+            print(f"No definition found for layout {args.layout}! Skipping");
+            continue;
+    elif args.build:
+        definition = next(defn for defn in parsed.definitions if args.build in defn.builds);
+
+    if not definition:
+        definition = max(parsed.definitions, key =
+            lambda defn: max(getattr(build, 'build', getattr(build[-1], 'build', 0)) for build in defn.builds)
+        );
+
+    name:str = os.path.splitext(os.path.basename(file))[0];
+
+    # TODO: include comments in sql
+    columns:list[str] = [];
+    indices:list[str] = [];
+    fkeys:list[str] = [];
+    entry:dbd.definition_entry
+    for entry in definition.entries:
+        column = f"`{entry.column}` {get_sql_type(types.get(entry.column))}";
+        if 'id' in entry.annotation:
+            column += ' PRIMARY KEY';
+        elif entry.column in foreigns.keys():
+            fkeys.append(foreigns.get(entry.column));
+        elif 'relation' in entry.annotation:
+            indices.append(f"INDEX (`{entry.column}`)");
+            # TODO: Get self-referencing keys to work
+            #fkeys.append(f"FOREIGN KEY (`{entry.column}`) REFERENCES `{name}` (`{entry.column}`) ON DELETE NO ACTION ON UPDATE NO ACTION");
+
+        columns.append(column);
+
+    fields:list[str] = [','.join(columns)];
+    if len(indices):
+        fields.append(', '.join(indices));
+    if len(fkeys):
+        fields.append(', '.join(fkeys));
+
+    stmt:str = f"CREATE OR REPLACE TABLE `{name}` ({', '.join(fields)})";
+
+    if outfile:
+        with open(outfile, 'a') as file:
+            file.write(f"{stmt};\n");
+    elif outdir:
+        with open(os.path.join(outdir, f"{name}.sql"), 'w') as file:
+            file.write(stmt);
+
+if outfile:
+    with open(outfile, 'a') as file:
+        file.write("SET SESSION FOREIGN_KEY_CHECKS=1;\n");
+
+print('Done.');

From ffd3b9c11cf97794873b81f50f77403f717f1c5a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=D0=A1=D0=BC=D0=B5=D1=80=D0=B4=D0=BE=D0=BA=D1=80=D1=8B?=
 =?UTF-8?q?=D0=BB?=
Date: Sat, 11 Feb 2023 10:26:40 +0200
Subject: [PATCH 2/4] Complete get_sql_type() call

i forgor
---
 code/Python/dbd_to_sql.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/code/Python/dbd_to_sql.py b/code/Python/dbd_to_sql.py
index 0d81d8f2a6a..ad5537b5ed1 100755
--- a/code/Python/dbd_to_sql.py
+++ b/code/Python/dbd_to_sql.py
@@ -104,7 +104,7 @@ def get_sql_type(type:str, int_width:int=0, is_unsigned:bool=False)->str:
     fkeys:list[str] = [];
     entry:dbd.definition_entry
     for entry in definition.entries:
-        column = f"`{entry.column}` {get_sql_type(types.get(entry.column))}";
+        column = f"`{entry.column}` {get_sql_type(types.get(entry.column), entry.int_width, entry.is_unsigned)}";
         if 'id' in entry.annotation:
             column += ' PRIMARY KEY';
         elif entry.column in foreigns.keys():

From aba9f9a4d39b750e7bc2231475e5347d20a33af7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=D0=A1=D0=BC=D0=B5=D1=80=D0=B4=D0=BE=D0=BA=D1=80=D1=8B?=
 =?UTF-8?q?=D0=BB?=
Date: Sat, 11 Feb 2023 12:05:04 +0200
Subject: [PATCH 3/4] Process FK-referenced tables out of order, use their
 datatypes for referrer fields

---
 code/Python/dbd_to_sql.py | 53 +++++++++++++++++++++++++++++----------
 1 file changed, 40 insertions(+), 13 deletions(-)

diff --git a/code/Python/dbd_to_sql.py b/code/Python/dbd_to_sql.py
index ad5537b5ed1..a01bef71b91 100755
--- a/code/Python/dbd_to_sql.py
+++ b/code/Python/dbd_to_sql.py
@@ -3,6 +3,7 @@
 import dbd
 import os
 from argparse import ArgumentParser
+from collections import defaultdict
 from glob import glob
 
 script_dir:str = os.path.dirname(os.path.abspath(__file__));
@@ -67,27 +68,32 @@ def get_sql_type(type:str, int_width:int=0, is_unsigned:bool=False)->str:
 
     return f"{type} DEFAULT {default}";
 
-file:str
-for file in dbds:
+keys:dict[str, dict[str, str]] = defaultdict(dict);
+def process_dbd(file:str)->bool:
     parsed:dbd.dbd_file = dbd.parse_dbd_file(file);
     if not len(parsed.definitions):
         print(f"No definitions found in {file}! Skipping");
-        continue;
+        return False;
+
+    dirname:str = os.path.dirname(file);
+    name:str = os.path.splitext(os.path.basename(file))[0];
+    if keys.get(name, None):
+        return True; # Already processed
 
     types:dict[str,str] = {};
-    foreigns:dict[str,str] = {};
+    foreigns:dict[str,list[str]] = {};
     column:dbd.column_definition
     for column in parsed.columns:
         types[column.name] = column.type;
         if column.foreign:
-            foreigns[column.name] = f"FOREIGN KEY (`{column.name}`) REFERENCES `{column.foreign.table}` (`{column.foreign.column}`) ON DELETE NO ACTION ON UPDATE NO ACTION";
+            foreigns[column.name] = column.foreign;
 
     definition:dbd.definitions = None;
     if args.layout:
         definition = next(defn for defn in parsed.definitions if args.layout in defn.layouts);
         if not definition:
             print(f"No definition found for layout {args.layout}! Skipping");
-            continue;
+            return False;
     elif args.build:
         definition = next(defn for defn in parsed.definitions if args.build in defn.builds);
 
@@ -96,25 +102,41 @@ def get_sql_type(type:str, int_width:int=0, is_unsigned:bool=False)->str:
             lambda defn: max(getattr(build, 'build', getattr(build[-1], 'build', 0)) for build in defn.builds)
         );
 
-    name:str = os.path.splitext(os.path.basename(file))[0];
-
     # TODO: include comments in sql
     columns:list[str] = [];
     indices:list[str] = [];
     fkeys:list[str] = [];
     entry:dbd.definition_entry
     for entry in definition.entries:
-        column = f"`{entry.column}` {get_sql_type(types.get(entry.column), entry.int_width, entry.is_unsigned)}";
+        sql_type:str = get_sql_type(types.get(entry.column), entry.int_width, entry.is_unsigned);
+        suffix:str = '';
         if 'id' in entry.annotation:
-            column += ' PRIMARY KEY';
-        elif entry.column in foreigns.keys():
-            fkeys.append(foreigns.get(entry.column));
+            suffix = 'PRIMARY KEY';
+            keys[name][entry.column] = sql_type;
+        elif (foreign := foreigns.get(entry.column, None)):
+            # TODO: unhack!
+            if not keys.get(foreign.table.string, {}).get(foreign.column.string, None):
+                foreign_dbd:str = next((f for f in dbds if os.path.basename(f) == f"{foreign.table}.dbd"), None);
+                if foreign_dbd:
+                    if not process_dbd(foreign_dbd):
+                        print(f"Could not process table {foreign.table} referenced by {name}.{entry.column}");
+                        return False;
+                if not foreign_dbd:
+                    print(f"FK {name}.{entry.column} references {foreign.column} in {foreign.table} which was not supplied");
+
+            sql_type = keys[foreign.table.string].get(foreign.column.string, None) or sql_type;
+            fkeys.append(
+                f"FOREIGN KEY (`{entry.column}`) "
+                f"REFERENCES `{foreign.table}` (`{foreign.column}`) "
+                'ON DELETE NO ACTION ON UPDATE NO ACTION'
+            );
         elif 'relation' in entry.annotation:
+            keys[name][entry.column] = sql_type;
             indices.append(f"INDEX (`{entry.column}`)");
             # TODO: Get self-referencing keys to work
             #fkeys.append(f"FOREIGN KEY (`{entry.column}`) REFERENCES `{name}` (`{entry.column}`) ON DELETE NO ACTION ON UPDATE NO ACTION");
 
-        columns.append(column);
+        columns.append(f"`{entry.column}` {sql_type} {suffix}");
 
     fields:list[str] = [','.join(columns)];
     if len(indices):
@@ -131,6 +153,11 @@ def get_sql_type(type:str, int_width:int=0, is_unsigned:bool=False)->str:
         with open(os.path.join(outdir, f"{name}.sql"), 'w') as file:
             file.write(stmt);
 
+    return True;
+
+for file in dbds:
+    process_dbd(file);
+
 if outfile:
     with open(outfile, 'a') as file:
         file.write("SET SESSION FOREIGN_KEY_CHECKS=1;\n");

From 84b404bc59353c6d91a90ab0909f7d5d2d019f27 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=D0=A1=D0=BC=D0=B5=D1=80=D0=B4=D0=BE=D0=BA=D1=80=D1=8B?=
 =?UTF-8?q?=D0=BB?=
Date: Sun, 12 Feb 2023 05:31:24 +0200
Subject: [PATCH 4/4] Output to stdout by default, log to stderr; remove
 separate file output mode

---
 code/Python/dbd_to_sql.py | 58 ++++++++++++++++-----------------------
 1 file changed, 23 insertions(+), 35 deletions(-)

diff --git a/code/Python/dbd_to_sql.py b/code/Python/dbd_to_sql.py
index a01bef71b91..366230cce03 100755
--- a/code/Python/dbd_to_sql.py
+++ b/code/Python/dbd_to_sql.py
@@ -5,41 +5,39 @@
 from argparse import ArgumentParser
 from collections import defaultdict
 from glob import glob
-
-script_dir:str = os.path.dirname(os.path.abspath(__file__));
+from io import TextIOWrapper
+from sys import stdout, stderr
 
 parser = ArgumentParser();
 group = parser.add_mutually_exclusive_group();
 group.add_argument('--layout', type=str, help="target layout, e.g. '90747013'");
 group.add_argument('--build', type=str, help="target build, e.g. '10.0.0.43342'");
'10.0.0.43342'"); parser.add_argument('dbds', type=str, nargs='*', help='directory with / list of for dbd files to process'); -parser.add_argument('--output', type=str, default=os.path.join(script_dir, 'dbds.sql'), help='file or directory to dump sql to'); +parser.add_argument('--output', type=str, default=stdout, help='file to dump sql to'); args = parser.parse_args(); dbds:list[str] = args.dbds or os.path.join( os.path.dirname( # WoWDBDefs/ os.path.dirname( # code/ - script_dir # Python/ - )), + os.path.dirname( # Python/ + os.path.abspath(__file__) # ./dbd_to_sql.py + ))), 'definitions' ); if not dbds[0].endswith(dbd.file_suffix): dbds = glob(os.path.join(dbds[0], '*.dbd')); -print(f"Found {len(dbds)} definitions to process"); +outfile:TextIOWrapper = args.output; +if type(outfile) != TextIOWrapper: + outfile = open(outfile, 'a'); + +def log(*args, **kwargs)->None: + print(*args, file=stderr if outfile == stdout else stdout, **kwargs); -outfile:str = args.output; -outdir:str = ''; -if outfile.endswith('.sql'): - with open(outfile, 'w') as file: - file.write("SET SESSION FOREIGN_KEY_CHECKS=0;\n"); -else: - if not os.path.isdir(outfile): - os.makedirs(outfile); - outdir = outfile; - outfile = None; +log(f"Found {len(dbds)} definitions to process"); -print(f"Outputting to {outdir or outfile}"); +log(f"Outputting to {outfile}"); +outfile.write("SET SESSION FOREIGN_KEY_CHECKS=0;\n"); def get_sql_type(type:str, int_width:int=0, is_unsigned:bool=False)->str: type = { @@ -72,7 +70,7 @@ def get_sql_type(type:str, int_width:int=0, is_unsigned:bool=False)->str: def process_dbd(file:str)->bool: parsed:dbd.dbd_file = dbd.parse_dbd_file(file); if not len(parsed.definitions): - print(f"No definitions found in {file}! Skipping"); + log(f"No definitions found in {file}! Skipping"); return False; dirname:str = os.path.dirname(file); @@ -92,7 +90,7 @@ def process_dbd(file:str)->bool: if args.layout: definition = next(defn for defn in parsed.definitions if args.layout in defn.layouts); if not definition: - print(f"No definition found for layout {args.layout}! Skipping"); + log(f"No definition found for layout {args.layout}! 
Skipping"); return False; elif args.build: definition = next(defn for defn in parsed.definitions if args.build in defn.builds); @@ -119,10 +117,10 @@ def process_dbd(file:str)->bool: foreign_dbd:str = next((f for f in dbds if os.path.basename(f) == f"{foreign.table}.dbd"), None); if foreign_dbd: if not process_dbd(foreign_dbd): - print(f"Could not process table {foreign.table} referenced by {name}.{entry.column}"); + log(f"Could not process table {foreign.table} referenced by {name}.{entry.column}"); return False; if not foreign_dbd: - print(f"FK {name}.{entry.column} references {foreign.column} in {foreign.table} which was not supplied"); + log(f"FK {name}.{entry.column} references {foreign.column} in {foreign.table} which was not supplied"); sql_type = keys[foreign.table.string].get(foreign.column.string, None) or sql_type; fkeys.append( @@ -144,22 +142,12 @@ def process_dbd(file:str)->bool: if len(fkeys): fields.append(', '.join(fkeys)); - stmt:str = f"CREATE OR REPLACE TABLE `{name}` ({', '.join(fields)})"; - - if outfile: - with open(outfile, 'a') as file: - file.write(f"{stmt};\n"); - elif outdir: - with open(os.path.join(outdir, f"{name}.sql"), 'w') as file: - file.write(stmt); - + outfile.write(f"CREATE OR REPLACE TABLE `{name}` ({', '.join(fields)});\n"); return True; for file in dbds: process_dbd(file); -if outfile: - with open(outfile, 'a') as file: - file.write("SET SESSION FOREIGN_KEY_CHECKS=1;\n"); - -print('Done.'); +outfile.write("SET SESSION FOREIGN_KEY_CHECKS=1;\n"); +outfile.close(); +log('Done.');