-
Notifications
You must be signed in to change notification settings - Fork 4
Expand file tree
/
Copy pathconvert_snapshot_to_sql.py
More file actions
159 lines (127 loc) · 5.14 KB
/
convert_snapshot_to_sql.py
File metadata and controls
159 lines (127 loc) · 5.14 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
"""
Convert dataset snapshots to SQL files for evaluation.
Automatically detects dataset type and chooses appropriate output format:
- Spider/Bird: Single JSON file with {question_id: sql}
- Spider2: Individual SQL files per instance_id
"""
import sys
sys.path.append(".")
import json
import argparse
from pathlib import Path
from typing import Optional
from app.logger import configure_logger, logger
def _default_json_output_path(snapshot_path: str) -> str:
snapshot = Path(snapshot_path)
if snapshot.suffix:
return str(snapshot.with_suffix(".json"))
return f"{snapshot_path}.json"
def _infer_output_format_from_snapshot(snapshot_path: str) -> str:
    """Inspect the snapshot's first item to pick an output format.

    Items carrying an ``instance_id`` attribute are Spider2-style and get
    per-instance SQL files; everything else (including an empty snapshot)
    falls back to the single-JSON format.
    """
    from app.dataset import load_dataset

    items = load_dataset(snapshot_path)
    if len(items) == 0:
        return "json"
    return "sql_files" if hasattr(items[0], "instance_id") else "json"
def convert_to_json_file(snapshot_path: str, output_path: Optional[str] = None):
    """
    Convert a dataset snapshot to a single JSON file (for Spider/Bird datasets).
    Format: {question_id: sql_string}
    """
    from app.dataset import load_dataset

    dataset = load_dataset(snapshot_path)

    # Map each question id to its selected SQL; fall back to the sentinel
    # "Error" when no valid SQL was produced for an item.
    results = {}
    for item in dataset:
        sql = item.final_selected_sql
        if sql is None:
            logger.warning(f"Item {item.question_id}: No valid SQL found, using 'Error'")
            sql = "Error"
        results[str(item.question_id)] = sql.strip()

    target = output_path if output_path is not None else _default_json_output_path(snapshot_path)
    with open(target, "w", encoding="utf-8") as f:
        json.dump(results, f, indent=4)

    logger.info("Dataset converted to JSON file successfully")
    logger.info(f"Output: {target}")
    logger.info(f"Total items: {len(dataset)}")
def convert_to_sql_files(snapshot_path: str, output_dir: Optional[str] = None):
    """
    Convert a dataset snapshot to individual SQL files (for Spider2 datasets).
    Format: One SQL file per instance_id.
    """
    from app.dataset import load_dataset

    dataset = load_dataset(snapshot_path)

    # Default output lives next to the snapshot in a "sql_output" directory.
    target_dir = output_dir if output_dir is not None else str(Path(snapshot_path).parent / "sql_output")
    out_root = Path(target_dir)
    out_root.mkdir(parents=True, exist_ok=True)

    for item in dataset:
        # Prefer instance_id (Spider2); fall back to question_id otherwise.
        identifier = getattr(item, "instance_id", None) or str(item.question_id)
        sql = item.final_selected_sql
        if sql is None:
            logger.warning(f"Item {identifier}: No valid SQL found, using 'Error'")
            sql = "Error"
        (out_root / f"{identifier}.sql").write_text(sql.strip(), encoding="utf-8")

    logger.info("Dataset converted to SQL files successfully")
    logger.info(f"Output directory: {target_dir}")
    logger.info(f"Total items: {len(dataset)}")
def auto_convert(
    snapshot_path: str,
    dataset_type: Optional[str] = None,
    output_path: Optional[str] = None,
    force_format: Optional[str] = None,
):
    """Resolve the output format and dispatch to the matching converter.

    Precedence: an explicit ``force_format`` wins; otherwise ``dataset_type``
    ("spider2" -> sql_files, anything else -> json); otherwise the format is
    inferred from the snapshot's contents.
    """
    if force_format is not None:
        output_format = force_format
        logger.info(f"Using forced format: {output_format}")
    elif dataset_type is not None:
        output_format = "sql_files" if dataset_type == "spider2" else "json"
        logger.info(f"Auto-detected format '{output_format}' for dataset type '{dataset_type}'")
    else:
        output_format = _infer_output_format_from_snapshot(snapshot_path)
        logger.info(f"Auto-detected format '{output_format}' from snapshot contents")

    converter = {
        "json": convert_to_json_file,
        "sql_files": convert_to_sql_files,
    }.get(output_format)
    if converter is None:
        raise ValueError(f"Invalid output format: {output_format}")
    converter(snapshot_path, output_path)
def main():
    """CLI entry point: parse arguments, configure logging, and run the conversion.

    Defaults the snapshot path to the configured sql_selection save_path when
    --snapshot_path is not provided.
    """
    parser = argparse.ArgumentParser(
        description="Convert dataset snapshots to SQL files (auto-detects format based on dataset type)"
    )
    parser.add_argument(
        "--snapshot_path",
        type=str,
        default=None,
        help="Path to the dataset snapshot. Default: use config sql_selection save_path",
    )
    parser.add_argument(
        "--output",
        type=str,
        default=None,
        help="Output path (JSON file) or directory (SQL files). Default: auto-determined",
    )
    parser.add_argument(
        "--format",
        type=str,
        choices=["json", "sql_files"],
        default=None,
        help="Force output format (json or sql_files). If not specified, auto-detects from dataset type or snapshot contents",
    )
    args = parser.parse_args()

    from app.config import get_config

    app_config = get_config()
    # Fix: configure the logger unconditionally. Previously this only ran when
    # --snapshot_path was omitted, so explicit snapshot paths produced
    # unconfigured log output.
    configure_logger(app_config.logger_config.print_level)

    snapshot_path = args.snapshot_path
    if snapshot_path is None:
        snapshot_path = app_config.sql_selection_config.save_path
    logger.info(f"Converting dataset snapshot {snapshot_path}")
    auto_convert(
        snapshot_path=snapshot_path,
        output_path=args.output,
        force_format=args.format,
    )
if __name__ == "__main__":
    main()