 from pathlib import Path

 import cloudpickle
-import ujson
+import orjson

 from dspy.utils.saving import get_dependency_versions

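The ujson-to-orjson swap above is what forces the file-mode changes in the hunks below: orjson.dumps returns UTF-8-encoded bytes rather than a str (and leaves non-ASCII characters unescaped, matching the old ensure_ascii=False), and indentation plus the trailing newline are requested via option flags instead of keyword arguments. A minimal sketch of the new call, with a hypothetical payload and path:

    import orjson

    data = {"greeting": "héllo", "answer": "4"}
    payload = orjson.dumps(data, option=orjson.OPT_INDENT_2 | orjson.OPT_APPEND_NEWLINE)
    assert isinstance(payload, bytes)        # bytes, not str
    with open("example.json", "wb") as f:    # hence binary mode everywhere below
        f.write(payload)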
@@ -153,8 +153,8 @@ def reset_copy(self):

         return new_instance

-    def dump_state(self):
-        return {name: param.dump_state() for name, param in self.named_parameters()}
+    def dump_state(self, json_mode=True):
+        return {name: param.dump_state(json_mode=json_mode) for name, param in self.named_parameters()}

     def load_state(self, state):
         for name, param in self.named_parameters():
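With this hunk, dump_state takes a json_mode flag and forwards it to every named parameter, so callers can request either a JSON-safe state dict (the default) or one meant for pickling; the .pkl branch later in this diff passes json_mode=False. A rough usage sketch, using a stock dspy module as the example program:

    import dspy

    program = dspy.ChainOfThought("question -> answer")   # any dspy.Module works here
    json_safe_state = program.dump_state()                 # json_mode=True by default
    pickle_state = program.dump_state(json_mode=False)     # what save(...) uses for .pkl

    # Both are plain dicts keyed by parameter name, per the comprehension above.
    assert isinstance(json_safe_state, dict)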
@@ -169,10 +169,10 @@ def save(self, path, save_program=False, modules_to_serialize=None):
         - `save_program=True`: Save the whole module to a directory via cloudpickle, which contains both the state and
             architecture of the model.

-        If `save_program=True` and `modules_to_serialize` are provided, it will register those modules for serialization
-        with cloudpickle's `register_pickle_by_value`. This causes cloudpickle to serialize the module by value rather
-        than by reference, ensuring the module is fully preserved along with the saved program. This is useful
-        when you have custom modules that need to be serialized alongside your program. If None, then no modules
+        If `save_program=True` and `modules_to_serialize` are provided, it will register those modules for serialization
+        with cloudpickle's `register_pickle_by_value`. This causes cloudpickle to serialize the module by value rather
+        than by reference, ensuring the module is fully preserved along with the saved program. This is useful
+        when you have custom modules that need to be serialized alongside your program. If None, then no modules
         will be registered for serialization.

         We also save the dependency versions, so that the loaded model can check if there is a version mismatch on
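As the docstring above explains, modules_to_serialize only matters for whole-program saves: each entry is registered with cloudpickle's register_pickle_by_value so its code is embedded in the pickle instead of being referenced by import path. A hedged usage sketch, where my_custom_module is a hypothetical placeholder for a local module your program depends on:

    import dspy
    import my_custom_module  # hypothetical local module to embed in the pickle

    program = dspy.ChainOfThought("question -> answer")
    program.save(
        "compiled_program/",                      # a directory, since save_program=True
        save_program=True,
        modules_to_serialize=[my_custom_module],
    )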
@@ -215,24 +215,26 @@ def save(self, path, save_program=False, modules_to_serialize=None):
                     f"Saving failed with error: {e}. Please remove the non-picklable attributes from your DSPy program, "
                     "or consider using state-only saving by setting `save_program=False`."
                 )
-            with open(path / "metadata.json", "w", encoding="utf-8") as f:
-                ujson.dump(metadata, f, indent=2, ensure_ascii=False)
+            with open(path / "metadata.json", "wb") as f:
+                f.write(orjson.dumps(metadata, option=orjson.OPT_INDENT_2 | orjson.OPT_APPEND_NEWLINE))

             return

-        state = self.dump_state()
-        state["metadata"] = metadata
         if path.suffix == ".json":
+            state = self.dump_state()
+            state["metadata"] = metadata
             try:
-                with open(path, "w", encoding="utf-8") as f:
-                    f.write(ujson.dumps(state, indent=2, ensure_ascii=False))
+                with open(path, "wb") as f:
+                    f.write(orjson.dumps(state, option=orjson.OPT_INDENT_2 | orjson.OPT_APPEND_NEWLINE))
             except Exception as e:
                 raise RuntimeError(
                     f"Failed to save state to {path} with error: {e}. Your DSPy program may contain non "
                     "json-serializable objects, please consider saving the state in .pkl by using `path` ending "
                     "with `.pkl`, or saving the whole program by setting `save_program=True`."
                 )
         elif path.suffix == ".pkl":
+            state = self.dump_state(json_mode=False)
+            state["metadata"] = metadata
             with open(path, "wb") as f:
                 cloudpickle.dump(state, f)
         else:
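After this hunk, state-only saving builds the state dict inside each suffix branch: .json files go through dump_state() (JSON-safe) plus orjson, .pkl files go through dump_state(json_mode=False) plus cloudpickle, and the metadata dict is attached under the "metadata" key in both cases. A short sketch of the two call sites:

    import dspy

    program = dspy.ChainOfThought("question -> answer")
    program.save("program_state.json")   # dump_state() + orjson, human-readable
    program.save("program_state.pkl")    # dump_state(json_mode=False) + cloudpickle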
@@ -248,8 +250,8 @@ def load(self, path):
         path = Path(path)

         if path.suffix == ".json":
-            with open(path, encoding="utf-8") as f:
-                state = ujson.loads(f.read())
+            with open(path, "rb") as f:
+                state = orjson.loads(f.read())
         elif path.suffix == ".pkl":
             with open(path, "rb") as f:
                 state = cloudpickle.load(f)
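Loading mirrors the new save paths: .json state files are now read in binary mode and decoded with orjson.loads, while .pkl files still go through cloudpickle. A hedged round-trip sketch, where the second instance is a freshly constructed program with the same architecture as the saved one:

    import dspy

    program = dspy.ChainOfThought("question -> answer")
    program.save("program_state.json")

    fresh = dspy.ChainOfThought("question -> answer")   # same architecture, fresh instance
    fresh.load("program_state.json")                     # state read back via orjson.loads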