
Commit 677a01e

update code for case of no lora adapter
1 parent 5cd12ba commit 677a01e

File tree

1 file changed: +4 -3 lines changed

src/engine.py

Lines changed: 4 additions & 3 deletions
@@ -134,26 +134,27 @@ async def _initialize_engines(self):
         if lora_modules is not None:
             try:
                 lora_modules = json.loads(lora_modules)
-                lora_modules = LoRAModulePath(**lora_modules)
+                lora_modules = [LoRAModulePath(**lora_modules)]
             except:
                 lora_modules = None


+
         self.chat_engine = OpenAIServingChat(
             engine_client=self.llm,
             model_config=self.model_config,
             base_model_paths=self.base_model_paths,
             response_role=self.response_role,
             chat_template=self.tokenizer.tokenizer.chat_template,
-            lora_modules=[lora_modules],
+            lora_modules=lora_modules,
             prompt_adapters=None,
             request_logger=None
         )
         self.completion_engine = OpenAIServingCompletion(
             engine_client=self.llm,
             model_config=self.model_config,
             base_model_paths=self.base_model_paths,
-            lora_modules=[lora_modules],
+            lora_modules=lora_modules,
             prompt_adapters=None,
             request_logger=None
         )

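Before this commit, the parsed LoRAModulePath was wrapped into a list at each call site (lora_modules=[lora_modules]), so when no adapter was configured or the JSON failed to parse, the serving engines were handed [None] instead of None. The commit builds the list inside the try block and passes lora_modules through unchanged, so the no-adapter case stays None. The sketch below is a minimal illustration of that behaviour, not part of the commit; LoRAModulePath here is a hypothetical stand-in for vLLM's class of the same name, and its field names are assumptions for illustration only.

# Minimal sketch (not part of the commit) of the no-adapter handling
# after this change: parse failures and a missing spec both yield None,
# never [None].
import json
from dataclasses import dataclass
from typing import Optional

@dataclass
class LoRAModulePath:
    # Hypothetical stand-in for vLLM's LoRAModulePath; fields assumed.
    name: str
    path: str

def parse_lora_modules(raw: Optional[str]) -> Optional[list]:
    """Mirror the parsing done in _initialize_engines after this commit."""
    if raw is None:
        return None
    try:
        # Build the list inside the try block, so a parse failure leaves
        # the value as None instead of wrapping it later into [None].
        return [LoRAModulePath(**json.loads(raw))]
    except Exception:
        return None

print(parse_lora_modules('{"name": "my-adapter", "path": "/adapters/my-adapter"}'))
# -> [LoRAModulePath(name='my-adapter', path='/adapters/my-adapter')]
print(parse_lora_modules(None))        # -> None (no adapter configured)
print(parse_lora_modules("not json"))  # -> None (invalid spec falls back cleanly)

Note that the commit keeps the bare except:, so any malformed adapter spec silently falls back to running without LoRA adapters rather than raising.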