Commit 419a4c3

feat(core): add llm_config and embedding_model_config to base op
1 parent 865ddd2 commit 419a4c3

1 file changed: +14 −8 lines

flowllm/core/op/base_op.py

Lines changed: 14 additions & 8 deletions

@@ -165,6 +165,8 @@ def __init__(
         self.timer = Timer(name=self.name)
         self.context: FlowContext | None = None
         self._cache: CacheHandler | None = None
+        self.llm_config: LLMConfig | None = None
+        self.embedding_model_config: EmbeddingModelConfig | None = None
 
     @property
     def short_name(self) -> str:
@@ -555,9 +557,9 @@ def llm(self) -> BaseLLM:
             BaseLLM instance configured from service config
         """
         if isinstance(self._llm, str):
-            llm_config: LLMConfig = C.service_config.llm[self._llm]
-            llm_cls = C.get_llm_class(llm_config.backend)
-            self._llm = llm_cls(model_name=llm_config.model_name, **llm_config.params)
+            self.llm_config: LLMConfig = C.service_config.llm[self._llm]
+            llm_cls = C.get_llm_class(self.llm_config.backend)
+            self._llm = llm_cls(model_name=self.llm_config.model_name, **self.llm_config.params)
 
         return self._llm
 
@@ -569,11 +571,11 @@ def embedding_model(self) -> BaseEmbeddingModel:
             BaseEmbeddingModel instance configured from service config
         """
         if isinstance(self._embedding_model, str):
-            embedding_model_config: EmbeddingModelConfig = C.service_config.embedding_model[self._embedding_model]
-            embedding_model_cls = C.get_embedding_model_class(embedding_model_config.backend)
+            self.embedding_model_config: EmbeddingModelConfig = C.service_config.embedding_model[self._embedding_model]
+            embedding_model_cls = C.get_embedding_model_class(self.embedding_model_config.backend)
             self._embedding_model = embedding_model_cls(
-                model_name=embedding_model_config.model_name,
-                **embedding_model_config.params,
+                model_name=self.embedding_model_config.model_name,
+                **self.embedding_model_config.params,
             )
 
         return self._embedding_model
@@ -623,7 +625,11 @@ def token_count(self, messages: List[Message], tools: List[ToolCall] | None = No
         Returns:
             Total token count as an integer
         """
-        llm_config: LLMConfig = C.service_config.llm[self._llm]
+        if self.llm_config is None:
+            llm_config: LLMConfig = C.service_config.llm[self._llm]
+        else:
+            llm_config = self.llm_config
+
         token_count_config: LLMTokenCountConfig = llm_config.token_count
 
         token_count_cls = C.get_token_counter_class(token_count_config.backend)
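As context for the change, here is a minimal, self-contained Python sketch of the pattern this commit introduces: the llm property resolves its string key into an LLMConfig once and keeps it on the instance as self.llm_config, and token_count reuses that cached config, falling back to a registry lookup only when the property was never accessed. This is an illustrative assumption, not flowllm code: OpSketch, SERVICE_LLM_CONFIGS, and token_count_config are hypothetical stand-ins for BaseOp, C.service_config.llm, and the real token_count method.

from __future__ import annotations

from dataclasses import dataclass, field


@dataclass
class LLMConfig:
    """Illustrative stand-in for flowllm's LLMConfig."""
    backend: str
    model_name: str
    params: dict = field(default_factory=dict)


# Hypothetical stand-in for C.service_config.llm
SERVICE_LLM_CONFIGS: dict[str, LLMConfig] = {
    "default": LLMConfig(backend="openai", model_name="gpt-4o-mini"),
}


class OpSketch:
    def __init__(self, llm: str = "default"):
        self._llm: str | object = llm             # starts out as a config key
        self.llm_config: LLMConfig | None = None  # populated lazily, as in the commit

    @property
    def llm(self) -> object:
        if isinstance(self._llm, str):
            # First access: resolve the key and keep the config on the instance.
            self.llm_config = SERVICE_LLM_CONFIGS[self._llm]
            self._llm = object()                  # stand-in for the constructed BaseLLM
        return self._llm

    def token_count_config(self) -> LLMConfig:
        # Mirrors the token_count change: reuse the cached config when available,
        # otherwise fall back to the registry lookup keyed by the original string.
        if self.llm_config is None:
            return SERVICE_LLM_CONFIGS[self._llm]
        return self.llm_config


op = OpSketch()
assert op.llm_config is None                      # nothing resolved yet
_ = op.llm                                        # property access caches the config
assert op.token_count_config() is op.llm_config

The apparent motivation, judging from the diff, is that token_count previously always re-read C.service_config.llm[self._llm]; by caching the resolved config on the instance, the lookup happens once per op and token_count can work with the same config the llm property already resolved.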
