@@ -181,7 +181,7 @@ install-wheel-from-presigned = "llama_stack.cli.scripts.run:install_wheel_from_p
 
 [tool.setuptools.packages.find]
 where = ["src"]
-include = ["llama_stack", "llama_stack.*", "llama-stack-api", "llama-stack-api.*"]
+include = ["llama_stack", "llama_stack.*", "llama_stack_api", "llama_stack_api.*"]
 
 [[tool.uv.index]]
 name = "pytorch-cpu"
@@ -191,7 +191,7 @@ explicit = true
 [tool.uv.sources]
 torch = [{ index = "pytorch-cpu" }]
 torchvision = [{ index = "pytorch-cpu" }]
-llama-stack-api = [{ path = "src/llama-stack-api", editable = true }]
+llama-stack-api = [{ path = "src/llama_stack_api", editable = true }]
 
 [tool.ruff]
 line-length = 120
@@ -258,7 +258,7 @@ unfixable = [
 ] # Using import * is acceptable (or at least tolerated) in an __init__.py of a package API
 
 [tool.mypy]
-mypy_path = ["src", "src/llama-stack-api"]
+mypy_path = ["src"]
 packages = ["llama_stack", "llama_stack_api"]
 plugins = ['pydantic.mypy']
 disable_error_code = []
@@ -281,14 +281,12 @@ exclude = [
     "^src/llama_stack/core/store/registry\\.py$",
     "^src/llama_stack/core/utils/exec\\.py$",
     "^src/llama_stack/core/utils/prompt_for_config\\.py$",
-    # Moved to llama-stack-api but still excluded
     "^src/llama_stack/models/llama/llama3/interface\\.py$",
     "^src/llama_stack/models/llama/llama3/tokenizer\\.py$",
     "^src/llama_stack/models/llama/llama3/tool_utils\\.py$",
     "^src/llama_stack/models/llama/llama3/generation\\.py$",
     "^src/llama_stack/models/llama/llama3/multimodal/model\\.py$",
     "^src/llama_stack/models/llama/llama4/",
-    "^src/llama-stack-api/llama_stack_api/core/telemetry/telemetry\\.py$",
     "^src/llama_stack/providers/inline/agents/meta_reference/",
     "^src/llama_stack/providers/inline/datasetio/localfs/",
     "^src/llama_stack/providers/inline/eval/meta_reference/eval\\.py$",
@@ -342,9 +340,7 @@ exclude = [
     "^src/llama_stack/providers/utils/telemetry/dataset_mixin\\.py$",
     "^src/llama_stack/providers/utils/telemetry/trace_protocol\\.py$",
     "^src/llama_stack/providers/utils/telemetry/tracing\\.py$",
-    "^src/llama-stack-api/llama_stack_api/core/telemetry/trace_protocol\\.py$",
-    "^src/llama-stack-api/llama_stack_api/core/telemetry/tracing\\.py$",
-    "^src/llama-stack-api/llama_stack_api/strong_typing/auxiliary\\.py$",
+    "^src/llama_stack_api/strong_typing/auxiliary\\.py$",
     "^src/llama_stack/distributions/template\\.py$",
 ]
350346
0 commit comments