Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 12 additions & 7 deletions openspace/host_detection/resolver.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,10 @@
_env_loaded = False


def _get_env_stripped(name: str) -> str:
return os.environ.get(name, "").strip()


def _load_env_once() -> None:
"""Load .env files once per process.

Expand Down Expand Up @@ -141,8 +145,8 @@ def build_llm_kwargs(model: str) -> tuple[str, Dict[str, Any]]:
source = "inherited env"

has_explicit_llm_override = bool(
os.environ.get("OPENSPACE_LLM_API_BASE")
or os.environ.get("OPENSPACE_LLM_API_KEY")
_get_env_stripped("OPENSPACE_LLM_API_BASE")
or _get_env_stripped("OPENSPACE_LLM_API_KEY")
)
provider_native_env_used = _has_provider_native_env(
resolved_model or _DEFAULT_MODEL
Expand Down Expand Up @@ -185,16 +189,16 @@ def build_llm_kwargs(model: str) -> tuple[str, Dict[str, Any]]:
source = host_source or "host config"

# --- Tier 1: explicit env vars override everything ---
api_key = os.environ.get("OPENSPACE_LLM_API_KEY")
api_key = _get_env_stripped("OPENSPACE_LLM_API_KEY")
if api_key:
kwargs["api_key"] = api_key
source = "OPENSPACE_LLM_* env"

api_base = os.environ.get("OPENSPACE_LLM_API_BASE")
api_base = _get_env_stripped("OPENSPACE_LLM_API_BASE")
if api_base:
kwargs["api_base"] = api_base

extra_headers_raw = os.environ.get("OPENSPACE_LLM_EXTRA_HEADERS")
extra_headers_raw = _get_env_stripped("OPENSPACE_LLM_EXTRA_HEADERS")
if extra_headers_raw:
try:
headers = json.loads(extra_headers_raw)
Expand All @@ -203,7 +207,7 @@ def build_llm_kwargs(model: str) -> tuple[str, Dict[str, Any]]:
except json.JSONDecodeError:
logger.warning("Invalid JSON in OPENSPACE_LLM_EXTRA_HEADERS: %r", extra_headers_raw)

llm_config_raw = os.environ.get("OPENSPACE_LLM_CONFIG")
llm_config_raw = _get_env_stripped("OPENSPACE_LLM_CONFIG")
if llm_config_raw:
try:
llm_config = json.loads(llm_config_raw)
Expand Down Expand Up @@ -322,4 +326,5 @@ def build_grounding_config_path() -> Optional[str]:
except Exception as e:
logger.warning("Failed to write config overrides: %s", e)

return os.environ.get("OPENSPACE_CONFIG_PATH")
config_path = _get_env_stripped("OPENSPACE_CONFIG_PATH")
return config_path or None
36 changes: 36 additions & 0 deletions tests/test_host_detection_resolver.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
import os
import unittest
from unittest.mock import patch

from openspace.host_detection.resolver import build_llm_kwargs, build_grounding_config_path


class ResolverWhitespaceEnvTests(unittest.TestCase):
    """Whitespace-only OPENSPACE_* env vars must behave as if they were unset."""

    def test_whitespace_explicit_llm_env_does_not_disable_host_fallback(self):
        # Host config is the tier-2 fallback; blank explicit env vars must not
        # mask it.
        host_cfg = {"api_key": "host-key", "api_base": "https://host.example/v1"}
        blank_env = {
            "OPENSPACE_LLM_API_KEY": " ",
            "OPENSPACE_LLM_API_BASE": "\t",
            "OPENROUTER_API_KEY": "",
        }
        with patch(
            "openspace.host_detection.nanobot.try_read_nanobot_config",
            return_value=host_cfg,
        ), patch(
            "openspace.host_detection.openclaw.try_read_openclaw_config",
            return_value=None,
        ), patch.dict(os.environ, blank_env, clear=False):
            model, kwargs = build_llm_kwargs("openrouter/anthropic/claude-sonnet-4.5")

            self.assertEqual(model, "openrouter/anthropic/claude-sonnet-4.5")
            self.assertEqual(kwargs.get("api_key"), "host-key")
            self.assertEqual(kwargs.get("api_base"), "https://host.example/v1")

    def test_whitespace_config_path_returns_none(self):
        # A blank OPENSPACE_CONFIG_PATH must resolve to None, not " ".
        with patch.dict(os.environ, {"OPENSPACE_CONFIG_PATH": " "}, clear=False):
            self.assertIsNone(build_grounding_config_path())


# Allow running this test module directly (e.g. `python test_host_detection_resolver.py`)
# in addition to discovery via `python -m unittest` / pytest.
if __name__ == "__main__":
    unittest.main()