-
Notifications
You must be signed in to change notification settings - Fork 76
[refactor] chore: support ci multi tests #292
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
ec6141d
73c63ad
5e14def
7e78d06
f942c52
c9f8cf5
e12a640
bcbc607
458fdc7
0ac6d23
454bc3e
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -32,12 +32,15 @@ | |
| ) | ||
| from flink_agents.plan.tools.function_tool import FunctionTool, from_callable | ||
|
|
||
| # Mark all tests in this module as ollama tests | ||
| pytestmark = pytest.mark.ollama | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This should be |
||
|
|
||
| test_model = os.environ.get("OLLAMA_CHAT_MODEL", "qwen3:0.6b") | ||
| current_dir = Path(__file__).parent | ||
|
|
||
| try: | ||
| # only auto setup ollama in ci with python 3.10 to reduce ci cost. | ||
| if "3.10" in sys.version: | ||
| # Auto setup ollama in CI environment (when CI env var is set) | ||
| if os.environ.get("CI") and sys.platform == "linux": | ||
| subprocess.run( | ||
| ["bash", f"{current_dir}/start_ollama_server.sh"], timeout=300, check=True | ||
| ) | ||
|
|
@@ -60,7 +63,9 @@ | |
| client is None, reason="Ollama client is not available or test model is missing" | ||
| ) | ||
| def test_ollama_chat() -> None: # noqa :D103 | ||
| server = OllamaChatModelConnection(name="ollama", request_timeout=120.0) | ||
| # Use longer timeout in CI environment (slower resources) | ||
| request_timeout = 120.0 if os.environ.get("CI") else 30.0 | ||
| server = OllamaChatModelConnection(name="ollama", request_timeout=request_timeout) | ||
| response = server.chat( | ||
| [ChatMessage(role=MessageRole.USER, content="Hello!")], model=test_model | ||
| ) | ||
|
|
@@ -94,7 +99,9 @@ def get_tool(name: str, type: ResourceType) -> FunctionTool: # noqa :D103 | |
| client is None, reason="Ollama client is not available or test model is missing" | ||
| ) | ||
| def test_ollama_chat_with_tools() -> None: # noqa :D103 | ||
| connection = OllamaChatModelConnection(name="ollama", request_timeout=120.0) | ||
| # Use longer timeout for tool calling in CI environment (slower resources) | ||
| request_timeout = 120.0 if os.environ.get("CI") else 30.0 | ||
| connection = OllamaChatModelConnection(name="ollama", request_timeout=request_timeout) | ||
|
|
||
| def get_resource(name: str, type: ResourceType) -> Resource: | ||
| if type == ResourceType.TOOL: | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -29,12 +29,15 @@ | |
| OllamaEmbeddingModelSetup, | ||
| ) | ||
|
|
||
| # Mark all tests in this module as ollama tests | ||
| pytestmark = pytest.mark.ollama | ||
|
|
||
| test_model = os.environ.get("OLLAMA_EMBEDDING_MODEL", "all-minilm:22m") | ||
| current_dir = Path(__file__).parent | ||
|
|
||
| try: | ||
| # only auto setup ollama in ci with python 3.10 to reduce ci cost. | ||
| if "3.10" in sys.version: | ||
| # Auto setup ollama in CI environment (when CI env var is set) | ||
| if os.environ.get("CI") and sys.platform == "linux": | ||
| subprocess.run( | ||
| ["bash", f"{current_dir}/start_ollama_server.sh"], timeout=300, check=True | ||
| ) | ||
|
|
@@ -58,9 +61,12 @@ | |
| ) | ||
| def test_ollama_embedding_setup() -> None: | ||
| """Test embedding functionality with OllamaEmbeddingModelSetup.""" | ||
| # Use longer timeout for embedding in CI environment (slower resources) | ||
| request_timeout = 120.0 if os.environ.get("CI") else 30.0 | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. This request timeout could also occur in a local environment, so it can be unified to 120.0 |
||
| connection = OllamaEmbeddingModelConnection( | ||
| name="ollama_embed", | ||
| base_url="http://localhost:11434" | ||
| base_url="http://localhost:11434", | ||
| request_timeout=request_timeout | ||
| ) | ||
|
|
||
| def get_resource(name: str, type: ResourceType) -> Resource: | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -25,14 +25,18 @@ | |
| OpenAIEmbeddingModelSetup, | ||
| ) | ||
|
|
||
| # Mark all tests in this module as integration tests | ||
| pytestmark = pytest.mark.integration | ||
|
|
||
| test_model = os.environ.get("TEST_EMBEDDING_MODEL", "text-embedding-3-small") | ||
| api_key = os.environ.get("TEST_API_KEY") | ||
|
|
||
|
|
||
| @pytest.mark.skipif(api_key is None, reason="TEST_API_KEY is not set") | ||
| @pytest.mark.integration | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. All tests in this module are already marked above, so this decorator may not be needed. |
||
| def test_openai_embedding_model() -> None: # noqa: D103 | ||
| connection = OpenAIEmbeddingModelConnection( | ||
| name="openai", api_key=api_key | ||
| name="openai", api_key=api_key or "fake-key" | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. If this test is not skipped, the api_key must not be None, so the |
||
| ) | ||
|
|
||
| def get_resource(name: str, type: ResourceType) -> Resource: | ||
|
|
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -20,13 +20,6 @@ | |
|
|
||
| import pytest | ||
|
|
||
| try: | ||
| import chromadb # noqa: F401 | ||
|
|
||
| chromadb_available = True | ||
| except ImportError: | ||
| chromadb_available = False | ||
|
|
||
| from flink_agents.api.resource import Resource, ResourceType | ||
| from flink_agents.api.vector_stores.vector_store import ( | ||
| VectorStoreQuery, | ||
|
|
@@ -35,12 +28,25 @@ | |
| ChromaVectorStore, | ||
| ) | ||
|
|
||
| # Mark all tests in this module as integration tests | ||
| pytestmark = pytest.mark.integration | ||
|
|
||
| try: | ||
| import chromadb # noqa: F401 | ||
|
|
||
| chromadb_available = True | ||
| except ImportError: | ||
| chromadb_available = False | ||
|
|
||
| api_key = os.environ.get("TEST_API_KEY") | ||
| tenant = os.environ.get("TEST_TENANT") | ||
| database = os.environ.get("TEST_DATABASE") | ||
|
|
||
|
|
||
| class MockEmbeddingModel(Resource): # noqa: D101 | ||
| def __init__(self, name: str) -> None: # noqa: D107 | ||
| self._name = name | ||
|
|
||
| @classmethod | ||
| def resource_type(cls) -> ResourceType: # noqa: D102 | ||
| return ResourceType.EMBEDDING_MODEL | ||
|
|
@@ -113,6 +119,7 @@ def get_resource(name: str, resource_type: ResourceType) -> Resource: | |
|
|
||
|
|
||
| @pytest.mark.skipif(api_key is None, reason="TEST_API_KEY is not set") | ||
| @pytest.mark.integration | ||
| def test_cloud_chroma_vector_store() -> None: | ||
| """Test cloud ChromaDB vector store with embedding model integration.""" | ||
| embedding_model = MockEmbeddingModel(name="mock_embeddings") | ||
|
|
@@ -128,9 +135,9 @@ def get_resource(name: str, resource_type: ResourceType) -> Resource: | |
| name="chroma_vector_store", | ||
| embedding_model="mock_embeddings", | ||
| collection="test_collection", | ||
| api_key=api_key, | ||
| tenant=tenant, | ||
| database=database, | ||
| api_key=api_key or "fake-key", | ||
|
Collaborator
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. ditto |
||
| tenant=tenant or "fake-tenant", | ||
| database=database or "fake-database", | ||
| get_resource=get_resource | ||
| ) | ||
|
|
||
|
|
||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Maybe this can be removed.