3 changes: 1 addition & 2 deletions pyproject.toml
@@ -13,12 +13,11 @@ authors = [
{ name = "John Walz", email = "john@validmind.ai" },
]
dependencies = [
"aiohttp[speedups]",
"requests",
"ipywidgets",
"kaleido (>=0.2.1,!=0.2.1.post1,<1.0.0)",
"matplotlib",
"mistune (>=3.0.2,<4.0.0)",
"nest-asyncio (>=1.6.0,<2.0.0)",
"openai (>=1)",
"pandas (>=2.0.3,<3.0.0)",
"plotly (>=5.0.0)",
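The dependency swap above reflects the core change of this PR: validmind/api_client.py (not shown in this diff) drops its aiohttp coroutines in favor of blocking calls on a shared requests session. As a rough sketch of that pattern only — the helper name, default host, and payload handling below are placeholders, not the actual api_client code:

# Hypothetical sketch of the aiohttp -> requests conversion pattern.
# Names and defaults are illustrative; the real api_client differs in detail.
import os

import requests

_session = requests.Session()
_api_host = os.environ.get("VM_API_HOST", "https://api.example.com")  # placeholder default

def _post(endpoint: str, data=None, files=None, headers=None) -> dict:
    """Blocking POST that replaces an aiohttp coroutine; returns the decoded JSON body."""
    resp = _session.post(f"{_api_host}/{endpoint}", data=data, files=files, headers=headers)
    resp.raise_for_status()
    return resp.json()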
143 changes: 143 additions & 0 deletions test_conversion.py
@@ -0,0 +1,143 @@
#!/usr/bin/env python3
"""
Simple test to verify that the api_client conversion from aiohttp to requests works correctly.
"""

import os
import sys

# Add src directory to path
sys.path.insert(0, '/src')

def test_imports():
    """Test that all necessary modules can be imported."""
    try:
        # This should work even without dependencies
        print("✅ Basic Python imports successful")
        return True
    except ImportError as e:
        print(f"❌ Import failed: {e}")
        return False

def test_api_client_structure():
    """Test that the api_client module has all expected functions."""
    try:
        # Read the file and check for function definitions
        with open('/src/validmind/api_client.py', 'r') as f:
            content = f.read()

        # Check for key functions
        expected_functions = [
            'def init(',
            'def get_api_host(',
            'def get_api_model(',
            'def log_metadata(',
            'def log_figure(',
            'def log_test_result(',
            'def log_metric(',
            'def generate_test_result_description(',
        ]

        missing_functions = []
        for func in expected_functions:
            if func not in content:
                missing_functions.append(func)

        if missing_functions:
            print(f"❌ Missing functions: {missing_functions}")
            return False
        else:
            print("✅ All expected functions found in api_client.py")
            return True
    except Exception as e:
        print(f"❌ Error checking api_client structure: {e}")
        return False

def test_async_removal():
    """Test that async/await keywords have been removed."""
    try:
        with open('/src/validmind/api_client.py', 'r') as f:
            content = f.read()

        # Check that async/await are not used
        if 'async def' in content:
            print("❌ Found 'async def' - async functions not fully removed")
            return False

        if 'await ' in content:
            print("❌ Found 'await' - async calls not fully removed")
            return False

        if 'aiohttp' in content:
            print("❌ Found 'aiohttp' - dependency not fully removed")
            return False

        # Check that requests is used
        if 'import requests' not in content:
            print("❌ 'import requests' not found")
            return False

        print("✅ All async code properly converted to synchronous")
        return True
    except Exception as e:
        print(f"❌ Error checking async removal: {e}")
        return False

def test_dependencies_updated():
    """Test that pyproject.toml has been updated."""
    try:
        with open('/src/pyproject.toml', 'r') as f:
            content = f.read()

        # Check that aiohttp is removed and requests is added
        if 'aiohttp[speedups]' in content:
            print("❌ 'aiohttp[speedups]' still in dependencies")
            return False

        if '"requests",' not in content and '"requests"' not in content:
            print("❌ 'requests' not found in dependencies")
            return False

        # Check that nest-asyncio is removed
        if 'nest-asyncio' in content:
            print("❌ 'nest-asyncio' still in dependencies")
            return False

        print("✅ Dependencies properly updated in pyproject.toml")
        return True
    except Exception as e:
        print(f"❌ Error checking dependencies: {e}")
        return False

def main():
    """Run all tests."""
    print("Testing aiohttp to requests conversion...")
    print("=" * 50)

    tests = [
        test_imports,
        test_api_client_structure,
        test_async_removal,
        test_dependencies_updated,
    ]

    results = []
    for test in tests:
        print(f"\nRunning {test.__name__}...")
        results.append(test())

    print("\n" + "=" * 50)
    print("SUMMARY:")
    passed = sum(results)
    total = len(results)
    print(f"✅ Passed: {passed}/{total}")

    if passed == total:
        print("\n🎉 All tests passed! The conversion was successful.")
        return 0
    else:
        print(f"\n❌ {total - passed} tests failed. Please review the issues above.")
        return 1

if __name__ == "__main__":
    sys.exit(main())
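Note that test_conversion.py only inspects the source text of api_client.py and pyproject.toml. When the package itself is importable, a stricter follow-up check could assert that no coroutine functions remain at module level. A minimal sketch of such an optional check (not part of this PR, and assuming validmind.api_client imports without needing network access):

# Optional, stricter check: fails if any module-level callable is still a coroutine.
import inspect

import validmind.api_client as api_client

def test_no_coroutines_remain():
    """The converted client should expose no async functions at all."""
    leftovers = [
        name
        for name, obj in vars(api_client).items()
        if inspect.iscoroutinefunction(obj)
    ]
    assert not leftovers, f"Still async: {leftovers}"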
55 changes: 13 additions & 42 deletions tests/test_api_client.py
@@ -1,11 +1,9 @@
-import asyncio
 import json
 import os
 import unittest
 from unittest.mock import MagicMock, Mock, patch

 import matplotlib.pyplot as plt
-from aiohttp.formdata import FormData

 # simluate environment variables being set
 os.environ["VM_API_KEY"] = "your_api_key"
@@ -23,9 +21,6 @@
 from validmind.vm_models.figure import Figure


-loop = asyncio.new_event_loop()
-
-
 def mock_figure():
     fig = plt.figure()
     plt.plot([1, 2, 3])
@@ -43,32 +38,7 @@ def json(self):
         return self._json


-class MockAsyncResponse:
-    def __init__(self, status, text=None, json=None):
-        self.status = status
-        self.status_code = status
-        self._text = text
-        self._json = json
-
-    async def text(self):
-        return self._text
-
-    async def json(self):
-        return self._json
-
-    async def __aexit__(self, exc_type, exc, tb):
-        pass
-
-    async def __aenter__(self):
-        return self
-
-
 class TestAPIClient(unittest.TestCase):
-    def tearDownClass():
-        loop.close()
-
-    def run_async(self, func, *args, **kwargs):
-        return loop.run_until_complete(func(*args, **kwargs))

     @patch("requests.get")
     def test_init_successful(self, mock_requests_get):
@@ -145,23 +115,23 @@ def test_init_unsuccessful_ping(self, mock_get):
             },
         )

-    @patch("aiohttp.ClientSession.post")
+    @patch("requests.Session.post")
     def test_log_figure_matplot(self, mock_post: MagicMock):
-        mock_post.return_value = MockAsyncResponse(200, json={"cuid": "1234"})
+        mock_post.return_value = MockResponse(200, json={"cuid": "1234"})

-        self.run_async(api_client.alog_figure, mock_figure())
+        api_client.log_figure(mock_figure())

         url = f"{os.environ['VM_API_HOST']}/log_figure"
         mock_post.assert_called_once()
         self.assertEqual(mock_post.call_args[0][0], url)
-        self.assertIsInstance(mock_post.call_args[1]["data"], FormData)
+        # Check that files were passed
+        self.assertIn("files", mock_post.call_args[1])

-    @patch("aiohttp.ClientSession.post")
+    @patch("requests.Session.post")
     def test_log_metadata(self, mock_post: MagicMock):
-        mock_post.return_value = MockAsyncResponse(200, json={"cuid": "abc1234"})
+        mock_post.return_value = MockResponse(200, json={"cuid": "abc1234"})

-        self.run_async(
-            api_client.alog_metadata,
+        api_client.log_metadata(
             "1234",
             text="Some Text",
             _json={"key": "value"},
@@ -177,9 +147,10 @@ def test_log_metadata(self, mock_post: MagicMock):
                     "json": {"key": "value"},
                 }
             ),
+            headers={"Content-Type": "application/json"},
         )

-    @patch("aiohttp.ClientSession.post")
+    @patch("requests.Session.post")
     def test_log_test_result(self, mock_post):
         result = {
             "test_name": "test_name",
@@ -191,13 +162,13 @@ def test_log_test_result(self, mock_post):
             "config": None,
         }

-        mock_post.return_value = MockAsyncResponse(200, json={"cuid": "abc1234"})
+        mock_post.return_value = MockResponse(200, json={"cuid": "abc1234"})

-        self.run_async(api_client.alog_test_result, result)
+        api_client.log_test_result(result)

         url = f"{os.environ['VM_API_HOST']}/log_test_results"

-        mock_post.assert_called_with(url, data=json.dumps(result))
+        mock_post.assert_called_with(url, data=json.dumps(result), headers={"Content-Type": "application/json"})


 if __name__ == "__main__":
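The updated assertions pin down the shape of the synchronous client: figures are uploaded as multipart `files`, while JSON payloads are sent as a serialized `data` body with an explicit `Content-Type: application/json` header. A minimal sketch consistent with those assertions — not the actual implementation in api_client.py, which may differ in signature, payload keys, error handling, and return value:

# Sketch inferred from the test expectations above; illustrative only.
import json
import os

import requests

_session = requests.Session()

def log_metadata(content_id: str, text: str = None, _json: dict = None) -> dict:
    """Synchronously log a metadata object and return the API's JSON response."""
    url = f"{os.environ['VM_API_HOST']}/log_metadata"
    # Payload keys partly guessed; the diff only shows "text" and "json".
    payload = {"content_id": content_id, "text": text, "json": _json}
    resp = _session.post(
        url,
        data=json.dumps(payload),
        headers={"Content-Type": "application/json"},
    )
    resp.raise_for_status()
    return resp.json()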
59 changes: 17 additions & 42 deletions tests/test_results.py
@@ -1,4 +1,3 @@
-import asyncio
 import unittest
 from unittest.mock import patch
 import pandas as pd
@@ -16,35 +15,11 @@
 from validmind.vm_models.figure import Figure
 from validmind.errors import InvalidParameterError

-loop = asyncio.new_event_loop()


-class MockAsyncResponse:
-    def __init__(self, status, text=None, json_data=None):
-        self.status = status
-        self.status_code = status
-        self._text = text
-        self._json_data = json_data
-
-    async def text(self):
-        return self._text
-
-    async def json(self):
-        return self._json_data
-
-    async def __aexit__(self, exc_type, exc, tb):
-        pass
-
-    async def __aenter__(self):
-        return self


 class TestResultClasses(unittest.TestCase):
-    def tearDownClass():
-        loop.close()
-
-    def run_async(self, func, *args, **kwargs):
-        return loop.run_until_complete(func(*args, **kwargs))

     def test_raw_data_initialization(self):
         """Test RawData initialization and methods"""
@@ -150,22 +125,22 @@ def test_test_result_serialize(self):
         self.assertTrue(serialized["passed"])
         self.assertEqual(serialized["inputs"], [])  # Empty inputs list

-    @patch("validmind.api_client.alog_test_result")
-    @patch("validmind.api_client.alog_figure")
-    @patch("validmind.api_client.alog_metric")
-    async def test_test_result_log_async(
+    @patch("validmind.api_client.log_test_result")
+    @patch("validmind.api_client.log_figure")
+    @patch("validmind.api_client.log_metric")
+    def test_test_result_log_sync(
         self, mock_metric, mock_figure, mock_test_result
     ):
-        """Test async logging of TestResult"""
-        mock_test_result.return_value = MockAsyncResponse(200, json={"cuid": "123"})
-        mock_figure.return_value = MockAsyncResponse(200, json={"cuid": "456"})
-        mock_metric.return_value = MockAsyncResponse(200, json={"cuid": "789"})
+        """Test synchronous logging of TestResult"""
+        mock_test_result.return_value = {"cuid": "123"}
+        mock_figure.return_value = {"cuid": "456"}
+        mock_metric.return_value = {"cuid": "789"}

         test_result = TestResult(
             result_id="test_1", metric=0.95, description="Test description"
         )

-        await test_result.log_async(section_id="section_1", position=0)
+        test_result.log_sync(section_id="section_1", position=0)

         mock_test_result.assert_called_once()
         mock_metric.assert_called_once()
@@ -207,33 +182,33 @@ def test_validate_log_config(self):
         with self.assertRaises(InvalidParameterError):
             test_result.validate_log_config(invalid_type_config)

-    @patch("validmind.api_client.update_metadata")
-    async def test_metadata_update_content_id_handling(self, mock_update_metadata):
+    @patch("validmind.vm_models.result.utils.update_metadata")
+    def test_metadata_update_content_id_handling(self, mock_update_metadata):
         """Test metadata update with different content_id scenarios"""
         # Test case 1: With content_id
         test_result = TestResult(
             result_id="test_1",
             description="Test description",
             _was_description_generated=False,
         )
-        await test_result.log_async(content_id="custom_content_id")
+        test_result.log_sync(content_id="custom_content_id")
         mock_update_metadata.assert_called_with(
-            content_id="custom_content_id::default", text="Test description"
+            content_id="custom_content_id::Default Description", text="Test description"
         )

         # Test case 2: Without content_id
         mock_update_metadata.reset_mock()
-        await test_result.log_async()
+        test_result.log_sync()
         mock_update_metadata.assert_called_with(
-            content_id="test_description:test_1::default", text="Test description"
+            content_id="test_description:test_1::Default Description", text="Test description"
         )

         # Test case 3: With AI generated description
         test_result._was_description_generated = True
         mock_update_metadata.reset_mock()
-        await test_result.log_async()
+        test_result.log_sync()
         mock_update_metadata.assert_called_with(
-            content_id="test_description:test_1::ai", text="Test description"
+            content_id="test_description:test_1::Generated by ValidMind AI", text="Test description"
         )

     def test_test_result_metric_values_integration(self):
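The new assertions also document the naming scheme for description metadata: the revision label after "::" is now a human-readable string ("Default Description" / "Generated by ValidMind AI") rather than the old short codes ("default" / "ai"). A small, hypothetical helper that captures the rule these tests encode — the actual construction happens inside the result-logging code and may be structured differently:

# Illustrative only: the content_id rule implied by the assertions above.
def description_content_id(result_id: str, content_id: str = None, ai_generated: bool = False) -> str:
    """Build "<base>::<revision label>" for a test-description metadata entry."""
    base = content_id or f"test_description:{result_id}"
    revision = "Generated by ValidMind AI" if ai_generated else "Default Description"
    return f"{base}::{revision}"

# e.g. description_content_id("test_1") -> "test_description:test_1::Default Description"
# e.g. description_content_id("test_1", ai_generated=True) -> "test_description:test_1::Generated by ValidMind AI"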