# ✅ Add e2e tests for metamodeling #8457
Changes from all commits (base: `master`):

```diff
@@ -220,50 +220,136 @@ def test_response_surface_modeling(
     create_function_from_project(page, our_project["uuid"])

     # 3. start a RSM with that function
+    service_keys = [
+        "mmux-vite-app-moga-write",
+        "mmux-vite-app-sumo-write",
+        "mmux-vite-app-uq-write",
+    ]

-    with log_context(
-        logging.INFO,
-        f"Waiting for {service_key} to be responsive (waiting for {_DEFAULT_RESPONSE_TO_WAIT_FOR})",
-    ):
-        project_data = create_project_from_service_dashboard(
-            ServiceType.DYNAMIC, service_key, None, service_version
-        )
-    assert "workbench" in project_data, "Expected workbench to be in project data!"
-    assert isinstance(
-        project_data["workbench"], dict
-    ), "Expected workbench to be a dict!"
-    node_ids: list[str] = list(project_data["workbench"])
-    assert len(node_ids) == 1, "Expected 1 node in the workbench!"
+    for service_key in service_keys:
+        with log_context(
+            logging.INFO,
+            f"Waiting for {service_key} to be responsive (waiting for {_DEFAULT_RESPONSE_TO_WAIT_FOR})",
+        ):
+            project_data = create_project_from_service_dashboard(
+                ServiceType.DYNAMIC, service_key, None, service_version
+            )
+        assert (
+            "workbench" in project_data
+        ), "Expected workbench to be in project data!"
+        assert isinstance(
+            project_data["workbench"], dict
+        ), "Expected workbench to be a dict!"
+        node_ids: list[str] = list(project_data["workbench"])
+        assert len(node_ids) == 1, "Expected 1 node in the workbench!"

-    wait_for_service_running(
-        page=page,
-        node_id=node_ids[0],
-        websocket=log_in_and_out,
-        timeout=_WAITING_FOR_SERVICE_TO_START,
-        press_start_button=False,
-        product_url=product_url,
-        is_service_legacy=is_service_legacy,
-    )

+        wait_for_service_running(
+            page=page,
+            node_id=node_ids[0],
+            websocket=log_in_and_out,
+            timeout=_WAITING_FOR_SERVICE_TO_START,
+            press_start_button=False,
+            product_url=product_url,
+            is_service_legacy=is_service_legacy,
+        )
+        service_iframe = page.frame_locator("iframe")
+        with log_context(logging.INFO, "Waiting for the RSM to be ready..."):
+            service_iframe.get_by_role("grid").wait_for(
+                state="visible", timeout=_WAITING_FOR_SERVICE_TO_APPEAR
+            )

-    service_iframe = page.frame_locator("iframe")
-    with log_context(logging.INFO, "Waiting for the RSM to be ready..."):
-        service_iframe.get_by_role("grid").wait_for(
-            state="visible", timeout=_WAITING_FOR_SERVICE_TO_APPEAR
-        )
+        # select the function
+        with log_context(logging.INFO, "Selected test function..."):
+            service_iframe.get_by_role("button", name="SELECT").nth(0).click()

+        page.wait_for_timeout(10000)
+        with log_context(logging.INFO, "Filling the input parameters..."):
+            min_test_id = "Mean" if "uq" in service_key.lower() else "Min"
+            min_inputs = service_iframe.locator(
+                f'[mmux-testid="input-block-{min_test_id}"] input[type="number"]'
+            )
+            count_min = min_inputs.count()

-    # # select the function
-    # service_iframe.get_by_role("gridcell", name=_FUNCTION_NAME).click()
+            for i in range(count_min):
+                input_field = min_inputs.nth(i)
+                input_field.fill(str(i + 1))
+                print(f"Filled {min_test_id} input {i} with value {i + 1}")
+                assert input_field.input_value() == str(i + 1)

-    # # Find the first input field (textbox) in the iframe
-    # min_input_field = service_iframe.get_by_role("textbox").nth(0)
-    # min_input_field.fill("1")
-    # max_input_field = service_iframe.get_by_role("textbox").nth(1)
-    # max_input_field.fill("10")
+            max_test_id = "Standard Deviation" if "uq" in service_key.lower() else "Max"
+            max_inputs = service_iframe.locator(
+                f'[mmux-testid="input-block-{max_test_id}"] input[type="number"]'
+            )
+            count_max = max_inputs.count()

+            for i in range(count_max):
+                input_field = max_inputs.nth(i)
+                input_field.fill(str((i + 1) * 10))
+                print(f"Filled {max_test_id} input {i} with value {(i + 1) * 10}")
+                assert input_field.input_value() == str((i + 1) * 10)

+            page.wait_for_timeout(1000)
+            page.keyboard.press("Tab")
+            page.wait_for_timeout(1000)

+        if "moga" in service_key.lower():
+            with log_context(logging.INFO, "Filling the output parameters..."):
+                output_plus_button = service_iframe.locator(
+                    '[mmux-testid="add-output-var-btn"]'
+                )

+                output_plus_button.click()
+                page.wait_for_timeout(1000)

+                output_confirm_button = service_iframe.locator(
+                    '[mmux-testid="confirm-add-output-btn"]'
+                )
+                output_confirm_button.click()
+                page.wait_for_timeout(1000)

+        # Click the next button
+        with log_context(logging.INFO, "Clicking Next to go to the next step..."):
+            service_iframe.locator('[mmux-testid="next-button"]').click()
+            page.wait_for_timeout(1000)

+        with log_context(logging.INFO, "Starting the sampling..."):
+            service_iframe.locator('[mmux-testid="extend-sampling-btn"]').click()
+            page.wait_for_timeout(1000)
+            service_iframe.locator('[mmux-testid="new-sampling-campaign-btn"]').click()
+            page.wait_for_timeout(1000)
+            service_iframe.locator('[mmux-testid="run-sampling-btn"]').click()
+            page.wait_for_timeout(1000)
```
**Review comment on lines +317 to +322:** why all these timeouts in between? Is this for you while debugging?
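A possible way to address this, sketched under the assumption that each click only needs the next control to be attached and enabled: wait on the element itself instead of sleeping a fixed second between actions. The helper name `_click_when_ready` is invented for this example; the `mmux-testid` values and the `service_iframe` `FrameLocator` are the ones already used in the diff above.

```python
# Sketch: wait for the control the next step needs instead of a fixed sleep.
from playwright.sync_api import FrameLocator, expect


def _click_when_ready(
    service_iframe: FrameLocator, testid: str, timeout_ms: int = 30_000
) -> None:
    button = service_iframe.locator(f'[mmux-testid="{testid}"]')
    # wait until the button is rendered and clickable, then click it
    expect(button).to_be_visible(timeout=timeout_ms)
    expect(button).to_be_enabled(timeout=timeout_ms)
    button.click()


# e.g. inside the "Starting the sampling..." block:
# _click_when_ready(service_iframe, "extend-sampling-btn")
# _click_when_ready(service_iframe, "new-sampling-campaign-btn")
# _click_when_ready(service_iframe, "run-sampling-btn")
```

Note that Playwright already auto-waits for actionability on `click()`, so several of the fixed `page.wait_for_timeout(1000)` calls may simply be removable.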
```diff
+        with log_context(logging.INFO, "Waiting for the sampling to complete..."):
+            toast = service_iframe.locator("div.Toastify__toast").filter(
+                has_text="Sampling started running successfully, please wait for completion."
+            )
+            toast.wait_for(state="visible", timeout=120000)  # waits up to 120 seconds

-    # # click on next
-    # service_iframe.get_by_role("button", name="Next").click()
+        with log_context(logging.INFO, "Waiting for the sampling to complete..."):
+            plotly_graph = service_iframe.locator(".js-plotly-plot")
+            plotly_graph.wait_for(state="visible", timeout=300000)
```
**Reviewer:** I am a bit confused here: so you wait for the graph to show up? But do you check whether the created sample is included?

**Author:** The function is created specifically for the test, so it starts empty and has no jobs. We then launch a sampling campaign, which updates the UI when the jobs are ready. Since the runner is a jsonifier and we have timeouts, more than five of the jobs consistently finish by the time the launch is done, and so the plot is displayed; the plot can only appear once more than 5 jobs are listed and finished. If we want to wait for every job to finish, we can either wait longer before refreshing or implement a refresh mechanism every 10 seconds until all of them are complete. Do you also require the test to check the execution for failed jobs? We can fail the test if any job fails.

**Reviewer:** I'm not saying every job has to finish, but I think it would be best to at least check that the jobs are listed.
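Following up on that request, a minimal sketch of how the test could assert that sampling jobs are at least listed (and optionally that none has failed) before, or in addition to, waiting for the plot. It assumes the jobs show up as rows of the same grid the test already locates via `get_by_role("grid")`, and that a failed job would display some "fail" text in its row; both are assumptions about the MMux UI, not confirmed selectors.

```python
import re

from playwright.sync_api import FrameLocator, expect


def assert_jobs_listed(
    service_iframe: FrameLocator, minimum_jobs: int = 1, timeout_ms: int = 300_000
) -> None:
    grid = service_iframe.get_by_role("grid")
    job_rows = grid.get_by_role("row")
    # waiting for the N-th row to become visible implies at least N rows are listed
    # (a header row, if any, would count too; adjust the index as needed)
    expect(job_rows.nth(minimum_jobs - 1)).to_be_visible(timeout=timeout_ms)
    # optionally fail fast if any listed job reports a failed state
    failed_rows = job_rows.filter(has_text=re.compile("fail", re.IGNORECASE))
    assert failed_rows.count() == 0, (
        f"{failed_rows.count()} sampling job(s) reported as failed"
    )
```

`minimum_jobs` could be set to match the condition described above (more than 5 jobs listed and finished before the plot appears).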
```diff
+        page.wait_for_timeout(2000)

+        with (
+            log_context(logging.INFO, "Go back to dashboard"),
+            page.expect_response(
+                re.compile(r"/projects\?.+")
+            ) as list_projects_response,
+        ):
+            page.get_by_test_id("dashboardBtn").click()
+            page.get_by_test_id("confirmDashboardBtn").click()
+        assert (
+            list_projects_response.value.ok
+        ), f"Failed to list projects: {list_projects_response.value.status}"
+        page.wait_for_timeout(2000)

-    # # then we wait a long time
-    # page.wait_for_timeout(1 * MINUTE)
+        page.wait_for_timeout(1 * MINUTE)
```
**Reviewer:** What is this for? If this is from me, it was only for me to debug locally and see things in the created browser debugger. You can safely remove it, as this is just wasting time and money ;)
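If the pause is still handy for local debugging, one option is to gate it behind an environment variable so CI never pays for it. This is only a sketch: `E2E_DEBUG_PAUSE` is a made-up variable name, not an existing convention in this repo, and `page` / `MINUTE` are the names already used in the test.

```python
import os

# only pause when explicitly debugging locally; CI skips the sleep entirely
if os.environ.get("E2E_DEBUG_PAUSE", "0") == "1":
    page.wait_for_timeout(1 * MINUTE)
```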
```diff
+    # TODO: more steps
```
**Reviewer:** No TODO allowed :-) Create a follow-up issue.
```diff
+    # Run this tests against MOGA, UQ and SUMO services before destroying the functions / jobs / etc..
```

*alexpargon marked this conversation as resolved.*
```diff
     # 4. cleanup
```

*wvangeit marked this conversation as resolved.*
```diff
     # - drop the Jobs created for the Function
     # - drop the function proper to avoid polluting the DB
     # - drop the jsonifier project
```
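One way the cleanup TODO above could eventually be structured, sketched with placeholder deletion callables because the concrete API helpers are not part of this PR and their names here are invented:

```python
from collections.abc import Callable, Iterable


def cleanup_metamodeling_artifacts(
    job_uuids: Iterable[str],
    function_uuid: str | None,
    project_uuid: str,
    *,
    delete_job: Callable[[str], None],
    delete_function: Callable[[str], None],
    delete_project: Callable[[str], None],
) -> None:
    # - drop the Jobs created for the Function
    for job_uuid in job_uuids:
        delete_job(job_uuid)
    # - drop the function proper to avoid polluting the DB
    if function_uuid is not None:
        delete_function(function_uuid)
    # - drop the jsonifier project
    delete_project(project_uuid)
```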
**Reviewer:** I would also use a `mmux-testid` if possible in lines 258 and 264.
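What that suggestion might look like, assuming the MMux frontend exposed `mmux-testid` attributes on the toast and on the results plot. The testid values `sampling-started-toast` and `results-plot` are hypothetical and would need to be added to the app first; today the test falls back to the `div.Toastify__toast` and `.js-plotly-plot` CSS classes.

```python
from playwright.sync_api import FrameLocator


def wait_for_sampling_feedback(service_iframe: FrameLocator) -> None:
    # toast confirming the sampling campaign was launched (hypothetical testid)
    toast = service_iframe.locator('[mmux-testid="sampling-started-toast"]').filter(
        has_text="Sampling started running successfully, please wait for completion."
    )
    toast.wait_for(state="visible", timeout=120_000)

    # results plot that appears once enough jobs have finished (hypothetical testid)
    plotly_graph = service_iframe.locator('[mmux-testid="results-plot"]')
    plotly_graph.wait_for(state="visible", timeout=300_000)
```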