File tree

4 files changed

+47
-0
lines changed
Original file line numberDiff line numberDiff line change
@@ -180,9 +180,11 @@ def teardown_training_pipeline(shared_state, pipeline_client):
180180
)
181181

182182
# Waiting for training pipeline to be in CANCELLED state
183+
timeout = shared_state["cancel_batch_prediction_job_timeout"]
183184
helpers.wait_for_job_state(
184185
get_job_method=pipeline_client.get_training_pipeline,
185186
name=shared_state["training_pipeline_name"],
187+
timeout=timeout,
186188
)
187189

188190
# Delete the training pipeline
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
1+
import collections
12
import re
23
import time
4+
from timeit import default_timer as timer
35

46
from typing import Callable
57

@@ -63,3 +65,21 @@ def wait_for_job_state(
6365
"\nTry increasing the timeout in sample test"
6466
f"\nLast recorded state: {response.state}"
6567
)
68+
69+
70+
def flaky_test_diagnostic(file_name, test_name, N=20):
    """Run one test repeatedly and bucket wall-clock durations by outcome.

    Args:
        file_name: Path to the pytest test module.
        test_name: Name of the test function inside that module.
        N: Number of repetitions (default 20).

    Returns:
        collections.defaultdict mapping 'SUCCESS' / 'FAILURE' to lists of
        per-run durations in seconds.
    """
    # Imported lazily so importing this helpers module never requires pytest.
    import pytest

    target = f'{file_name}::{test_name}'
    timings = collections.defaultdict(list)
    for _ in range(N):
        begin = timer()
        exit_code = pytest.main(['-s', target])
        elapsed = timer() - begin
        outcome = 'SUCCESS' if exit_code == pytest.ExitCode.OK else 'FAILURE'
        timings[outcome].append(elapsed)

    return timings
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,9 @@ def teardown(teardown_training_pipeline):
3434

3535
def test_ucaip_generated_create_training_pipeline_sample(capsys, shared_state):
3636

37+
# The return of the cancellation can be flaky; max of 20 runs was 215 sec.
38+
shared_state["cancel_batch_prediction_job_timeout"] = 300
39+
3740
create_training_pipeline_tabular_regression_sample.create_training_pipeline_tabular_regression_sample(
3841
project=PROJECT_ID,
3942
display_name=DISPLAY_NAME,
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
import os
2+
import pathlib
3+
import sys
4+
5+
# Run test in snippets directory:
6+
dir_of_curr_file = os.path.dirname(__file__)
7+
helper_filepath = pathlib.Path(dir_of_curr_file).parent / 'samples' / 'snippets'
8+
sys.path.append(helper_filepath.resolve().as_posix())
9+
os.chdir(helper_filepath.resolve())
10+
from helpers import flaky_test_diagnostic
11+
12+
# Settings:
13+
file_name = 'pipeline_service/create_training_pipeline_tabular_regression_sample_test.py'
14+
test_name = 'test_ucaip_generated_create_training_pipeline_sample'
15+
timing_dict = flaky_test_diagnostic(file_name, test_name, N=1)
16+
17+
for key, delta_list in timing_dict.items():
18+
mean_time = sum(delta_list)/len(delta_list)
19+
max_time = max(delta_list)
20+
min_time = min(delta_list)
21+
report_string = f'Result: {key}, mean={mean_time:3.2f}, min={min_time:3.2f}, max={max_time:3.2f}, count={len(delta_list)}'
22+
print(report_string)

0 commit comments

Comments
 (0)