
Commit 5dd9e6e

shobsi and tswast authored
test: do GCF cleanup in both presubmit and e2e tests (#423)
* test: do GCF cleanup in both presubmit and e2e tests
* use functions client from session
* address review comments

---------

Co-authored-by: Tim Sweña (Swast) <swast@google.com>
1 parent 3bab1a9 commit 5dd9e6e

3 files changed: +185 -164 lines


tests/system/conftest.py (+64, -3)
@@ -21,6 +21,7 @@
 import typing
 from typing import Dict, Optional
 
+import google.api_core.exceptions
 import google.cloud.bigquery as bigquery
 import google.cloud.bigquery_connection_v1 as bigquery_connection_v1
 import google.cloud.exceptions
@@ -34,7 +35,15 @@
 import test_utils.prefixer
 
 import bigframes
-from tests.system.utils import convert_pandas_dtypes
+import tests.system.utils
+
+# Use this to control the number of cloud functions being deleted in a single
+# test session. This should help soften the spike of the number of mutations per
+# minute tracked against a quota limit (default 60, increased to 120 for
+# bigframes-dev project) by the Cloud Functions API
+# We are running pytest with "-n 20". Let's say each session lasts about a
+# minute, so we are setting a limit of 120/20 = 6 deletions per session.
+MAX_NUM_FUNCTIONS_TO_DELETE_PER_SESSION = 6
 
 CURRENT_DIR = pathlib.Path(__file__).parent
 DATA_DIR = CURRENT_DIR.parent / "data"
@@ -348,7 +357,7 @@ def nested_pandas_df() -> pd.DataFrame:
         DATA_DIR / "nested.jsonl",
         lines=True,
     )
-    convert_pandas_dtypes(df, bytes_col=True)
+    tests.system.utils.convert_pandas_dtypes(df, bytes_col=True)
 
     df = df.set_index("rowindex")
     return df
@@ -400,7 +409,7 @@ def scalars_pandas_df_default_index() -> pd.DataFrame:
         DATA_DIR / "scalars.jsonl",
         lines=True,
     )
-    convert_pandas_dtypes(df, bytes_col=True)
+    tests.system.utils.convert_pandas_dtypes(df, bytes_col=True)
 
     df = df.set_index("rowindex", drop=False)
     df.index.name = None
@@ -1040,3 +1049,55 @@ def floats_bf(session, floats_pd):
 @pytest.fixture()
 def floats_product_bf(session, floats_product_pd):
     return session.read_pandas(floats_product_pd)
+
+
+@pytest.fixture(scope="session", autouse=True)
+def cleanup_cloud_functions(session, cloudfunctions_client, dataset_id_permanent):
+    """Clean up stale cloud functions."""
+    permanent_endpoints = tests.system.utils.get_remote_function_endpoints(
+        session.bqclient, dataset_id_permanent
+    )
+    delete_count = 0
+    for cloud_function in tests.system.utils.get_cloud_functions(
+        cloudfunctions_client,
+        session.bqclient.project,
+        session.bqclient.location,
+        name_prefix="bigframes-",
+    ):
+        # Ignore bigframes cloud functions referred by the remote functions in
+        # the permanent dataset
+        if cloud_function.service_config.uri in permanent_endpoints:
+            continue
+
+        # Ignore the functions less than one day old
+        age = datetime.now() - datetime.fromtimestamp(
+            cloud_function.update_time.timestamp()
+        )
+        if age.days <= 0:
+            continue
+
+        # Go ahead and delete
+        try:
+            tests.system.utils.delete_cloud_function(
+                cloudfunctions_client, cloud_function.name
+            )
+            delete_count += 1
+            if delete_count >= MAX_NUM_FUNCTIONS_TO_DELETE_PER_SESSION:
+                break
+        except google.api_core.exceptions.NotFound:
+            # This can happen when multiple pytest sessions are running in
+            # parallel. Two or more sessions may discover the same cloud
+            # function, but only one of them would be able to delete it
+            # successfully, while the other instance will run into this
+            # exception. Ignore this exception.
+            pass
+        except google.api_core.exceptions.ResourceExhausted:
+            # This can happen if we are hitting GCP limits, e.g.
+            # google.api_core.exceptions.ResourceExhausted: 429 Quota exceeded
+            # for quota metric 'Per project mutation requests' and limit
+            # 'Per project mutation requests per minute per region' of service
+            # 'cloudfunctions.googleapis.com' for consumer
+            # 'project_number:1084210331973'.
+            # [reason: "RATE_LIMIT_EXCEEDED" domain: "googleapis.com" ...
+            # Let's stop further clean up and leave it to later.
+            break
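
Note: the tests.system.utils helpers called by the new fixture (get_remote_function_endpoints, get_cloud_functions, delete_cloud_function) are defined elsewhere in the repository and are not part of this diff. Below is a minimal, hypothetical sketch of what the two Cloud Functions helpers might look like, assuming cloudfunctions_client is a google.cloud.functions_v2.FunctionServiceClient and that function resource names end with the short name after the last slash; neither assumption comes from this commit, and the real implementations may differ.

# Hypothetical sketch only; the real helpers live in tests/system/utils.py and
# may differ in names, signatures, and behavior.
from typing import Iterator

from google.cloud import functions_v2


def get_cloud_functions(
    client: functions_v2.FunctionServiceClient,
    project: str,
    location: str,
    name_prefix: str = "",
) -> Iterator[functions_v2.Function]:
    """Yield functions in the given project/location whose short name starts
    with name_prefix (assumed resource naming:
    projects/{project}/locations/{location}/functions/{short_name})."""
    parent = f"projects/{project}/locations/{location}"
    for function in client.list_functions(parent=parent):
        short_name = function.name.split("/")[-1]
        if short_name.startswith(name_prefix):
            yield function


def delete_cloud_function(
    client: functions_v2.FunctionServiceClient, full_name: str
):
    """Request deletion of a function by its fully qualified resource name and
    return the long-running operation without waiting on it."""
    return client.delete_function(name=full_name)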
