
Commit 1604139

chore: run doctest and notebook tests in bigframes-testing project (#976)
* chore: target bigframes-testing project for doctest and notebook tests
* skip the axis=1 multi index test temporarily
* regionalized notebook to honor GOOGLE_CLOUD_PROJECT
* temporarily disable southamerica-west1 for regionalization testing, to confirm that only southamerica-west1 has an issue running the notebook test
* 🦉 Updates from OwlBot post-processor (see https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md)
* restore southamerica-west1 for regionalization testing

Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
1 parent: 7aaef6f

File tree: 6 files changed, +22 −35 lines changed

.kokoro/continuous/doctest.cfg

Lines changed: 1 addition & 6 deletions

@@ -8,10 +8,5 @@ env_vars: {
 
 env_vars: {
     key: "GOOGLE_CLOUD_PROJECT"
-    value: "bigframes-load-testing"
-}
-
-env_vars: {
-    key: "BIGFRAMES_TEST_MODEL_VERTEX_ENDPOINT"
-    value: "https://us-central1-aiplatform.googleapis.com/v1/projects/272725758477/locations/us-central1/endpoints/590545496255234048"
+    value: "bigframes-testing"
 }
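The three remaining Kokoro configs below make the same swap, and all four also drop the now-unused BIGFRAMES_TEST_MODEL_VERTEX_ENDPOINT variable, so the test session sees the project only through its environment. A minimal sketch of the consuming side (illustrative Python, not code from this commit):

import os

# Kokoro injects GOOGLE_CLOUD_PROJECT via the env_vars block above; the
# doctest/notebook session resolves its billing project from the
# environment instead of a hard-coded default.
project_id = os.environ.get("GOOGLE_CLOUD_PROJECT")
if not project_id:
    raise ValueError("GOOGLE_CLOUD_PROJECT must be set")
print(f"Test run targets project: {project_id}")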

.kokoro/continuous/notebook.cfg

Lines changed: 1 addition & 6 deletions

@@ -13,10 +13,5 @@ env_vars: {
 
 env_vars: {
     key: "GOOGLE_CLOUD_PROJECT"
-    value: "bigframes-load-testing"
-}
-
-env_vars: {
-    key: "BIGFRAMES_TEST_MODEL_VERTEX_ENDPOINT"
-    value: "https://us-central1-aiplatform.googleapis.com/v1/projects/272725758477/locations/us-central1/endpoints/590545496255234048"
+    value: "bigframes-testing"
 }

.kokoro/presubmit/doctest.cfg

Lines changed: 1 addition & 6 deletions

@@ -8,10 +8,5 @@ env_vars: {
 
 env_vars: {
     key: "GOOGLE_CLOUD_PROJECT"
-    value: "bigframes-load-testing"
-}
-
-env_vars: {
-    key: "BIGFRAMES_TEST_MODEL_VERTEX_ENDPOINT"
-    value: "https://us-central1-aiplatform.googleapis.com/v1/projects/272725758477/locations/us-central1/endpoints/590545496255234048"
+    value: "bigframes-testing"
 }

.kokoro/presubmit/notebook.cfg

Lines changed: 1 addition & 6 deletions

@@ -8,10 +8,5 @@ env_vars: {
 
 env_vars: {
     key: "GOOGLE_CLOUD_PROJECT"
-    value: "bigframes-load-testing"
-}
-
-env_vars: {
-    key: "BIGFRAMES_TEST_MODEL_VERTEX_ENDPOINT"
-    value: "https://us-central1-aiplatform.googleapis.com/v1/projects/272725758477/locations/us-central1/endpoints/590545496255234048"
+    value: "bigframes-testing"
 }

notebooks/location/regionalized.ipynb

Lines changed: 15 additions & 11 deletions

@@ -47,32 +47,36 @@
 ],
 "source": [
 "# Take multi-region US as the default BQ location, where most of the BQ data lies including the BQ public datasets\n",
-"BQ_LOCATION = \"us\"\n",
-"PROJECT = \"bigframes-dev\"\n",
+"import os\n",
+"\n",
+"PROJECT_ID = os.environ.get(\"GOOGLE_CLOUD_PROJECT\")\n",
+"BQ_LOCATION = os.environ.get(\"BIGQUERY_LOCATION\")\n",
+"\n",
+"if not PROJECT_ID:\n",
+"    raise ValueError(\"Project must be set via environment variable GOOGLE_CLOUD_PROJECT\")\n",
+"if not BQ_LOCATION:\n",
+"    raise ValueError(\"BQ location must be set via environment variable BIGQUERY_LOCATION\")\n",
+"\n",
 "DATASET = \"bigframes_testing\"\n",
 "PENGUINS_TABLE = \"bigquery-public-data.ml_datasets.penguins\"\n",
 "\n",
 "\n",
 "# Check for a location set in the environment and do location-specific setup if needed\n",
 "\n",
-"import os\n",
 "import google.api_core.exceptions\n",
 "from google.cloud import bigquery\n",
 "import bigframes\n",
-" \n",
-"env_bq_location = os.getenv(\"BIGQUERY_LOCATION\")\n",
-"if env_bq_location and env_bq_location != BQ_LOCATION:\n",
-"    BQ_LOCATION = env_bq_location.lower()\n",
 "\n",
 "client = bigquery.Client()\n",
 "\n",
+"BQ_LOCATION = BQ_LOCATION.lower()\n",
 "if BQ_LOCATION != \"us\":\n",
 "    bq_location_normalized = BQ_LOCATION.replace('-', '_')\n",
 "\n",
 "    # Nominate a local penguins table\n",
 "    penguins_table_ref = bigquery.TableReference.from_string(PENGUINS_TABLE)\n",
 "    penguins_local_dataset_name = f\"{DATASET}_{bq_location_normalized}\"\n",
-"    penguins_local_dataset_ref = bigquery.DatasetReference(project=PROJECT, dataset_id=penguins_local_dataset_name)\n",
+"    penguins_local_dataset_ref = bigquery.DatasetReference(project=PROJECT_ID, dataset_id=penguins_local_dataset_name)\n",
 "    penguins_local_dataset = bigquery.Dataset(penguins_local_dataset_ref)\n",
 "    penguins_local_dataset.location = BQ_LOCATION\n",
 "    penguins_local_table_ref= bigquery.TableReference(penguins_local_dataset, penguins_table_ref.table_id)\n",

@@ -94,13 +98,13 @@
 "    DATASET = f\"{DATASET}_{bq_location_normalized}\"\n",
 "\n",
 "# Create the dataset to store the model if it doesn't exist \n",
-"model_local_dataset = bigquery.Dataset(bigquery.DatasetReference(project=PROJECT, dataset_id=DATASET))\n",
+"model_local_dataset = bigquery.Dataset(bigquery.DatasetReference(project=PROJECT_ID, dataset_id=DATASET))\n",
 "model_local_dataset.location = BQ_LOCATION\n",
 "model_dataset = client.create_dataset(model_local_dataset, exists_ok=True)\n",
 "\n",
 "# Finally log the variables driving the core notebook execution\n",
 "log = ('\\n'.join(f\"{name}: {str(value)}\" for name, value in {\n",
-"    \"BigQuery project\" : PROJECT,\n",
+"    \"BigQuery project\" : PROJECT_ID,\n",
 "    \"BigQuery location\" : BQ_LOCATION,\n",
 "    \"Penguins Table\" : PENGUINS_TABLE,\n",
 "    \"ML Model Dataset\" : model_dataset.reference\n",

@@ -134,7 +138,7 @@
 "\n",
 "# Note: The project option is not required in all environments.\n",
 "# On BigQuery Studio, the project ID is automatically detected.\n",
-"bigframes.pandas.options.bigquery.project = PROJECT\n",
+"bigframes.pandas.options.bigquery.project = PROJECT_ID\n",
 "\n",
 "# Note: The location option is not required.\n",
 "# It defaults to the location of the first table or query\n",

scripts/setup-project-for-testing.sh

Lines changed: 3 additions & 0 deletions

@@ -57,12 +57,14 @@ function log_and_execute() {
 ################################################################################
 function enable_apis() {
   for service in aiplatform.googleapis.com \
+                 artifactregistry.googleapis.com \
                  bigquery.googleapis.com \
                  bigqueryconnection.googleapis.com \
                  bigquerystorage.googleapis.com \
                  cloudbuild.googleapis.com \
                  cloudfunctions.googleapis.com \
                  cloudresourcemanager.googleapis.com \
+                 compute.googleapis.com \
                  run.googleapis.com \
   ; do
     log_and_execute gcloud --project=$PROJECT_ID services enable $service
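The loop enables each service with gcloud. A hedged sketch of a post-run check for the two newly added APIs; the Python here is illustrative (not part of this commit), "my-test-project" stands in for the script's $PROJECT_ID, and an installed, authenticated gcloud is assumed:

import subprocess

# Verify the newly added services are enabled on the testing project.
PROJECT_ID = "my-test-project"  # placeholder for $PROJECT_ID
for service in ("artifactregistry.googleapis.com", "compute.googleapis.com"):
    result = subprocess.run(
        ["gcloud", "services", "list", "--enabled",
         f"--project={PROJECT_ID}",
         f"--filter=config.name:{service}",
         "--format=value(config.name)"],
        capture_output=True, text=True, check=True,
    )
    status = "enabled" if service in result.stdout else "NOT enabled"
    print(f"{service}: {status}")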
@@ -148,6 +150,7 @@ function ensure_bq_connections_with_iam() {
     southamerica-west1 \
     us \
     us-central1 \
+    us-east5 \
   ; do
     ensure_bq_connection_with_iam "$location" "$BIGFRAMES_RF_CONNECTION_NAME"
   done
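For the new us-east5 entry, ensure_bq_connection_with_iam amounts to creating a CLOUD_RESOURCE connection in that location if one does not already exist. A hedged Python equivalent using the google-cloud-bigquery-connection client; the project and connection IDs are placeholders standing in for $PROJECT_ID and $BIGFRAMES_RF_CONNECTION_NAME, and the real script additionally grants IAM roles to the connection's service account:

from google.api_core.exceptions import AlreadyExists
from google.cloud import bigquery_connection_v1

client = bigquery_connection_v1.ConnectionServiceClient()
# Placeholder project ID and the newly added location.
parent = client.common_location_path("my-test-project", "us-east5")

try:
    # A CLOUD_RESOURCE connection, as used for BigQuery remote functions.
    created = client.create_connection(
        parent=parent,
        connection_id="bigframes-rf-conn",  # placeholder connection name
        connection=bigquery_connection_v1.Connection(
            cloud_resource=bigquery_connection_v1.CloudResourceProperties()
        ),
    )
    print("service account:", created.cloud_resource.service_account_id)
except AlreadyExists:
    print("connection already exists")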
