
Commit b20d7d1 (parent 3063e72)

11 files changed: +218 -70 lines

clients/google-api-services-dataplex/v1/2.0.0/README.md

Lines changed: 2 additions & 2 deletions
@@ -22,7 +22,7 @@ Add the following lines to your `pom.xml` file:
 <dependency>
 <groupId>com.google.apis</groupId>
 <artifactId>google-api-services-dataplex</artifactId>
-<version>v1-rev20250129-2.0.0</version>
+<version>v1-rev20250222-2.0.0</version>
 </dependency>
 </dependencies>
 </project>
@@ -35,7 +35,7 @@ repositories {
 mavenCentral()
 }
 dependencies {
-implementation 'com.google.apis:google-api-services-dataplex:v1-rev20250129-2.0.0'
+implementation 'com.google.apis:google-api-services-dataplex:v1-rev20250222-2.0.0'
 }
 ```
 
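Once the bumped artifact is on the classpath, the client is built the usual way for generated Google API clients. A minimal sketch follows; the transport, JSON factory, and google-auth-library wiring shown here are assumptions about the consuming project, not part of this change:

```java
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.services.dataplex.v1.CloudDataplex;
import com.google.auth.http.HttpCredentialsAdapter;
import com.google.auth.oauth2.GoogleCredentials;

public class DataplexClientFactory {
  /** Builds a CloudDataplex client using Application Default Credentials. */
  public static CloudDataplex create() throws Exception {
    GoogleCredentials credentials = GoogleCredentials.getApplicationDefault()
        .createScoped("https://www.googleapis.com/auth/cloud-platform");
    return new CloudDataplex.Builder(
            GoogleNetHttpTransport.newTrustedTransport(),
            GsonFactory.getDefaultInstance(),
            new HttpCredentialsAdapter(credentials))
        .setApplicationName("dataplex-sample") // placeholder application name
        .build();
  }
}
```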

clients/google-api-services-dataplex/v1/2.0.0/com/google/api/services/dataplex/v1/CloudDataplex.java

Lines changed: 12 additions & 46 deletions
@@ -1699,10 +1699,7 @@ public List set(String parameterName, Object value) {
 }
 }
 /**
-* Looks up a single Entry by name using the permission on the source system.Caution: The BigQuery
-* metadata that is stored in Dataplex Catalog is changing. For more information, see Changes to
-* BigQuery metadata stored in Dataplex Catalog (https://cloud.google.com/dataplex/docs/biqquery-
-* metadata-changes).
+* Looks up an entry by name using the permission on the source system.
 *
 * Create a request for the method "locations.lookupEntry".
 *
@@ -1727,10 +1724,7 @@ public class LookupEntry extends CloudDataplexRequest<com.google.api.services.da
 java.util.regex.Pattern.compile("^projects/[^/]+/locations/[^/]+$");
 
 /**
-* Looks up a single Entry by name using the permission on the source system.Caution: The BigQuery
-* metadata that is stored in Dataplex Catalog is changing. For more information, see Changes to
-* BigQuery metadata stored in Dataplex Catalog (https://cloud.google.com/dataplex/docs/biqquery-
-* metadata-changes).
+* Looks up an entry by name using the permission on the source system.
 *
 * Create a request for the method "locations.lookupEntry".
 *
@@ -7440,9 +7434,7 @@ public class DataTaxonomies {
 * This request holds the parameters needed by the dataplex server. After setting any optional
 * parameters, call the {@link Create#execute()} method to invoke the remote operation.
 *
-* @param parent Required. The resource name of the data taxonomy location, of the form:
-* projects/{project_number}/locations/{location_id} where location_id refers to a GCP
-* region.
+* @param parent
 * @param content the {@link com.google.api.services.dataplex.v1.model.GoogleCloudDataplexV1DataTaxonomy}
 * @return the request
 */
@@ -7470,9 +7462,7 @@ public class Create extends CloudDataplexRequest<com.google.api.services.dataple
 * Create#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)} must
 * be called to initialize this instance immediately after invoking the constructor. </p>
 *
-* @param parent Required. The resource name of the data taxonomy location, of the form:
-* projects/{project_number}/locations/{location_id} where location_id refers to a GCP
-* region.
+* @param parent
 * @param content the {@link com.google.api.services.dataplex.v1.model.GoogleCloudDataplexV1DataTaxonomy}
 * @since 1.13
 */
@@ -7541,26 +7531,16 @@ public Create setUploadProtocol(java.lang.String uploadProtocol) {
 return (Create) super.setUploadProtocol(uploadProtocol);
 }
 
-/**
-* Required. The resource name of the data taxonomy location, of the form:
-* projects/{project_number}/locations/{location_id} where location_id refers to a GCP
-* region.
-*/
 @com.google.api.client.util.Key
 private java.lang.String parent;
 
-/** Required. The resource name of the data taxonomy location, of the form:
-projects/{project_number}/locations/{location_id} where location_id refers to a GCP region.
+/**
+
 */
 public java.lang.String getParent() {
 return parent;
 }
 
-/**
-* Required. The resource name of the data taxonomy location, of the form:
-* projects/{project_number}/locations/{location_id} where location_id refers to a GCP
-* region.
-*/
 public Create setParent(java.lang.String parent) {
 if (!getSuppressPatternChecks()) {
 com.google.api.client.util.Preconditions.checkArgument(PARENT_PATTERN.matcher(parent).matches(),
@@ -7795,8 +7775,7 @@ public Delete set(String parameterName, Object value) {
 * This request holds the parameters needed by the dataplex server. After setting any optional
 * parameters, call the {@link Get#execute()} method to invoke the remote operation.
 *
-* @param name Required. The resource name of the DataTaxonomy:
-* projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}
+* @param name
 * @return the request
 */
 public Get get(java.lang.String name) throws java.io.IOException {
@@ -7822,8 +7801,7 @@ public class Get extends CloudDataplexRequest<com.google.api.services.dataplex.v
 * {@link Get#initialize(com.google.api.client.googleapis.services.AbstractGoogleClientRequest)}
 * must be called to initialize this instance immediately after invoking the constructor. </p>
 *
-* @param name Required. The resource name of the DataTaxonomy:
-* projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}
+* @param name
 * @since 1.13
 */
 protected Get(java.lang.String name) {
@@ -7901,24 +7879,16 @@ public Get setUploadProtocol(java.lang.String uploadProtocol) {
 return (Get) super.setUploadProtocol(uploadProtocol);
 }
 
-/**
-* Required. The resource name of the DataTaxonomy:
-* projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}
-*/
 @com.google.api.client.util.Key
 private java.lang.String name;
 
-/** Required. The resource name of the DataTaxonomy:
-projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}
+/**
+
 */
 public java.lang.String getName() {
 return name;
 }
 
-/**
-* Required. The resource name of the DataTaxonomy:
-* projects/{project_number}/locations/{location_id}/dataTaxonomies/{data_taxonomy_id}
-*/
 public Get setName(java.lang.String name) {
 if (!getSuppressPatternChecks()) {
 com.google.api.client.util.Preconditions.checkArgument(NAME_PATTERN.matcher(name).matches(),
@@ -12029,9 +11999,7 @@ public Delete set(String parameterName, Object value) {
 }
 }
 /**
-* Gets an Entry.Caution: The BigQuery metadata that is stored in Dataplex Catalog is changing. For
-* more information, see Changes to BigQuery metadata stored in Dataplex Catalog
-* (https://cloud.google.com/dataplex/docs/biqquery-metadata-changes).
+* Gets an Entry.
 *
 * Create a request for the method "entries.get".
 *
@@ -12056,9 +12024,7 @@ public class Get extends CloudDataplexRequest<com.google.api.services.dataplex.v
 java.util.regex.Pattern.compile("^projects/[^/]+/locations/[^/]+/entryGroups/[^/]+/entries/.*$");
 
 /**
-* Gets an Entry.Caution: The BigQuery metadata that is stored in Dataplex Catalog is changing.
-* For more information, see Changes to BigQuery metadata stored in Dataplex Catalog
-* (https://cloud.google.com/dataplex/docs/biqquery-metadata-changes).
+* Gets an Entry.
 *
 * Create a request for the method "entries.get".
 *
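The two request builders touched above are invoked like any other generated Dataplex request. A hedged sketch follows, with placeholder resource names; the lookupEntry return type and its setEntry query-parameter setter are assumptions not shown in these hunks:

```java
import com.google.api.services.dataplex.v1.CloudDataplex;
import com.google.api.services.dataplex.v1.model.GoogleCloudDataplexV1Entry;

public class EntryLookupExample {
  static void run(CloudDataplex dataplex) throws java.io.IOException {
    // locations.lookupEntry: resolves an entry using the permission on the source system.
    GoogleCloudDataplexV1Entry looked = dataplex.projects().locations()
        .lookupEntry("projects/my-project/locations/us-central1") // placeholder location name
        // assumed setter for the entry query parameter; not shown in this diff
        .setEntry("projects/my-project/locations/us-central1/entryGroups/my-group/entries/my-entry")
        .execute();

    // entries.get: fetches an entry directly by its resource name.
    GoogleCloudDataplexV1Entry entry = dataplex.projects().locations()
        .entryGroups().entries()
        .get("projects/my-project/locations/us-central1/entryGroups/my-group/entries/my-entry")
        .execute();
  }
}
```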
clients/google-api-services-dataplex/v1/2.0.0/com/google/api/services/dataplex/v1/model/GoogleCloudDataplexV1BusinessGlossaryEvent.java

Lines changed: 114 additions & 0 deletions
@@ -0,0 +1,114 @@
+/*
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
+ * in compliance with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License
+ * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
+ * or implied. See the License for the specific language governing permissions and limitations under
+ * the License.
+ */
+/*
+ * This code was generated by https://github.com/googleapis/google-api-java-client-services/
+ * Modify at your own risk.
+ */
+
+package com.google.api.services.dataplex.v1.model;
+
+/**
+ * Payload associated with Business Glossary related log events.
+ *
+ * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
+ * transmitted over HTTP when working with the Cloud Dataplex API. For a detailed explanation see:
+ * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
+ * </p>
+ *
+ * @author Google, Inc.
+ */
+@SuppressWarnings("javadoc")
+public final class GoogleCloudDataplexV1BusinessGlossaryEvent extends com.google.api.client.json.GenericJson {
+
+  /**
+   * The type of the event.
+   * The value may be {@code null}.
+   */
+  @com.google.api.client.util.Key
+  private java.lang.String eventType;
+
+  /**
+   * The log message.
+   * The value may be {@code null}.
+   */
+  @com.google.api.client.util.Key
+  private java.lang.String message;
+
+  /**
+   * Name of the resource.
+   * The value may be {@code null}.
+   */
+  @com.google.api.client.util.Key
+  private java.lang.String resource;
+
+  /**
+   * The type of the event.
+   * @return value or {@code null} for none
+   */
+  public java.lang.String getEventType() {
+    return eventType;
+  }
+
+  /**
+   * The type of the event.
+   * @param eventType eventType or {@code null} for none
+   */
+  public GoogleCloudDataplexV1BusinessGlossaryEvent setEventType(java.lang.String eventType) {
+    this.eventType = eventType;
+    return this;
+  }
+
+  /**
+   * The log message.
+   * @return value or {@code null} for none
+   */
+  public java.lang.String getMessage() {
+    return message;
+  }
+
+  /**
+   * The log message.
+   * @param message message or {@code null} for none
+   */
+  public GoogleCloudDataplexV1BusinessGlossaryEvent setMessage(java.lang.String message) {
+    this.message = message;
+    return this;
+  }
+
+  /**
+   * Name of the resource.
+   * @return value or {@code null} for none
+   */
+  public java.lang.String getResource() {
+    return resource;
+  }
+
+  /**
+   * Name of the resource.
+   * @param resource resource or {@code null} for none
+   */
+  public GoogleCloudDataplexV1BusinessGlossaryEvent setResource(java.lang.String resource) {
+    this.resource = resource;
+    return this;
+  }
+
+  @Override
+  public GoogleCloudDataplexV1BusinessGlossaryEvent set(String fieldName, Object value) {
+    return (GoogleCloudDataplexV1BusinessGlossaryEvent) super.set(fieldName, value);
+  }
+
+  @Override
+  public GoogleCloudDataplexV1BusinessGlossaryEvent clone() {
+    return (GoogleCloudDataplexV1BusinessGlossaryEvent) super.clone();
+  }
+
+}
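Because the new class is a plain GenericJson model, a log payload can be parsed into it with any google-http-client JsonFactory. A minimal sketch, assuming the Gson-based factory is available and using a hypothetical payload string (the eventType value shown is illustrative only):

```java
import com.google.api.client.json.gson.GsonFactory;
import com.google.api.services.dataplex.v1.model.GoogleCloudDataplexV1BusinessGlossaryEvent;

public class GlossaryEventParseExample {
  public static void main(String[] args) throws java.io.IOException {
    // Hypothetical JSON payload carrying the three fields declared by the model.
    String json = "{\"eventType\":\"GLOSSARY_CREATE\",\"message\":\"glossary created\","
        + "\"resource\":\"projects/p/locations/l/glossaries/g\"}";

    GoogleCloudDataplexV1BusinessGlossaryEvent event =
        GsonFactory.getDefaultInstance()
            .fromString(json, GoogleCloudDataplexV1BusinessGlossaryEvent.class);

    System.out.println(event.getEventType() + ": " + event.getMessage()
        + " (" + event.getResource() + ")");
  }
}
```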

clients/google-api-services-dataplex/v1/2.0.0/com/google/api/services/dataplex/v1/model/GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig.java

Lines changed: 60 additions & 0 deletions
@@ -37,6 +37,25 @@ public final class GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfi
 @com.google.api.client.util.Key
 private java.lang.String connection;
 
+/**
+* Optional. The location of the BigQuery dataset to publish BigLake external or non-BigLake
+* external tables to. 1. If the Cloud Storage bucket is located in a multi-region bucket, then
+* BigQuery dataset can be in the same multi-region bucket or any single region that is included
+* in the same multi-region bucket. The datascan can be created in any single region that is
+* included in the same multi-region bucket 2. If the Cloud Storage bucket is located in a dual-
+* region bucket, then BigQuery dataset can be located in regions that are included in the dual-
+* region bucket, or in a multi-region that includes the dual-region. The datascan can be created
+* in any single region that is included in the same dual-region bucket. 3. If the Cloud Storage
+* bucket is located in a single region, then BigQuery dataset can be in the same single region or
+* any multi-region bucket that includes the same single region. The datascan will be created in
+* the same single region as the bucket. 4. If the BigQuery dataset is in single region, it must
+* be in the same single region as the datascan.For supported values, refer to
+* https://cloud.google.com/bigquery/docs/locations#supported_locations.
+* The value may be {@code null}.
+*/
+@com.google.api.client.util.Key
+private java.lang.String location;
+
 /**
 * Optional. Determines whether to publish discovered tables as BigLake external tables or non-
 * BigLake external tables.
@@ -64,6 +83,47 @@ public GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig setConnect
 return this;
 }
 
+/**
+* Optional. The location of the BigQuery dataset to publish BigLake external or non-BigLake
+* external tables to. 1. If the Cloud Storage bucket is located in a multi-region bucket, then
+* BigQuery dataset can be in the same multi-region bucket or any single region that is included
+* in the same multi-region bucket. The datascan can be created in any single region that is
+* included in the same multi-region bucket 2. If the Cloud Storage bucket is located in a dual-
+* region bucket, then BigQuery dataset can be located in regions that are included in the dual-
+* region bucket, or in a multi-region that includes the dual-region. The datascan can be created
+* in any single region that is included in the same dual-region bucket. 3. If the Cloud Storage
+* bucket is located in a single region, then BigQuery dataset can be in the same single region or
+* any multi-region bucket that includes the same single region. The datascan will be created in
+* the same single region as the bucket. 4. If the BigQuery dataset is in single region, it must
+* be in the same single region as the datascan.For supported values, refer to
+* https://cloud.google.com/bigquery/docs/locations#supported_locations.
+* @return value or {@code null} for none
+*/
+public java.lang.String getLocation() {
+return location;
+}
+
+/**
+* Optional. The location of the BigQuery dataset to publish BigLake external or non-BigLake
+* external tables to. 1. If the Cloud Storage bucket is located in a multi-region bucket, then
+* BigQuery dataset can be in the same multi-region bucket or any single region that is included
+* in the same multi-region bucket. The datascan can be created in any single region that is
+* included in the same multi-region bucket 2. If the Cloud Storage bucket is located in a dual-
+* region bucket, then BigQuery dataset can be located in regions that are included in the dual-
+* region bucket, or in a multi-region that includes the dual-region. The datascan can be created
+* in any single region that is included in the same dual-region bucket. 3. If the Cloud Storage
+* bucket is located in a single region, then BigQuery dataset can be in the same single region or
+* any multi-region bucket that includes the same single region. The datascan will be created in
+* the same single region as the bucket. 4. If the BigQuery dataset is in single region, it must
+* be in the same single region as the datascan.For supported values, refer to
+* https://cloud.google.com/bigquery/docs/locations#supported_locations.
+* @param location location or {@code null} for none
+*/
+public GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig setLocation(java.lang.String location) {
+this.location = location;
+return this;
+}
+
 /**
 * Optional. Determines whether to publish discovered tables as BigLake external tables or non-
 * BigLake external tables.
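A minimal sketch of populating the new field with a placeholder location; only the setLocation setter shown above is used, and choosing a value that satisfies the bucket/dataset co-location rules described in the javadoc is left to the caller:

```java
import com.google.api.services.dataplex.v1.model.GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig;

public class PublishingLocationExample {
  static GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig buildConfig() {
    // Per the javadoc above, the dataset location must be compatible with the
    // Cloud Storage bucket's region or multi-region; "us" is a placeholder value.
    return new GoogleCloudDataplexV1DataDiscoverySpecBigQueryPublishingConfig()
        .setLocation("us");
  }
}
```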

clients/google-api-services-dataplex/v1/2.0.0/com/google/api/services/dataplex/v1/model/GoogleCloudDataplexV1DataProfileSpec.java

Lines changed: 9 additions & 6 deletions
@@ -54,8 +54,9 @@ public final class GoogleCloudDataplexV1DataProfileSpec extends com.google.api.c
 
 /**
 * Optional. A filter applied to all rows in a single DataScan job. The filter needs to be a valid
-* SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2
-* < 10
+* SQL expression for a WHERE clause in GoogleSQL syntax
+* (https://cloud.google.com/bigquery/docs/reference/standard-sql/query-
+* syntax#where_clause).Example: col1 >= 0 AND col2 < 10
 * The value may be {@code null}.
 */
 @com.google.api.client.util.Key
@@ -127,8 +128,9 @@ public GoogleCloudDataplexV1DataProfileSpec setPostScanActions(GoogleCloudDatapl
 
 /**
 * Optional. A filter applied to all rows in a single DataScan job. The filter needs to be a valid
-* SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2
-* < 10
+* SQL expression for a WHERE clause in GoogleSQL syntax
+* (https://cloud.google.com/bigquery/docs/reference/standard-sql/query-
+* syntax#where_clause).Example: col1 >= 0 AND col2 < 10
 * @return value or {@code null} for none
 */
 public java.lang.String getRowFilter() {
@@ -137,8 +139,9 @@ public java.lang.String getRowFilter() {
 
 /**
 * Optional. A filter applied to all rows in a single DataScan job. The filter needs to be a valid
-* SQL expression for a WHERE clause in BigQuery standard SQL syntax. Example: col1 >= 0 AND col2
-* < 10
+* SQL expression for a WHERE clause in GoogleSQL syntax
+* (https://cloud.google.com/bigquery/docs/reference/standard-sql/query-
+* syntax#where_clause).Example: col1 >= 0 AND col2 < 10
 * @param rowFilter rowFilter or {@code null} for none
 */
 public GoogleCloudDataplexV1DataProfileSpec setRowFilter(java.lang.String rowFilter) {
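The reworded rowFilter documentation still describes a GoogleSQL WHERE-clause expression. A minimal sketch using the setRowFilter setter shown above; the filter string is the example from the javadoc, and attaching the spec to a DataScan is outside this file:

```java
import com.google.api.services.dataplex.v1.model.GoogleCloudDataplexV1DataProfileSpec;

public class RowFilterExample {
  static GoogleCloudDataplexV1DataProfileSpec buildSpec() {
    // The filter is a WHERE-clause expression in GoogleSQL syntax.
    return new GoogleCloudDataplexV1DataProfileSpec()
        .setRowFilter("col1 >= 0 AND col2 < 10");
  }
}
```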

clients/google-api-services-dataplex/v1/2.0.0/com/google/api/services/dataplex/v1/model/GoogleCloudDataplexV1DataQualityRuleRowConditionExpectation.java

Lines changed: 2 additions & 2 deletions
@@ -18,8 +18,8 @@
 
 /**
 * Evaluates whether each row passes the specified condition.The SQL expression needs to use
-* BigQuery standard SQL syntax and should produce a boolean value per row as the result.Example:
-* col1 >= 0 AND col2 < 10
+* GoogleSQL syntax (https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax) and
+* should produce a boolean value per row as the result.Example: col1 >= 0 AND col2 < 10
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Cloud Dataplex API. For a detailed explanation see:
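For reference, a hedged sketch of wiring this expectation into a data quality rule; the setSqlExpression, setRowConditionExpectation, and setDimension setters are assumptions inferred from the generator's naming conventions, since this diff only touches the class-level javadoc:

```java
import com.google.api.services.dataplex.v1.model.GoogleCloudDataplexV1DataQualityRule;
import com.google.api.services.dataplex.v1.model.GoogleCloudDataplexV1DataQualityRuleRowConditionExpectation;

public class RowConditionRuleExample {
  static GoogleCloudDataplexV1DataQualityRule buildRule() {
    // Assumed setter: the GoogleSQL expression must evaluate to a boolean per row.
    GoogleCloudDataplexV1DataQualityRuleRowConditionExpectation condition =
        new GoogleCloudDataplexV1DataQualityRuleRowConditionExpectation()
            .setSqlExpression("col1 >= 0 AND col2 < 10");

    // Assumed setters on the rule model; "VALIDITY" is a placeholder dimension value.
    return new GoogleCloudDataplexV1DataQualityRule()
        .setRowConditionExpectation(condition)
        .setDimension("VALIDITY");
  }
}
```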
