
Commit 1649182

Merge branch 'pangea-v1alpha' into feat-b358215039-add-foreigntypeinfo-test
2 parents 6830e40 + 236455c

File tree: 3 files changed, +20 -27 lines changed


google/cloud/bigquery/external_config.py

Lines changed: 5 additions & 1 deletion
@@ -1027,7 +1027,7 @@ def __init__(
         default_storage_location_uri: Optional[str] = None,
         parameters: Optional[Dict[str, Any]] = None,
     ):
-        self._properties = {}
+        self._properties: Dict[str, Any] = {}
         self.default_storage_location_uri = default_storage_location_uri
         self.parameters = parameters

@@ -1168,6 +1168,7 @@ def to_api_repr(self) -> dict:
             Dict[str, Any]:
                 A dictionary in the format used by the BigQuery API.
         """
+
         config = copy.deepcopy(self._properties)
         return config

@@ -1186,3 +1187,6 @@ def from_api_repr(cls, resource: dict) -> ExternalCatalogTableOptions:
         config = cls()
         config._properties = copy.deepcopy(resource)
         return config
+
+    def __eq__(self, value):
+        return self.to_api_repr() == value.to_api_repr()
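The __eq__ added here makes two ExternalCatalogTableOptions instances compare equal whenever their API representations match. A minimal sketch of what that buys, assuming the class is importable from google.cloud.bigquery.external_config as shown in this diff (the resource keys come from the test change below and are only illustrative):

    # Hedged sketch: equality is delegated to the API representation.
    from google.cloud.bigquery.external_config import ExternalCatalogTableOptions

    resource = {"connectionId": "connection123", "parameters": {"key": "value"}}

    a = ExternalCatalogTableOptions.from_api_repr(resource)
    b = ExternalCatalogTableOptions.from_api_repr(resource)

    # Both objects serialize to the same dict, so the new __eq__ treats them as equal.
    assert a.to_api_repr() == b.to_api_repr()
    assert a == b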

google/cloud/bigquery/schema.py

Lines changed: 10 additions & 10 deletions
@@ -659,8 +659,8 @@ class StorageDescriptor:
            "org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). The maximum
            length is 128 characters.
        locationUri (Optional[str]): The physical location of the table (e.g.
-            `gs://spark-dataproc-data/pangea-data/case_sensitive/` or
-            `gs://spark-dataproc-data/pangea-data/*`). The maximum length is
+            'gs://spark-dataproc-data/pangea-data/case_sensitive/' or
+            'gs://spark-dataproc-data/pangea-data/'). The maximum length is
            2056 bytes.
        outputFormat (Optional[str]): Specifies the fully qualified class name
            of the OutputFormat (e.g.

@@ -676,7 +676,7 @@ def __init__(
         output_format: Optional[str] = None,
         serde_info: Optional[SerDeInfo] = None,
     ):
-        self._properties = {}
+        self._properties: Dict[str, Any] = {}
         self.input_format = input_format
         self.location_uri = location_uri
         self.output_format = output_format

@@ -697,9 +697,9 @@ def input_format(self, value: Optional[str]):

     @property
     def location_uri(self) -> Any:
-        """Optional. The physical location of the table (e.g. `gs://spark-
-        dataproc-data/pangea-data/case_sensitive/` or `gs://spark-dataproc-
-        data/pangea-data/*`). The maximum length is 2056 bytes."""
+        """Optional. The physical location of the table (e.g. 'gs://spark-
+        dataproc-data/pangea-data/case_sensitive/' or 'gs://spark-dataproc-
+        data/pangea-data/'). The maximum length is 2056 bytes."""

         return self._properties.get("locationUri")

@@ -726,9 +726,9 @@ def serde_info(self) -> Any:
         """Optional. Serializer and deserializer information."""

         prop = _get_sub_prop(self._properties, ["serDeInfo"])
-        print(f"DINOSAUR in SD: {prop}\n\n{self._properties}")
         if prop is not None:
-            prop = SerDeInfo().from_api_repr(prop)
+            prop = StorageDescriptor().from_api_repr(prop)
+            print(f"DINOSAUR prop: {prop}")

         return prop

@@ -787,7 +787,7 @@ def __init__(
         name: Optional[str] = None,
         parameters: Optional[dict[str, str]] = None,
     ):
-        self._properties = {}
+        self._properties: Dict[str, Any] = {}
         self.serialization_library = serialization_library
         self.name = name
         self.parameters = parameters

@@ -850,6 +850,6 @@ def from_api_repr(cls, resource: dict) -> SerDeInfo:
         Returns:
             An instance of the class initialized with data from 'resource'.
         """
-        config = cls()
+        config = cls("")
         config._properties = copy.deepcopy(resource)
         return config
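Besides the docstring cleanups, the schema.py hunks annotate _properties as Dict[str, Any] and have SerDeInfo.from_api_repr seed the instance with cls(""), since the first positional argument (the serialization library) is required. A rough sketch of the resulting round trip; the resource field names below follow BigQuery API naming conventions and are an assumption, not something shown in this diff:

    # Hedged sketch: rebuilding a SerDeInfo from an API-style resource dict.
    from typing import Any, Dict

    from google.cloud.bigquery.schema import SerDeInfo

    # Assumed field names; only from_api_repr's behavior appears in the hunks above.
    resource: Dict[str, Any] = {
        "serializationLibrary": "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe",
        "name": "my_serde",
        "parameters": {"field.delim": ","},
    }

    # Internally this is roughly cls("") followed by a deepcopy of the resource,
    # which is why the required positional argument is filled with an empty string.
    serde = SerDeInfo.from_api_repr(resource)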

tests/unit/test_table.py

Lines changed: 5 additions & 16 deletions
@@ -5880,34 +5880,23 @@ def test_external_catalog_table_options_getter(
         dataset = DatasetReference(self.PROJECT, self.DS_ID)
         table_ref = dataset.table(self.TABLE_NAME)
         table = self._make_one(table_ref)
+        expected = external_catalog_table_options

         # Confirm that external catalog table options have not been set
         assert table.external_catalog_table_options is None

         # Add an ExternalCatalogTableOptions object to the table.
         table._properties[
             "externalCatalogTableOptions"
-        ] = external_catalog_table_options
-        table_repr = table.to_api_repr()
+        ] = external_catalog_table_options.to_api_repr()

         # Extract the ecto object.
-        ecto_output = table_repr["externalCatalogTableOptions"]
+        result = table.external_catalog_table_options

         # Confirm that external catalog table options are an
         # ExternalCatalogTableOptions object
-        assert isinstance(ecto_output, ExternalCatalogTableOptions)
-
-        storage_descriptor = request.getfixturevalue("_make_storage_descriptor")
-
-        expected = {
-            "connectionId": "connection123",
-            "parameters": {"key": "value"},
-            "storageDescriptor": storage_descriptor.to_api_repr(),
-        }
-        result = ecto_output.to_api_repr()
-
-        # Confirm that the api_repr of the ecto_output matches the inputs
-        print(f"DINOSAUR : {result}\n\n{expected}")
+        assert isinstance(result, ExternalCatalogTableOptions)
+        assert isinstance(expected, ExternalCatalogTableOptions)
         assert result == expected

     def test_external_catalog_table_options_setter(
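The reworked getter test now stores the to_api_repr() output on the table and compares the rehydrated object with the fixture directly, which only works because of the __eq__ added in external_config.py. A rough sketch of the getter pattern the test exercises; the property body below is inferred from the test's behavior and is not code from this commit:

    # Hedged sketch of the table getter under test (inferred, not from this commit).
    from typing import Any, Dict, Optional

    from google.cloud.bigquery.external_config import ExternalCatalogTableOptions


    class _TableLike:
        def __init__(self) -> None:
            self._properties: Dict[str, Any] = {}

        @property
        def external_catalog_table_options(self) -> Optional[ExternalCatalogTableOptions]:
            # None until the key is set, then rehydrate from the stored API dict.
            prop = self._properties.get("externalCatalogTableOptions")
            if prop is None:
                return None
            return ExternalCatalogTableOptions.from_api_repr(prop)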
