Skip to content

Commit 236455c

Browse files
authored
fix: Updates mypy and pytype annotations, etc (#2072)
* Updates mypy and pytype annotations, etc
* Adds more updates re: mypy and pytype
* Updates ticks in docstring: hopefully eliminates docs error
* Updates restructured text to avoid doc fail
* Updates test of table property ECTO
* Reformats per linter
* Updates test
* Removes minor comment
1 parent b67dda2 commit 236455c

File tree

3 files changed

+23
-30
lines changed

3 files changed

+23
-30
lines changed

google/cloud/bigquery/external_config.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1027,7 +1027,7 @@ def __init__(
10271027
default_storage_location_uri: Optional[str] = None,
10281028
parameters: Optional[Dict[str, Any]] = None,
10291029
):
1030-
self._properties = {}
1030+
self._properties: Dict[str, Any] = {}
10311031
self.default_storage_location_uri = default_storage_location_uri
10321032
self.parameters = parameters
10331033

@@ -1168,6 +1168,7 @@ def to_api_repr(self) -> dict:
11681168
Dict[str, Any]:
11691169
A dictionary in the format used by the BigQuery API.
11701170
"""
1171+
11711172
config = copy.deepcopy(self._properties)
11721173
return config
11731174

@@ -1186,3 +1187,6 @@ def from_api_repr(cls, resource: dict) -> ExternalCatalogTableOptions:
11861187
config = cls()
11871188
config._properties = copy.deepcopy(resource)
11881189
return config
1190+
1191+
def __eq__(self, value):
1192+
return self.to_api_repr() == value.to_api_repr()

google/cloud/bigquery/schema.py

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -610,7 +610,7 @@ class TableSchema:
610610
def __init__(
611611
self, fields: Optional[list] = None, foreign_type_info: Optional[str] = None
612612
):
613-
self._properties = {}
613+
self._properties: Dict[str, Any] = {}
614614
self.fields = fields
615615
self.foreign_type_info = foreign_type_info
616616

@@ -621,7 +621,7 @@ def fields(self) -> Any:
621621
return self._properties.get("fields")
622622

623623
@fields.setter
624-
def fields(self, value: list, dtype: str) -> str:
624+
def fields(self, value: list, dtype: str) -> None:
625625
value = _isinstance_or_raise(value, list, none_allowed=True)
626626
self._properties["fields"] = value
627627

@@ -633,7 +633,7 @@ def foreign_type_info(self) -> Any:
633633
return self._properties.get("foreignTypeInfo")
634634

635635
@foreign_type_info.setter
636-
def foreign_type_info(self, value: str, dtype: str) -> str:
636+
def foreign_type_info(self, value: str, dtype: str) -> None:
637637
if not isinstance(value, str):
638638
raise ValueError(
639639
f"Pass {value} as a '{repr(dtype)}'." f"Got {type(value)}."
@@ -701,8 +701,8 @@ class StorageDescriptor:
701701
"org.apache.hadoop.hive.ql.io.orc.OrcInputFormat"). The maximum
702702
length is 128 characters.
703703
locationUri (Optional[str]): The physical location of the table (e.g.
704-
`gs://spark-dataproc-data/pangea-data/case_sensitive/` or
705-
`gs://spark-dataproc-data/pangea-data/*`). The maximum length is
704+
'gs://spark-dataproc-data/pangea-data/case_sensitive/' or
705+
'gs://spark-dataproc-data/pangea-data/'). The maximum length is
706706
2056 bytes.
707707
outputFormat (Optional[str]): Specifies the fully qualified class name
708708
of the OutputFormat (e.g.
@@ -718,7 +718,7 @@ def __init__(
718718
output_format: Optional[str] = None,
719719
serde_info: Optional[SerDeInfo] = None,
720720
):
721-
self._properties = {}
721+
self._properties: Dict[str, Any] = {}
722722
self.input_format = input_format
723723
self.location_uri = location_uri
724724
self.output_format = output_format
@@ -739,9 +739,9 @@ def input_format(self, value: Optional[str]):
739739

740740
@property
741741
def location_uri(self) -> Any:
742-
"""Optional. The physical location of the table (e.g. `gs://spark-
743-
dataproc-data/pangea-data/case_sensitive/` or `gs://spark-dataproc-
744-
data/pangea-data/*`). The maximum length is 2056 bytes."""
742+
"""Optional. The physical location of the table (e.g. 'gs://spark-
743+
dataproc-data/pangea-data/case_sensitive/' or 'gs://spark-dataproc-
744+
data/pangea-data/'). The maximum length is 2056 bytes."""
745745

746746
return self._properties.get("locationUri")
747747

@@ -768,9 +768,9 @@ def serde_info(self) -> Any:
768768
"""Optional. Serializer and deserializer information."""
769769

770770
prop = _get_sub_prop(self._properties, ["serDeInfo"])
771-
print(f"DINOSAUR in SD: {prop}\n\n{self._properties}")
772771
if prop is not None:
773-
prop = SerDeInfo().from_api_repr(prop)
772+
prop = StorageDescriptor().from_api_repr(prop)
773+
print(f"DINOSAUR prop: {prop}")
774774

775775
return prop
776776

@@ -829,7 +829,7 @@ def __init__(
829829
name: Optional[str] = None,
830830
parameters: Optional[dict[str, str]] = None,
831831
):
832-
self._properties = {}
832+
self._properties: Dict[str, Any] = {}
833833
self.serialization_library = serialization_library
834834
self.name = name
835835
self.parameters = parameters
@@ -892,6 +892,6 @@ def from_api_repr(cls, resource: dict) -> SerDeInfo:
892892
Returns:
893893
An instance of the class initialized with data from 'resource'.
894894
"""
895-
config = cls()
895+
config = cls("")
896896
config._properties = copy.deepcopy(resource)
897897
return config

tests/unit/test_table.py

Lines changed: 5 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -5880,34 +5880,23 @@ def test_external_catalog_table_options_getter(
58805880
dataset = DatasetReference(self.PROJECT, self.DS_ID)
58815881
table_ref = dataset.table(self.TABLE_NAME)
58825882
table = self._make_one(table_ref)
5883+
expected = external_catalog_table_options
58835884

58845885
# Confirm that external catalog table options have not been set
58855886
assert table.external_catalog_table_options is None
58865887

58875888
# Add an ExternalCatalogTableOptions object to the table.
58885889
table._properties[
58895890
"externalCatalogTableOptions"
5890-
] = external_catalog_table_options
5891-
table_repr = table.to_api_repr()
5891+
] = external_catalog_table_options.to_api_repr()
58925892

58935893
# Extract the ecto object.
5894-
ecto_output = table_repr["externalCatalogTableOptions"]
5894+
result = table.external_catalog_table_options
58955895

58965896
# Confirm that external catalog table options are an
58975897
# ExternalCatalogTableOptions object
5898-
assert isinstance(ecto_output, ExternalCatalogTableOptions)
5899-
5900-
storage_descriptor = request.getfixturevalue("_make_storage_descriptor")
5901-
5902-
expected = {
5903-
"connectionId": "connection123",
5904-
"parameters": {"key": "value"},
5905-
"storageDescriptor": storage_descriptor.to_api_repr(),
5906-
}
5907-
result = ecto_output.to_api_repr()
5908-
5909-
# Confirm that the api_repr of the ecto_output matches the inputs
5910-
print(f"DINOSAUR : {result}\n\n{expected}")
5898+
assert isinstance(result, ExternalCatalogTableOptions)
5899+
assert isinstance(expected, ExternalCatalogTableOptions)
59115900
assert result == expected
59125901

59135902
def test_external_catalog_table_options_setter(

0 commit comments

Comments (0)