Skip to content

Commit 5e6e619

Browse files
committed
adds several tests: to_api_repr and from_api_repr
1 parent ff0645a commit 5e6e619

File tree

2 files changed

+192
-2
lines changed

2 files changed

+192
-2
lines changed

tests/unit/job/test_load.py

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -441,6 +441,24 @@ def test_from_api_repr_w_properties(self):
441441
self.assertIs(job._client, client)
442442
self._verifyResourceProperties(job, RESOURCE)
443443

444+
def test_to_api_repr(self):
    """to_api_repr should return only the job reference and configuration."""
    self._setUpConstants()
    client = _make_client(project=self.PROJECT)
    resource = self._make_resource(ended=False)

    job = self._get_target_class().from_api_repr(resource, client)

    # Per the documentation in load.py > LoadJob.to_api_repr(), the
    # returned representation must not include statistics.
    self.assertEqual(
        job.to_api_repr(),
        {
            "jobReference": resource["jobReference"],
            "configuration": resource["configuration"],
        },
    )
444462
def test_begin_w_already_running(self):
445463
conn = make_connection()
446464
client = _make_client(project=self.PROJECT, connection=conn)

tests/unit/job/test_load_config.py

Lines changed: 174 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,8 +10,7 @@
1010
# distributed under the License is distributed on an "AS IS" BASIS,
1111
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
1212
# See the License for the specific language governing permissions and
13-
# limitations under the License.
14-
13+
# limitations under the License.
1514
import copy
1615
import warnings
1716

@@ -917,3 +916,176 @@ def test_column_name_character_map_none(self):
917916
config._properties["load"]["columnNameCharacterMap"],
918917
ColumnNameCharacterMap.COLUMN_NAME_CHARACTER_MAP_UNSPECIFIED,
919918
)
919+
920+
# Shared fixture: a fully-populated load-job resource covering every
# supported option, plus an unknown field ("someNewField") to verify
# that unrecognized properties survive a from/to_api_repr round trip.
RESOURCE = {
    "load": {
        "allowJaggedRows": True,
        "createDisposition": "CREATE_NEVER",
        "encoding": "UTF-8",
        "fieldDelimiter": ",",
        "ignoreUnknownValues": True,
        "maxBadRecords": 10,
        "nullMarker": "\\N",
        "quote": '"',
        "schema": {
            "fields": [
                {"name": "name", "type": "STRING", "mode": "NULLABLE"},
                {"name": "age", "type": "INTEGER", "mode": "NULLABLE"},
            ]
        },
        "skipLeadingRows": "1",
        "sourceFormat": "CSV",
        "timePartitioning": {
            "type": "DAY",
            "field": "transaction_date",
        },
        "useAvroLogicalTypes": True,
        "writeDisposition": "WRITE_TRUNCATE",
        "timeZone": "America/New_York",
        "parquetOptions": {"enableListInference": True},
        "columnNameCharacterMap": "V2",
        "someNewField": "some-value",
    }
}
def test_from_api_repr(self):
    """from_api_repr should map every known load property to the matching
    LoadJobConfig attribute and retain unknown fields in ``_properties``.

    Fix: removed the large commented-out copy of the resource dict and the
    commented-out ParquetOptions import — the shared ``self.RESOURCE``
    fixture is the single source of truth.
    """
    from google.cloud.bigquery.job import (
        CreateDisposition,
        LoadJobConfig,
        SourceFormat,
        WriteDisposition,
    )
    from google.cloud.bigquery.job.load import ColumnNameCharacterMap
    from google.cloud.bigquery.schema import SchemaField
    from google.cloud.bigquery.table import TimePartitioning, TimePartitioningType

    config = LoadJobConfig.from_api_repr(self.RESOURCE)

    self.assertTrue(config.allow_jagged_rows)
    self.assertEqual(config.create_disposition, CreateDisposition.CREATE_NEVER)
    self.assertEqual(config.encoding, "UTF-8")
    self.assertEqual(config.field_delimiter, ",")
    self.assertTrue(config.ignore_unknown_values)
    self.assertEqual(config.max_bad_records, 10)
    self.assertEqual(config.null_marker, "\\N")
    self.assertEqual(config.quote_character, '"')
    self.assertEqual(
        config.schema,
        [SchemaField("name", "STRING"), SchemaField("age", "INTEGER")],
    )
    # The API transmits skipLeadingRows as a string; the property is an int.
    self.assertEqual(config.skip_leading_rows, 1)
    self.assertEqual(config.source_format, SourceFormat.CSV)
    self.assertEqual(
        config.time_partitioning,
        TimePartitioning(type_=TimePartitioningType.DAY, field="transaction_date"),
    )
    self.assertTrue(config.use_avro_logical_types)
    self.assertEqual(config.write_disposition, WriteDisposition.WRITE_TRUNCATE)
    self.assertEqual(config.time_zone, "America/New_York")
    self.assertTrue(config.parquet_options.enable_list_inference)
    self.assertEqual(config.column_name_character_map, ColumnNameCharacterMap.V2)
    # Unknown server-side fields must be preserved verbatim.
    self.assertEqual(config._properties["load"]["someNewField"], "some-value")
def test_to_api_repr(self):
    """Setting every LoadJobConfig property should reproduce the shared
    ``self.RESOURCE`` fixture via ``to_api_repr``.

    Fix: removed the large commented-out copy of the expected dict; the
    shared ``self.RESOURCE`` fixture is the single source of truth.
    """
    from google.cloud.bigquery.format_options import ParquetOptions
    from google.cloud.bigquery.job import (
        CreateDisposition,
        LoadJobConfig,
        SourceFormat,
        WriteDisposition,
    )
    from google.cloud.bigquery.job.load import ColumnNameCharacterMap
    from google.cloud.bigquery.schema import SchemaField
    from google.cloud.bigquery.table import TimePartitioning, TimePartitioningType

    config = LoadJobConfig()
    config.allow_jagged_rows = True
    config.create_disposition = CreateDisposition.CREATE_NEVER
    config.encoding = "UTF-8"
    config.field_delimiter = ","
    config.ignore_unknown_values = True
    config.max_bad_records = 10
    config.null_marker = r"\N"
    config.quote_character = '"'
    config.schema = [SchemaField("name", "STRING"), SchemaField("age", "INTEGER")]
    # The property takes an int; the API representation renders it as "1".
    config.skip_leading_rows = 1
    config.source_format = SourceFormat.CSV
    config.time_partitioning = TimePartitioning(
        type_=TimePartitioningType.DAY, field="transaction_date"
    )
    config.use_avro_logical_types = True
    config.write_disposition = WriteDisposition.WRITE_TRUNCATE
    config.time_zone = "America/New_York"
    parquet_options = ParquetOptions()
    parquet_options.enable_list_inference = True
    config.parquet_options = parquet_options
    config.column_name_character_map = ColumnNameCharacterMap.V2
    # Simulate an unknown server-side field set directly on the properties.
    config._properties["load"]["someNewField"] = "some-value"

    api_repr = config.to_api_repr()

    self.assertEqual(api_repr, self.RESOURCE)

0 commit comments

Comments
 (0)