Skip to content

Commit f301442

Browse files
authored
CLN: remove workaround for streaming delays. (#173)
Now that load jobs are used, no sleep is required when replacing a table.
1 parent: 0069da3 · commit: f301442

File tree

2 files changed

+0
-26
lines changed

2 files changed

+0
-26
lines changed

pandas_gbq/gbq.py

Lines changed: 0 additions & 15 deletions
Original file line number | Diff line number | Diff line change
@@ -4,7 +4,6 @@
44
import time
55
import warnings
66
from datetime import datetime
7-
from time import sleep
87

98
import numpy as np
109
from pandas import DataFrame, compat
@@ -690,24 +689,10 @@ def schema_is_subset(self, dataset_id, table_id, schema):
690689
return all(field in fields_remote for field in fields_local)
691690

692691
def delete_and_recreate_table(self, dataset_id, table_id, table_schema):
693-
delay = 0
694-
695-
# Changes to table schema may take up to 2 minutes as of May 2015 See
696-
# `Issue 191
697-
# <https://code.google.com/p/google-bigquery/issues/detail?id=191>`__
698-
# Compare previous schema with new schema to determine if there should
699-
# be a 120 second delay
700-
701-
if not self.verify_schema(dataset_id, table_id, table_schema):
702-
logger.info('The existing table has a different schema. Please '
703-
'wait 2 minutes. See Google BigQuery issue #191')
704-
delay = 120
705-
706692
table = _Table(self.project_id, dataset_id,
707693
private_key=self.private_key)
708694
table.delete(table_id)
709695
table.create(table_id, table_schema)
710-
sleep(delay)
711696

712697

713698
def _get_credentials_file():

tests/system.py

Lines changed: 0 additions & 11 deletions
Original file line number | Diff line number | Diff line change
@@ -872,17 +872,6 @@ def test_upload_subset_columns_if_table_exists_append(self):
872872
private_key=self.credentials)
873873
assert result['num_rows'][0] == test_size * 2
874874

875-
# This test is currently failing intermittently due to changes in the
876-
# BigQuery backend. You can track the issue in the Google BigQuery issue
877-
# tracker `here <https://issuetracker.google.com/issues/64329577>`__.
878-
# Currently you need to stream data twice in order to successfully stream
879-
# data when you delete and re-create a table with a different schema.
880-
# Something to consider is that google-cloud-bigquery returns an array of
881-
# streaming insert errors rather than raising an exception. In this
882-
# scenario, a decision could be made by the user to check for streaming
883-
# errors and retry as needed. See `Issue 75
884-
# <https://github.com/pydata/pandas-gbq/issues/75>`__
885-
@pytest.mark.xfail(reason="Delete/create table w/ different schema issue")
886875
def test_upload_data_if_table_exists_replace(self):
887876
test_id = "4"
888877
test_size = 10

0 commit comments

Comments (0)