Commit 1a80633
Author: Frank Natividad (committed)
Adding timestamp to static names in bigquery
There's an issue where Travis and Circle fail each other's tests because they use the same static resource names. Adding a timestamp to each name keeps concurrent test runs from colliding.
1 parent e0c419f commit 1a80633
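
For context, the fix derives every test resource name from a single timestamp captured in each spec's setup, so concurrent CI runs never contend for the same BigQuery dataset, table, or Cloud Storage object. A minimal sketch of the naming scheme (the specs below store these in instance variables):

# One Unix timestamp per run keeps resource names unique across CI providers.
file_time    = Time.now.to_i                 # seconds since the epoch
file_name    = "bigquery-test_#{file_time}"  # Cloud Storage object name prefix
dataset_name = "test_dataset_#{file_time}"   # BigQuery dataset ID
table_name   = "test_table_#{file_time}"     # BigQuery table ID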

2 files changed: 67 additions, 53 deletions

bigquery/spec/bigquery_sample_spec.rb

Lines changed: 52 additions & 44 deletions
@@ -28,35 +28,40 @@
     @bucket = @storage.bucket ENV["GOOGLE_CLOUD_STORAGE_BUCKET"]
     @tempfiles = []

-    # Examples assume that newly created test_dataset and test_table exist
-    delete_test_dataset!
+    file_time = Time.now.to_i
+    @file_name = "bigquery-test_#{file_time}"
+    @dataset_name = "test_dataset_#{file_time}"
+    @table_name = "test_table_#{file_time}"

-    @dataset = @bigquery.create_dataset "test_dataset"
-    @table = @dataset.create_table "test_table" do |schema|
+    @dataset = @bigquery.create_dataset @dataset_name
+    @table = @dataset.create_table @table_name do |schema|
       schema.string "name"
       schema.integer "value"
     end
-
-    if @bucket.file "bigquery-test.csv"
-      @bucket.file("bigquery-test.csv").delete
-    end
   end

   after do
     # Cleanup any tempfiles that were used by the example spec
     @tempfiles.each &:flush
     @tempfiles.each &:close
+
+    # delete csv file and dataset
+    delete_test_dataset!
+
+    if @bucket.file "#{@file_name}.csv"
+      @bucket.file("#{@file_name}.csv").delete
+    end
   end

   def delete_test_dataset!
-    dataset = @bigquery.dataset "test_dataset"
+    dataset = @bigquery.dataset @dataset_name
     dataset.tables.each &:delete if dataset
     dataset.delete if dataset
   end

   # Helper to create Tempfile that will be cleaned up after test run
   def create_tempfile extension = "txt"
-    file = Tempfile.new [ "bigquery-test", ".#{extension}" ]
+    file = Tempfile.new [ @file_name, ".#{extension}" ]
     @tempfiles << file
     file
   end
@@ -106,76 +111,79 @@ def wait_until times: 5, delay: 1, &condition
   describe "Managing Datasets" do
     example "create dataset" do
       delete_test_dataset!
-      expect(@bigquery.dataset "test_dataset").to be nil
+      expect(@bigquery.dataset @dataset_name).to be nil

       expect {
-        create_dataset project_id: @project_id, dataset_id: "test_dataset"
+        create_dataset project_id: @project_id,
+                       dataset_id: @dataset_name
       }.to output(
-        "Created dataset: test_dataset\n"
+        "Created dataset: #{@dataset_name}\n"
       ).to_stdout

-      expect(@bigquery.dataset "test_dataset").not_to be nil
+      expect(@bigquery.dataset @dataset_name).not_to be nil
     end

     example "list datasets" do
       expect {
         list_datasets project_id: @project_id
       }.to output(
-        /test_dataset/
+        /#{@dataset_name}/
       ).to_stdout
     end

     example "delete dataset" do
       @dataset.tables.each &:delete
-      expect(@bigquery.dataset "test_dataset").not_to be nil
+      expect(@bigquery.dataset @dataset_name).not_to be nil

       expect {
-        delete_dataset project_id: @project_id, dataset_id: "test_dataset"
+        delete_dataset project_id: @project_id,
+                       dataset_id: @dataset_name
       }.to output(
-        "Deleted dataset: test_dataset\n"
+        "Deleted dataset: #{@dataset_name}\n"
       ).to_stdout

-      expect(@bigquery.dataset "test_dataset").to be nil
+      expect(@bigquery.dataset @dataset_name).to be nil
     end
   end

   describe "Managing Tables" do

     example "create table" do
       @table.delete
-      expect(@dataset.table "test_table").to be nil
+      expect(@dataset.table @table_name).to be nil

       expect {
         create_table project_id: @project_id,
-                     dataset_id: "test_dataset",
-                     table_id: "test_table"
+                     dataset_id: @dataset_name,
+                     table_id: @table_name
       }.to output(
-        "Created table: test_table\n"
+        "Created table: #{@table_name}\n"
       ).to_stdout

-      expect(@dataset.table "test_table").not_to be nil
+      expect(@dataset.table @table_name).not_to be nil
     end

     example "list tables" do
       expect {
-        list_tables project_id: @project_id, dataset_id: "test_dataset"
+        list_tables project_id: @project_id,
+                    dataset_id: @dataset_name
       }.to output(
-        /test_table/
+        /#{@table_name}/
       ).to_stdout
     end

     example "delete table" do
-      expect(@dataset.table "test_table").not_to be nil
+      expect(@dataset.table @table_name).not_to be nil

       expect {
         delete_table project_id: @project_id,
-                     dataset_id: "test_dataset",
-                     table_id: "test_table"
+                     dataset_id: @dataset_name,
+                     table_id: @table_name
       }.to output(
-        "Deleted table: test_table\n"
+        "Deleted table: #{@table_name}\n"
       ).to_stdout

-      expect(@dataset.table "test_table").to be nil
+      expect(@dataset.table @table_name).to be nil
     end

     example "list table data" do
@@ -188,8 +196,8 @@ def wait_until times: 5, delay: 1, &condition

       expect {
         list_table_data project_id: @project_id,
-                        dataset_id: "test_dataset",
-                        table_id: "test_table"
+                        dataset_id: @dataset_name,
+                        table_id: @table_name
       }.to output(
         "name = Alice\nvalue = 5\nname = Bob\nvalue = 10\n"
       ).to_stdout
@@ -209,9 +217,9 @@ def wait_until times: 5, delay: 1, &condition
       expect(@table.data).to be_empty

       capture do
-        import_table_data_from_file project_id: @project_id,
-                                    dataset_id: "test_dataset",
-                                    table_id: "test_table",
+        import_table_data_from_file project_id: @project_id,
+                                    dataset_id: @dataset_name,
+                                    table_id: @table_name,
                                     local_file_path: csv_file.path
       end

@@ -237,7 +245,7 @@ def wait_until times: 5, delay: 1, &condition
         csv << [ "Bob", 10 ]
       end

-      file = @bucket.create_file csv_file.path, "bigquery-test.csv"
+      file = @bucket.create_file csv_file.path, "#{@file_name}.csv"

       expect(@table.data).to be_empty

@@ -246,13 +254,13 @@ def wait_until times: 5, delay: 1, &condition
           project_id: @project_id,
           dataset_id: @dataset.dataset_id,
           table_id: @table.table_id,
-          storage_path: "gs://#{@bucket.name}/bigquery-test.csv"
+          storage_path: "gs://#{@bucket.name}/#{@file_name}.csv"
         )
       end

       expect(captured_output).to include(
         "Importing data from Cloud Storage file: " +
-        "gs://#{@bucket.name}/bigquery-test.csv"
+        "gs://#{@bucket.name}/#{@file_name}.csv"
       )
       expect(captured_output).to match(
         /Waiting for load job to complete: job/
@@ -307,30 +315,30 @@ def wait_until times: 5, delay: 1, &condition

       @table.load(csv_file.path).wait_until_done!

-      expect(@bucket.file "bigquery-test.csv").to be nil
+      expect(@bucket.file "#{@file_name}.csv").to be nil

       capture do
         export_table_data_to_cloud_storage(
           project_id: @project_id,
           dataset_id: @dataset.dataset_id,
           table_id: @table.table_id,
-          storage_path: "gs://#{@bucket.name}/bigquery-test.csv"
+          storage_path: "gs://#{@bucket.name}/#{@file_name}.csv"
         )
       end

       expect(captured_output).to include(
         "Exporting data to Cloud Storage file: " +
-        "gs://#{@bucket.name}/bigquery-test.csv"
+        "gs://#{@bucket.name}/#{@file_name}.csv"
       )
       expect(captured_output).to match(
         /Waiting for extract job to complete: job/
       )
       expect(captured_output).to include "Data exported"

-      expect(@bucket.file "bigquery-test.csv").not_to be nil
+      expect(@bucket.file "#{@file_name}.csv").not_to be nil

       local_file = create_tempfile "csv"
-      @bucket.file("bigquery-test.csv").download local_file.path
+      @bucket.file("#{@file_name}.csv").download local_file.path

       csv = CSV.read local_file.path

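Condensed, the sample spec's lifecycle after this change looks like the sketch below (not the commit's code: the describe label is illustrative, and @bigquery stands for the client the spec already builds from environment variables, e.g. Google::Cloud.new(ENV["GOOGLE_CLOUD_PROJECT"]).bigquery). Each run creates uniquely named resources in before and tears down only its own resources in after:

require "google/cloud"

RSpec.describe "BigQuery samples (sketch)" do
  before do
    # Unique names per run; parallel CI runs never share a dataset or table.
    file_time     = Time.now.to_i
    @dataset_name = "test_dataset_#{file_time}"
    @table_name   = "test_table_#{file_time}"
    @dataset      = @bigquery.create_dataset @dataset_name
    @table        = @dataset.create_table @table_name
  end

  after do
    # Delete only the dataset this run created, if it still exists.
    dataset = @bigquery.dataset @dataset_name
    dataset.tables.each &:delete if dataset
    dataset.delete if dataset
  end
end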
bigquery/spec/quickstart_spec.rb

Lines changed: 15 additions & 9 deletions
@@ -18,25 +18,31 @@
 describe "BigQuery Quickstart" do

   it "creates a new dataset" do
-    gcloud = Google::Cloud.new ENV["GOOGLE_CLOUD_PROJECT"]
-    bigquery = gcloud.bigquery
+    gcloud       = Google::Cloud.new ENV["GOOGLE_CLOUD_PROJECT"]
+    bigquery     = gcloud.bigquery
+    dataset_name = "my_new_dataset_#{Time.now.to_i}"

-    if bigquery.dataset "my_new_dataset"
-      bigquery.dataset("my_new_dataset").delete
-    end
-
-    expect(bigquery.dataset "my_new_dataset").to be nil
+    expect(bigquery.dataset dataset_name).to be nil
     expect(Google::Cloud).to receive(:new).
       with("YOUR_PROJECT_ID").
       and_return(gcloud)
+    expect(gcloud).to receive(:bigquery).and_return(bigquery)
+
+    expect(bigquery).to receive(:create_dataset).
+      with("my_new_dataset").
+      and_wrap_original do |m, *args|
+        m.call(dataset_name)
+      end

     expect {
       load File.expand_path("../quickstart.rb", __dir__)
     }.to output(
-      "Dataset my_new_dataset created\.\n"
+      "Dataset #{dataset_name} created\.\n"
     ).to_stdout

-    expect(bigquery.dataset "my_new_dataset").not_to be nil
+    expect(bigquery.dataset dataset_name).not_to be nil
+
+    bigquery.dataset(dataset_name).delete
   end

 end
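
Because quickstart.rb itself still hard-codes the "my_new_dataset" name, the spec cannot simply rename the dataset; it intercepts the quickstart's calls instead. A commented sketch of that rspec-mocks pattern, using the spec's own locals (gcloud, bigquery, dataset_name); this is an annotated restatement of the diff above, not new behavior:

# Route the quickstart's hard-coded project and client through the spec's
# own objects, then wrap create_dataset so the hard-coded dataset name is
# swapped for the unique, timestamped one while the real method still runs.
expect(Google::Cloud).to receive(:new).with("YOUR_PROJECT_ID").and_return(gcloud)
expect(gcloud).to receive(:bigquery).and_return(bigquery)

expect(bigquery).to receive(:create_dataset).
  with("my_new_dataset").                 # the name quickstart.rb passes
  and_wrap_original do |original, *args|
    original.call(dataset_name)           # create the timestamped dataset instead
  end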
