google-cloud-storage_batch_operations/samples/acceptance/ (Storage Batch Operations acceptance test)
@@ -16,7 +16,7 @@
 require_relative "../storage_batch_create_job"
 require_relative "../storage_batch_delete_job"
 require_relative "../storage_batch_cancel_job"
-require_relative "../storage_batch_list_job"
+require_relative "../storage_batch_list_jobs"
 require_relative "../storage_batch_get_job"
 
 describe "Storage Batch Operations" do
@@ -40,22 +40,22 @@
     job_name = "#{job_name_prefix}#{job_id}"
 
     # Create job
-    assert_output(/The #{job_id} is created./) do
+    assert_output(/Storage Batch Operations job #{job_name} is created./) do
       create_job bucket_name: bucket_name, prefix: "ruby_file", job_id: job_id, project_id: project_id
     end
 
     # List jobs
     assert_output(/Job name: #{job_name} present in the list/) do
-      list_job project_id: project_id
+      list_jobs project_id: project_id
     end
 
     # Get job details
-    assert_output(/Found job_name- #{job_name}, job_status- /) do
+    assert_output(/Storage Batch Operations job Found - #{job_name}, job_status- /) do
       get_job project_id: project_id, job_id: job_id
     end
 
     # Cancel job
-    expected_output_pattern = /The #{job_id} is canceled\.|#{job_id} was already completed or was not created\./
+    expected_output_pattern = /Storage Batch Operations job #{job_name} (is canceled|was already completed)\./
     assert_output expected_output_pattern do
       cancel_job project_id: project_id, job_id: job_id
     end

google-cloud-storage_batch_operations/samples/acceptance/helper.rb (17 additions, 9 deletions)
@@ -42,16 +42,24 @@ def delete_bucket_helper bucket_name
 end
 
 def retry_resource_exhaustion
-  5.times do
-    return yield
-  rescue Google::Cloud::ResourceExhaustedError => e
-    puts "\n#{e} Gonna try again"
-    sleep rand(10..16)
-  rescue StandardError => e
-    puts "\n#{e}"
-    raise e
+  attempts = 5
+  start_time = Time.now
+  last_error = nil
+
+  attempts.times do |i|
+    begin
+      return yield
+    rescue Google::Cloud::ResourceExhaustedError => e
+      last_error = e
+      puts "\nAttempt #{i + 1} failed with #{e.class}. Retrying..."
+      sleep rand(10..16)
+    rescue StandardError => e
+      raise e
+    end
   end
-  raise Google::Cloud::ResourceExhaustedError, "Maybe take a break from creating and deleting buckets for a bit"
+
+  elapsed_time = Time.now - start_time
+  raise last_error, "Failed after #{attempts} attempts in #{elapsed_time.round 2} seconds. Last error: #{last_error.message}", last_error.backtrace
 end
 
 def random_bucket_name
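
Note on the reworked helper: retry_resource_exhaustion keeps the same contract as before. It returns the block's value on success, retries only on Google::Cloud::ResourceExhaustedError (up to five attempts with a randomized sleep), re-raises any other error immediately, and re-raises the last quota error once the attempts run out. A minimal usage sketch, assuming the google-cloud-storage gem; storage_client and bucket_name are illustrative names, not part of this diff:

require "google/cloud/storage"

storage_client = Google::Cloud::Storage.new
bucket_name = "my-test-bucket-#{rand 10_000}"

# The helper passes the block's return value through, so the created
# bucket object comes back to the caller on success.
bucket = retry_resource_exhaustion do
  storage_client.create_bucket bucket_name
end
puts "Created #{bucket.name}"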

google-cloud-storage_batch_operations/samples/storage_batch_cancel_job.rb
@@ -15,23 +15,39 @@
 # [START storage_batch_cancel_job]
 require "google/cloud/storage_batch_operations"
 
+# Cancels a Storage Batch Operations job.
+#
+# Note: If the job is already completed or does not exist, a message indicating
+# this will be printed instead of raising an error.
+#
+# @param project_id [String] The ID of your Google Cloud project.
+# @param job_id [String] The ID of the Storage Batch Operations job to cancel.
+#
+# @example
+#   cancel_job project_id: "your-project-id", job_id: "your-job-id"
+#
 def cancel_job project_id:, job_id:
-  # The ID of your project
-  # project_id = "your-project-id"
-
-  # The ID of your Storage batch operation job
-  # job_id = "your-job-id"
-
   client = Google::Cloud::StorageBatchOperations.storage_batch_operations
   parent = "projects/#{project_id}/locations/global"
 
   request = Google::Cloud::StorageBatchOperations::V1::CancelJobRequest.new name: "#{parent}/jobs/#{job_id}"
+
+  # Used to fetch job details with get_job after the cancel call.
+  get_request = Google::Cloud::StorageBatchOperations::V1::GetJobRequest.new name: "#{parent}/jobs/#{job_id}"
 
   begin
-    client.cancel_job request
-    message = "The #{job_id} is canceled."
-  rescue Google::Cloud::FailedPreconditionError, Google::Cloud::NotFoundError
-    ## We will get error if the job was already completed. this is an expected outcome
-    message = "#{job_id} was already completed or was not created."
+    client.cancel_job request
+    # Fetch the job to build the confirmation message.
+    job_detail = client.get_job get_request
+    message = "Storage Batch Operations job #{job_detail.name} is canceled."
+  rescue Google::Cloud::FailedPreconditionError
+    # FailedPreconditionError is raised when the job has already completed.
+    job_detail = client.get_job get_request
+    message = "Storage Batch Operations job #{job_detail.name} was already completed."
+  rescue StandardError => e
+    # Any other error means the job could not be canceled.
+    message = "Failed to cancel job #{job_id}. Error: #{e.message}"
   end
   puts message
 end

google-cloud-storage_batch_operations/samples/storage_batch_create_job.rb
@@ -15,20 +15,25 @@
 # [START storage_batch_create_job]
 require "google/cloud/storage_batch_operations"
 
+# Creates a Storage Batch Operations job to delete objects in a bucket
+# that match a given prefix. The deletion is a "soft delete", meaning
+# objects can be recovered if versioning is enabled on the bucket.
+#
+# @param bucket_name [String] The name of the Google Cloud Storage bucket.
+# @param prefix [String] The prefix of the objects to be included in the job.
+#   The job will operate on all objects whose names start with this prefix.
+# @param job_id [String] A unique identifier for the job.
+# @param project_id [String] The ID of the Google Cloud project where the job will be created.
+#
+# @example
+#   create_job(
+#     bucket_name: "your-unique-bucket-name",
+#     prefix: "test-files/",
+#     job_id: "your-job-id",
+#     project_id: "your-project-id"
+#   )
+#
 def create_job bucket_name:, prefix:, job_id:, project_id:
-  # The name of your GCS bucket
-  # bucket_name = "your-unique-bucket-name"
-
-  # Prefix is the first part of filename on which job has to be executed
-  # prefix = 'test'
-
-  # The ID of your Storage batch operation job
-  # job_id = "your-job-id"
-
-  # The ID of your project
-  # project_id = "your-project-id"
-
-
   client = Google::Cloud::StorageBatchOperations.storage_batch_operations
 
   parent = "projects/#{project_id}/locations/global"
@@ -60,12 +65,18 @@ def create_job bucket_name:, prefix:, job_id:, project_id:
   request = Google::Cloud::StorageBatchOperations::V1::CreateJobRequest.new parent: parent, job_id: job_id, job: job
   create_job_operation = client.create_job request
 
+  # Used to fetch job details with get_job and confirm creation.
+  get_request = Google::Cloud::StorageBatchOperations::V1::GetJobRequest.new name: "#{parent}/jobs/#{job_id}"
+
   begin
     ## Waiting for operation to complete
     create_job_operation.wait_until_done!
-    message = "The #{job_id} is created."
+    ## Fetch the newly created job to confirm creation
+    job_detail = client.get_job get_request
+    message = "Storage Batch Operations job #{job_detail.name} is created."
   rescue StandardError
-    message = " #{job_id} not created"
+    # The job could not be created or confirmed.
+    message = "Failed to create job #{job_id}. Error: #{create_job_operation.error&.message}"
   end
   puts message
 end

google-cloud-storage_batch_operations/samples/storage_batch_delete_job.rb
@@ -15,24 +15,26 @@
 # [START storage_batch_delete_job]
 require "google/cloud/storage_batch_operations"
 
+# Deletes a Storage Batch Operations job.
+#
+# @param project_id [String] The ID of your Google Cloud project.
+# @param job_id [String] The ID of the Storage Batch Operations job to be deleted.
+#
+# @example
+#   delete_job project_id: "your-project-id", job_id: "your-job-id"
+#
 def delete_job project_id:, job_id:
-  # The ID of your project
-  # project_id = "your-project-id"
-
-  # The ID of your Storage batch operation job
-  # job_id = "your-job-id"
-
   client = Google::Cloud::StorageBatchOperations.storage_batch_operations
   parent = "projects/#{project_id}/locations/global"
   request = Google::Cloud::StorageBatchOperations::V1::DeleteJobRequest.new name: "#{parent}/jobs/#{job_id}"
 
   begin
     client.delete_job request
     message = "The #{job_id} is deleted."
-  rescue Google::Cloud::NotFoundError
-    message = "Job #{job_id} not found."
+  rescue StandardError => e
+    # The job could not be deleted (for example, it does not exist).
+    message = "Failed to delete job #{job_id}. Error: #{e.message}"
   end
 
   puts message
 end
 # [END storage_batch_delete_job]

google-cloud-storage_batch_operations/samples/storage_batch_get_job.rb
@@ -15,21 +15,24 @@
 # [START storage_batch_get_job]
 require "google/cloud/storage_batch_operations"
 
-def get_job project_id:, job_id:
-  # The ID of your project
-  # project_id = "your-project-id"
-
-  # The ID of your Storage batch operation job
-  # job_id= "your-job-id"
+# Gets a Storage Batch Operations job.
+#
+# @param project_id [String] The ID of your Google Cloud project.
+# @param job_id [String] The ID of your Storage Batch Operations job.
+#
+# @example
+#   get_job project_id: "your-project-id", job_id: "your-job-id"
+#
+def get_job project_id:, job_id:
   client = Google::Cloud::StorageBatchOperations.storage_batch_operations
   parent = "projects/#{project_id}/locations/global"
   request = Google::Cloud::StorageBatchOperations::V1::GetJobRequest.new name: "#{parent}/jobs/#{job_id}"
   begin
     result = client.get_job request
-    message = "Found job_name- #{result.name}, job_status- #{result.state}"
-  rescue Google::Cloud::NotFoundError
-    message = "Job #{job_id} not found."
+    message = "Storage Batch Operations job Found - #{result.name}, job_status- #{result.state}"
+  rescue StandardError => e
+    # The job could not be fetched (for example, it does not exist).
+    message = "Failed to fetch job #{job_id}. Error: #{e.message}"
   end
   puts message
 end

google-cloud-storage_batch_operations/samples/storage_batch_list_jobs.rb (renamed from storage_batch_list_job.rb)
@@ -12,18 +12,25 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# [START storage_batch_list_job]
+# [START storage_batch_list_jobs]
 require "google/cloud/storage_batch_operations"
 
-def list_job project_id:
-  # The ID of your project
-  # project_id = "your-project-id"
+# Lists Storage Batch Operations jobs for a given project.
+#
+# @param project_id [String] The ID of your Google Cloud project.
+#
+# @example
+#   list_jobs project_id: "your-project-id"
+#
+def list_jobs project_id:
   client = Google::Cloud::StorageBatchOperations.storage_batch_operations
   parent = "projects/#{project_id}/locations/global"
   request = Google::Cloud::StorageBatchOperations::V1::ListJobsRequest.new parent: parent, page_size: 10
   job_list = client.list_jobs request
   job_list.each { |job| puts "Job name: #{job.name} present in the list" }
 end
-# [END storage_batch_list_job]
+# [END storage_batch_list_jobs]
 
-list_job project_id: ARGV.shift if $PROGRAM_NAME == __FILE__
+list_jobs project_id: ARGV.shift if $PROGRAM_NAME == __FILE__
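
Taken together, the renamed and reworked samples cover the same flow the acceptance test walks through: create a job, list jobs, get its details, cancel it, and delete it. A rough driver sketch, assuming all five sample files sit in the same directory; the project ID, bucket name, and job ID are placeholders, not part of this diff:

require_relative "storage_batch_create_job"
require_relative "storage_batch_list_jobs"
require_relative "storage_batch_get_job"
require_relative "storage_batch_cancel_job"
require_relative "storage_batch_delete_job"

project_id = "your-project-id"
job_id = "my-batch-job-#{rand 10_000}"

# Create a job that targets objects whose names start with "ruby_file".
create_job bucket_name: "your-unique-bucket-name", prefix: "ruby_file",
           job_id: job_id, project_id: project_id

list_jobs project_id: project_id                  # prints each job name
get_job project_id: project_id, job_id: job_id    # prints name and state
cancel_job project_id: project_id, job_id: job_id
delete_job project_id: project_id, job_id: job_id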