diff --git a/lib/cloud_controller/blobstore/client_provider.rb b/lib/cloud_controller/blobstore/client_provider.rb index a75ec23d29f..d18468bea75 100644 --- a/lib/cloud_controller/blobstore/client_provider.rb +++ b/lib/cloud_controller/blobstore/client_provider.rb @@ -1,9 +1,10 @@ require 'cloud_controller/blobstore/client' require 'cloud_controller/blobstore/retryable_client' require 'cloud_controller/blobstore/fog/fog_client' -require 'cloud_controller/blobstore/fog/error_handling_client' +require 'cloud_controller/blobstore/error_handling_client' require 'cloud_controller/blobstore/webdav/dav_client' require 'cloud_controller/blobstore/safe_delete_client' +require 'cloud_controller/blobstore/storage_cli/storage_cli_client' require 'google/apis/errors' module CloudController @@ -12,6 +13,9 @@ class ClientProvider def self.provide(options:, directory_key:, root_dir: nil, resource_type: nil) if options[:blobstore_type].blank? || (options[:blobstore_type] == 'fog') provide_fog(options, directory_key, root_dir) + elsif options[:blobstore_type] == 'storage-cli' + # storage-cli is an experimental feature and not yet fully implemented. !!! DO NOT USE IN PRODUCTION !!! 
+ provide_storage_cli(options, directory_key, root_dir) else provide_webdav(options, directory_key, root_dir) end @@ -65,6 +69,22 @@ def provide_webdav(options, directory_key, root_dir) Client.new(SafeDeleteClient.new(retryable_client, root_dir)) end + + def provide_storage_cli(options, directory_key, root_dir) + raise BlobstoreError.new('connection_config for storage-cli is not provided') unless options[:connection_config] + + client = StorageCliClient.build(connection_config: options.fetch(:connection_config), + directory_key: directory_key, + root_dir: root_dir, + min_size: options[:minimum_size], + max_size: options[:maximum_size]) + + logger = Steno.logger('cc.blobstore.storage_cli_client') + errors = [StandardError] + retryable_client = RetryableClient.new(client:, errors:, logger:) + + Client.new(SafeDeleteClient.new(retryable_client, root_dir)) + end end end end diff --git a/lib/cloud_controller/blobstore/fog/error_handling_client.rb b/lib/cloud_controller/blobstore/error_handling_client.rb similarity index 100% rename from lib/cloud_controller/blobstore/fog/error_handling_client.rb rename to lib/cloud_controller/blobstore/error_handling_client.rb diff --git a/lib/cloud_controller/blobstore/storage_cli/azure_storage_cli_client.rb b/lib/cloud_controller/blobstore/storage_cli/azure_storage_cli_client.rb new file mode 100644 index 00000000000..1ac93b30848 --- /dev/null +++ b/lib/cloud_controller/blobstore/storage_cli/azure_storage_cli_client.rb @@ -0,0 +1,20 @@ +module CloudController + module Blobstore + class AzureStorageCliClient < StorageCliClient + def cli_path + ENV['AZURE_STORAGE_CLI_PATH'] || '/var/vcap/packages/azure-storage-cli/bin/azure-storage-cli' + end + + def build_config(connection_config) + { + account_name: connection_config[:azure_storage_account_name], + account_key: connection_config[:azure_storage_access_key], + container_name: @directory_key, + environment: connection_config[:environment] + }.compact + end + + 
CloudController::Blobstore::StorageCliClient.register('AzureRM', AzureStorageCliClient) + end + end +end diff --git a/lib/cloud_controller/blobstore/storage_cli/storage_cli_blob.rb b/lib/cloud_controller/blobstore/storage_cli/storage_cli_blob.rb new file mode 100644 index 00000000000..b16f9f19777 --- /dev/null +++ b/lib/cloud_controller/blobstore/storage_cli/storage_cli_blob.rb @@ -0,0 +1,43 @@ +module CloudController + module Blobstore + class StorageCliBlob < Blob + attr_reader :key + + def initialize(key, properties: nil, signed_url: nil) + @key = key + @signed_url = signed_url if signed_url + # Set properties to an empty hash if nil to avoid nil errors + @properties = properties || {} + end + + def internal_download_url + signed_url + end + + def public_download_url + signed_url + end + + def attributes(*keys) + @attributes ||= { + etag: @properties.fetch('etag', nil), + last_modified: @properties.fetch('last_modified', nil), + content_length: @properties.fetch('content_length', nil), + created_at: @properties.fetch('created_at', nil) + } + + return @attributes if keys.empty? + + @attributes.select { |key, _| keys.include? 
key } + end + + private + + def signed_url + raise BlobstoreError.new('StorageCliBlob not configured with a signed URL') unless @signed_url + + @signed_url + end + end + end +end diff --git a/lib/cloud_controller/blobstore/storage_cli/storage_cli_client.rb b/lib/cloud_controller/blobstore/storage_cli/storage_cli_client.rb new file mode 100644 index 00000000000..2bce9d93606 --- /dev/null +++ b/lib/cloud_controller/blobstore/storage_cli/storage_cli_client.rb @@ -0,0 +1,221 @@ +require 'open3' +require 'tempfile' +require 'tmpdir' +require 'fileutils' +require 'cloud_controller/blobstore/base_client' +require 'cloud_controller/blobstore/storage_cli/storage_cli_blob' + +module CloudController + module Blobstore + class StorageCliClient < BaseClient + attr_reader :root_dir, :min_size, :max_size + + @registry = {} + + class << self + attr_reader :registry + + def register(provider, klass) + registry[provider] = klass + end + + def build(connection_config:, directory_key:, root_dir:, min_size: nil, max_size: nil) + provider = connection_config[:provider] + raise 'Missing connection_config[:provider]' if provider.nil? + + impl_class = registry[provider] + raise "No storage CLI client registered for provider #{provider}" unless impl_class + + impl_class.new(connection_config:, directory_key:, root_dir:, min_size:, max_size:) + end + end + + def initialize(connection_config:, directory_key:, root_dir:, min_size: nil, max_size: nil) + @cli_path = cli_path + @directory_key = directory_key + @root_dir = root_dir + @min_size = min_size || 0 + @max_size = max_size + config = build_config(connection_config) + @config_file = write_config_file(config) + @fork = connection_config.fetch(:fork, false) + end + + def local? 
+ false + end + + def exists?(blobstore_key) + key = partitioned_key(blobstore_key) + _, status = run_cli('exists', key, allow_exit_code_three: true) + + if status.exitstatus == 0 + return true + elsif status.exitstatus == 3 + return false + end + + false + end + + def download_from_blobstore(source_key, destination_path, mode: nil) + FileUtils.mkdir_p(File.dirname(destination_path)) + run_cli('get', partitioned_key(source_key), destination_path) + + File.chmod(mode, destination_path) if mode + end + + def cp_to_blobstore(source_path, destination_key) + start = Time.now.utc + log_entry = 'cp-skip' + size = -1 + + logger.info('cp-start', destination_key: destination_key, source_path: source_path, bucket: @directory_key) + + File.open(source_path) do |file| + size = file.size + next unless within_limits?(size) + + run_cli('put', source_path, partitioned_key(destination_key)) + log_entry = 'cp-finish' + end + + duration = Time.now.utc - start + logger.info(log_entry, + destination_key: destination_key, + duration_seconds: duration, + size: size) + end + + def cp_file_between_keys(source_key, destination_key) + if @fork + run_cli('copy', partitioned_key(source_key), partitioned_key(destination_key)) + else + # Azure CLI doesn't support server-side copy yet, so fallback to local copy + Tempfile.create('blob-copy') do |tmp| + download_from_blobstore(source_key, tmp.path) + cp_to_blobstore(tmp.path, destination_key) + end + end + end + + def delete_all(_=nil) + # page_size is currently not considered. Azure SDK / API has a limit of 5000 + return unless @fork + + # Currently, storage-cli does not support bulk deletion. + run_cli('delete-recursive', @root_dir) + end + + def delete_all_in_path(path) + return unless @fork + + # Currently, storage-cli does not support bulk deletion.
+ run_cli('delete-recursive', partitioned_key(path)) + end + + def delete(key) + run_cli('delete', partitioned_key(key)) + end + + def delete_blob(blob) + delete(blob.key) + end + + def blob(key) + if @fork + properties = properties(key) + return nil if properties.nil? || properties.empty? + + signed_url = sign_url(partitioned_key(key), verb: 'get', expires_in_seconds: 3600) + StorageCliBlob.new(key, properties:, signed_url:) + elsif exists?(key) + # Azure CLI does not support getting blob properties directly, so fallback to local check + signed_url = sign_url(partitioned_key(key), verb: 'get', expires_in_seconds: 3600) + StorageCliBlob.new(key, signed_url:) + end + end + + def files_for(prefix, _ignored_directory_prefixes=[]) + return nil unless @fork + + files, _status = run_cli('list', prefix) + files.split("\n").map(&:strip).reject(&:empty?).map { |file| StorageCliBlob.new(file) } + end + + def ensure_bucket_exists + return unless @fork + + run_cli('ensure-bucket-exists') + end + + private + + def run_cli(command, *args, allow_exit_code_three: false) + logger.info("[storage_cli_client] Running storage-cli: #{@cli_path} -c #{@config_file} #{command} #{args.join(' ')}") + + begin + stdout, stderr, status = Open3.capture3(@cli_path, '-c', @config_file, command, *args) + rescue StandardError => e + raise BlobstoreError.new(e.inspect) + end + + unless status.success? 
|| (allow_exit_code_three && status.exitstatus == 3) + raise "storage-cli #{command} failed with exit code #{status.exitstatus}, output: '#{stdout}', error: '#{stderr}'" + end + + [stdout, status] + end + + def sign_url(key, verb:, expires_in_seconds:) + stdout, _status = run_cli('sign', key, verb.to_s.downcase, "#{expires_in_seconds}s") + stdout.strip + end + + def properties(key) + stdout, _status = run_cli('properties', partitioned_key(key)) + # stdout is expected to be in JSON format - raise an error if it is nil, empty or something unexpected + raise BlobstoreError.new("Properties command returned empty output for key: #{key}") if stdout.nil? || stdout.empty? + + begin + properties = Oj.load(stdout) + rescue StandardError => e + raise BlobstoreError.new("Failed to parse json properties for key: #{key}, error: #{e.message}") + end + + properties + end + + def cli_path + raise NotImplementedError + end + + def build_config(connection_config) + raise NotImplementedError + end + + def write_config_file(config) + # TODO: Consider to move the config generation into capi-release + config_dir = File.join(tmpdir, 'blobstore-configs') + FileUtils.mkdir_p(config_dir) + + config_file_path = File.join(config_dir, "#{@directory_key}.json") + File.open(config_file_path, 'w', 0o600) do |f| + f.write(Oj.dump(config.transform_keys(&:to_s))) + end + config_file_path + end + + def tmpdir + VCAP::CloudController::Config.config.get(:directories, :tmpdir) + rescue StandardError + # Fallback to a temporary directory if the config is not set (e.g. 
for cc-deployment-updater + Dir.mktmpdir('cc_blobstore') + end + + def logger + @logger ||= Steno.logger('cc.blobstore.storage_cli_client') + end + end + end +end diff --git a/lib/cloud_controller/config_schemas/api_schema.rb b/lib/cloud_controller/config_schemas/api_schema.rb index ecbd65d3f84..8eb4be21525 100644 --- a/lib/cloud_controller/config_schemas/api_schema.rb +++ b/lib/cloud_controller/config_schemas/api_schema.rb @@ -198,6 +198,7 @@ class ApiSchema < VCAP::Config minimum_size: Integer, resource_directory_key: String, fog_connection: Hash, + optional(:connection_config) => Hash, fog_aws_storage_options: Hash, fog_gcp_storage_options: Hash }, @@ -205,6 +206,7 @@ class ApiSchema < VCAP::Config buildpacks: { buildpack_directory_key: String, fog_connection: Hash, + optional(:connection_config) => Hash, fog_aws_storage_options: Hash, fog_gcp_storage_options: Hash }, @@ -214,6 +216,7 @@ class ApiSchema < VCAP::Config max_valid_packages_stored: Integer, app_package_directory_key: String, fog_connection: Hash, + optional(:connection_config) => Hash, fog_aws_storage_options: Hash, fog_gcp_storage_options: Hash }, @@ -222,6 +225,7 @@ class ApiSchema < VCAP::Config droplet_directory_key: String, max_staged_droplets_stored: Integer, fog_connection: Hash, + optional(:connection_config) => Hash, fog_aws_storage_options: Hash, fog_gcp_storage_options: Hash }, diff --git a/lib/cloud_controller/config_schemas/clock_schema.rb b/lib/cloud_controller/config_schemas/clock_schema.rb index f240d31a104..c2499cd23e2 100644 --- a/lib/cloud_controller/config_schemas/clock_schema.rb +++ b/lib/cloud_controller/config_schemas/clock_schema.rb @@ -109,6 +109,7 @@ class ClockSchema < VCAP::Config minimum_size: Integer, resource_directory_key: String, fog_connection: Hash, + optional(:connection_config) => Hash, fog_aws_storage_options: Hash, fog_gcp_storage_options: Hash }, @@ -116,6 +117,7 @@ class ClockSchema < VCAP::Config buildpacks: { buildpack_directory_key: String, fog_connection: 
Hash, + optional(:connection_config) => Hash, fog_aws_storage_options: Hash, fog_gcp_storage_options: Hash }, @@ -124,6 +126,7 @@ class ClockSchema < VCAP::Config max_package_size: Integer, app_package_directory_key: String, fog_connection: Hash, + optional(:connection_config) => Hash, fog_aws_storage_options: Hash, fog_gcp_storage_options: Hash }, @@ -131,6 +134,7 @@ class ClockSchema < VCAP::Config droplets: { droplet_directory_key: String, fog_connection: Hash, + optional(:connection_config) => Hash, fog_aws_storage_options: Hash, fog_gcp_storage_options: Hash }, diff --git a/lib/cloud_controller/config_schemas/deployment_updater_schema.rb b/lib/cloud_controller/config_schemas/deployment_updater_schema.rb index 5c0402dc22b..d0b18faeb69 100644 --- a/lib/cloud_controller/config_schemas/deployment_updater_schema.rb +++ b/lib/cloud_controller/config_schemas/deployment_updater_schema.rb @@ -109,6 +109,7 @@ class DeploymentUpdaterSchema < VCAP::Config minimum_size: Integer, resource_directory_key: String, fog_connection: Hash, + optional(:connection_config) => Hash, fog_aws_storage_options: Hash, fog_gcp_storage_options: Hash }, @@ -116,6 +117,7 @@ class DeploymentUpdaterSchema < VCAP::Config buildpacks: { buildpack_directory_key: String, fog_connection: Hash, + optional(:connection_config) => Hash, fog_aws_storage_options: Hash, fog_gcp_storage_options: Hash }, @@ -124,6 +126,7 @@ class DeploymentUpdaterSchema < VCAP::Config max_package_size: Integer, app_package_directory_key: String, fog_connection: Hash, + optional(:connection_config) => Hash, fog_aws_storage_options: Hash, fog_gcp_storage_options: Hash }, @@ -131,6 +134,7 @@ class DeploymentUpdaterSchema < VCAP::Config droplets: { droplet_directory_key: String, fog_connection: Hash, + optional(:connection_config) => Hash, fog_aws_storage_options: Hash, fog_gcp_storage_options: Hash }, diff --git a/lib/cloud_controller/config_schemas/worker_schema.rb b/lib/cloud_controller/config_schemas/worker_schema.rb index 
e8995922d04..86053bbff69 100644 --- a/lib/cloud_controller/config_schemas/worker_schema.rb +++ b/lib/cloud_controller/config_schemas/worker_schema.rb @@ -100,6 +100,7 @@ class WorkerSchema < VCAP::Config minimum_size: Integer, resource_directory_key: String, fog_connection: Hash, + optional(:connection_config) => Hash, fog_aws_storage_options: Hash, fog_gcp_storage_options: Hash }, @@ -107,6 +108,7 @@ class WorkerSchema < VCAP::Config buildpacks: { buildpack_directory_key: String, fog_connection: Hash, + optional(:connection_config) => Hash, fog_aws_storage_options: Hash, fog_gcp_storage_options: Hash }, @@ -116,6 +118,7 @@ class WorkerSchema < VCAP::Config max_valid_packages_stored: Integer, app_package_directory_key: String, fog_connection: Hash, + optional(:connection_config) => Hash, fog_aws_storage_options: Hash, fog_gcp_storage_options: Hash }, @@ -123,6 +126,7 @@ class WorkerSchema < VCAP::Config droplets: { droplet_directory_key: String, fog_connection: Hash, + optional(:connection_config) => Hash, fog_aws_storage_options: Hash, fog_gcp_storage_options: Hash }, diff --git a/spec/unit/lib/cloud_controller/blobstore/client_provider_spec.rb b/spec/unit/lib/cloud_controller/blobstore/client_provider_spec.rb index 713fe60f726..be6f9b6c95a 100644 --- a/spec/unit/lib/cloud_controller/blobstore/client_provider_spec.rb +++ b/spec/unit/lib/cloud_controller/blobstore/client_provider_spec.rb @@ -125,6 +125,28 @@ module Blobstore expect(DavClient).to have_received(:new) end end + + context 'when storage-cli is requested' do + let(:blobstore_type) { 'storage-cli' } + let(:directory_key) { 'some-bucket' } + let(:root_dir) { 'some-root-dir' } + let(:storage_cli_client_mock) { class_double(CloudController::Blobstore::StorageCliClient) } + + before do + options.merge!(connection_config: {}, minimum_size: 100, maximum_size: 1000) + end + + it 'provides a storage-cli client' do + allow(StorageCliClient).to receive(:build).and_return(storage_cli_client_mock) + 
ClientProvider.provide(options:, directory_key:, root_dir:) + expect(StorageCliClient).to have_received(:build).with(connection_config: {}, directory_key: directory_key, root_dir: root_dir, min_size: 100, max_size: 1000) + end + + it 'raises an error if connection_config is not provided' do + options.delete(:connection_config) + expect { ClientProvider.provide(options:, directory_key:, root_dir:) }.to raise_error(BlobstoreError, 'connection_config for storage-cli is not provided') + end + end end end end diff --git a/spec/unit/lib/cloud_controller/blobstore/fog/error_handling_client_spec.rb b/spec/unit/lib/cloud_controller/blobstore/error_handling_client_spec.rb similarity index 98% rename from spec/unit/lib/cloud_controller/blobstore/fog/error_handling_client_spec.rb rename to spec/unit/lib/cloud_controller/blobstore/error_handling_client_spec.rb index 4b838078ae5..760306a8457 100644 --- a/spec/unit/lib/cloud_controller/blobstore/fog/error_handling_client_spec.rb +++ b/spec/unit/lib/cloud_controller/blobstore/error_handling_client_spec.rb @@ -1,6 +1,6 @@ require 'spec_helper' -require_relative '../client_shared' -require 'cloud_controller/blobstore/fog/error_handling_client' +require_relative 'client_shared' +require 'cloud_controller/blobstore/error_handling_client' require 'cloud_controller/blobstore/null_client' module CloudController diff --git a/spec/unit/lib/cloud_controller/blobstore/storage_cli/azure_storage_cli_client_spec.rb b/spec/unit/lib/cloud_controller/blobstore/storage_cli/azure_storage_cli_client_spec.rb new file mode 100644 index 00000000000..c1e097bc4bd --- /dev/null +++ b/spec/unit/lib/cloud_controller/blobstore/storage_cli/azure_storage_cli_client_spec.rb @@ -0,0 +1,203 @@ +require 'spec_helper' +require_relative '../client_shared' +require 'cloud_controller/blobstore/storage_cli/azure_storage_cli_client' +require 'cloud_controller/blobstore/storage_cli/storage_cli_blob' + +module CloudController + module Blobstore + RSpec.describe 
AzureStorageCliClient do + subject(:client) { AzureStorageCliClient.new(connection_config: connection_config, directory_key: directory_key, root_dir: 'bommel') } + let(:directory_key) { 'my-bucket' } + let(:connection_config) do + { + azure_storage_access_key: 'some-access-key', + azure_storage_account_name: 'some-account-name', + container_name: directory_key, + environment: 'AzureCloud', + provider: 'AzureRM', + fork: true + } + end + let(:downloaded_file) do + Tempfile.open('') do |tmpfile| + tmpfile.write('downloaded file content') + tmpfile + end + end + + let(:deletable_blob) { StorageCliBlob.new('deletable-blob') } + let(:dest_path) { File.join(Dir.mktmpdir, SecureRandom.uuid) } + + describe 'conforms to the blobstore client interface' do + before do + allow(client).to receive(:run_cli).with('exists', anything, allow_exit_code_three: true).and_return([nil, instance_double(Process::Status, exitstatus: 0)]) + allow(client).to receive(:run_cli).with('get', anything, anything).and_wrap_original do |_original_method, _cmd, _source, dest_path| + File.write(dest_path, 'downloaded content') + [nil, instance_double(Process::Status, exitstatus: 0)] + end + allow(client).to receive(:run_cli).with('put', anything, anything).and_return([nil, instance_double(Process::Status, exitstatus: 0)]) + allow(client).to receive(:run_cli).with('copy', anything, anything).and_return([nil, instance_double(Process::Status, exitstatus: 0)]) + allow(client).to receive(:run_cli).with('delete', anything).and_return([nil, instance_double(Process::Status, exitstatus: 0)]) + allow(client).to receive(:run_cli).with('delete-recursive', anything).and_return([nil, instance_double(Process::Status, exitstatus: 0)]) + allow(client).to receive(:run_cli).with('list', anything).and_return(["aa/bb/blob1\ncc/dd/blob2\n", instance_double(Process::Status, exitstatus: 0)]) + allow(client).to receive(:run_cli).with('ensure-bucket-exists').and_return([nil, instance_double(Process::Status, exitstatus: 0)]) + 
allow(client).to receive(:run_cli).with('properties', anything).and_return(['{"dummy": "json"}', instance_double(Process::Status, exitstatus: 0)]) + allow(client).to receive(:run_cli).with('sign', anything, 'get', '3600s').and_return(['some-url', instance_double(Process::Status, exitstatus: 0)]) + end + + it_behaves_like 'a blobstore client' + end + + describe '#local?' do + it 'returns false' do + expect(client.local?).to be false + end + end + + describe 'config file' do + it 'builds a valid config file' do + expect(client.instance_variable_get(:@config_file)).to be_a(String) + expect(File.exist?(client.instance_variable_get(:@config_file))).to be true + expect(File.read(client.instance_variable_get(:@config_file))).to eq( + '{"account_name":"some-account-name","account_key":"some-access-key","container_name":"my-bucket","environment":"AzureCloud"}' + ) + end + end + + describe '#cli_path' do + it 'returns the default CLI path' do + expect(client.cli_path).to eq('/var/vcap/packages/azure-storage-cli/bin/azure-storage-cli') + end + + it 'can be overridden by an environment variable' do + allow(ENV).to receive(:[]).with('AZURE_STORAGE_CLI_PATH').and_return('/custom/path/to/azure-storage-cli') + expect(client.cli_path).to eq('/custom/path/to/azure-storage-cli') + end + end + + describe '#exists?' 
do + context 'when the blob exists' do + before { allow(client).to receive(:run_cli).with('exists', any_args).and_return([nil, instance_double(Process::Status, exitstatus: 0)]) } + + it('returns true') { expect(client.exists?('some-blob-key')).to be true } + end + + context 'when the blob does not exist' do + before { allow(client).to receive(:run_cli).with('exists', any_args).and_return([nil, instance_double(Process::Status, exitstatus: 3)]) } + + it('returns false') { expect(client.exists?('some-blob-key')).to be false } + end + end + + describe '#files_for' do + context 'when CLI returns multiple files' do + let(:cli_output) { "aa/bb/blob1\ncc/dd/blob2\n" } + + before do + allow(client).to receive(:run_cli). + with('list', 'some-prefix'). + and_return([cli_output, instance_double(Process::Status, success?: true)]) + end + + it 'returns StorageCliBlob instances for each file' do + blobs = client.files_for('some-prefix') + expect(blobs.map(&:key)).to eq(['aa/bb/blob1', 'cc/dd/blob2']) + expect(blobs).to all(be_a(StorageCliBlob)) + end + end + + context 'when CLI returns empty output' do + before do + allow(client).to receive(:run_cli). + with('list', 'some-prefix'). + and_return(["\n", instance_double(Process::Status, success?: true)]) + end + + it 'returns an empty array' do + expect(client.files_for('some-prefix')).to eq([]) + end + end + + context 'when CLI output has extra whitespace' do + let(:cli_output) { "aa/bb/blob1 \n \ncc/dd/blob2\n" } + + before do + allow(client).to receive(:run_cli). + with('list', 'some-prefix'). 
+ and_return([cli_output, instance_double(Process::Status, success?: true)]) + end + + it 'strips and rejects empty lines' do + blobs = client.files_for('some-prefix') + expect(blobs.map(&:key)).to eq(['aa/bb/blob1', 'cc/dd/blob2']) + end + end + end + + describe '#blob' do + let(:properties_json) { '{"etag": "test-etag", "last_modified": "2024-10-01T00:00:00Z", "content_length": 1024}' } + + it 'returns a list of StorageCliBlob instances for a given key' do + allow(client).to receive(:run_cli).with('properties', 'bommel/va/li/valid-blob').and_return([properties_json, instance_double(Process::Status, exitstatus: 0)]) + allow(client).to receive(:run_cli).with('sign', 'bommel/va/li/valid-blob', 'get', '3600s').and_return(['some-url', instance_double(Process::Status, exitstatus: 0)]) + + blob = client.blob('valid-blob') + expect(blob).to be_a(StorageCliBlob) + expect(blob.key).to eq('valid-blob') + expect(blob.attributes(:etag, :last_modified, :content_length)).to eq({ + etag: 'test-etag', + last_modified: '2024-10-01T00:00:00Z', + content_length: 1024 + }) + expect(blob.internal_download_url).to eq('some-url') + expect(blob.public_download_url).to eq('some-url') + end + + it 'raises an error if the cli output is empty' do + allow(client).to receive(:run_cli).with('properties', 'bommel/no/ne/nonexistent-blob').and_return([nil, instance_double(Process::Status, exitstatus: 0)]) + expect { client.blob('nonexistent-blob') }.to raise_error(BlobstoreError, /Properties command returned empty output/) + end + + it 'raises an error if the cli output is not valid JSON' do + allow(client).to receive(:run_cli).with('properties', 'bommel/in/va/invalid-json').and_return(['not a json', instance_double(Process::Status, exitstatus: 0)]) + expect { client.blob('invalid-json') }.to raise_error(BlobstoreError, /Failed to parse json properties/) + end + end + + describe '#run_cli' do + it 'returns output and status on success' do + status = instance_double(Process::Status, success?: true, 
exitstatus: 0) + allow(Open3).to receive(:capture3).with(anything, '-c', anything, 'list', 'arg1').and_return(['ok', '', status]) + + output, returned_status = client.send(:run_cli, 'list', 'arg1') + expect(output).to eq('ok') + expect(returned_status).to eq(status) + end + + it 'raises an error on failure' do + status = instance_double(Process::Status, success?: false, exitstatus: 1) + allow(Open3).to receive(:capture3).with(anything, '-c', anything, 'list', 'arg1').and_return(['', 'error message', status]) + + expect do + client.send(:run_cli, 'list', 'arg1') + end.to raise_error(RuntimeError, /storage-cli list failed with exit code 1/) + end + + it 'allows exit code 3 if specified' do + status = instance_double(Process::Status, success?: false, exitstatus: 3) + allow(Open3).to receive(:capture3).with(anything, '-c', anything, 'list', 'arg1').and_return(['', 'error message', status]) + + output, returned_status = client.send(:run_cli, 'list', 'arg1', allow_exit_code_three: true) + expect(output).to eq('') + expect(returned_status).to eq(status) + end + + it 'raises BlobstoreError on Open3 failure' do + allow(Open3).to receive(:capture3).and_raise(StandardError.new('Open3 error')) + + expect { client.send(:run_cli, 'list', 'arg1') }.to raise_error(BlobstoreError, /Open3 error/) + end + end + end + end +end diff --git a/spec/unit/lib/cloud_controller/blobstore/storage_cli/storage_cli_blob_spec.rb b/spec/unit/lib/cloud_controller/blobstore/storage_cli/storage_cli_blob_spec.rb new file mode 100644 index 00000000000..1595f3caa7a --- /dev/null +++ b/spec/unit/lib/cloud_controller/blobstore/storage_cli/storage_cli_blob_spec.rb @@ -0,0 +1,80 @@ +require 'spec_helper' +require_relative '../blob_shared' + +module CloudController + module Blobstore + RSpec.describe StorageCliBlob do + let(:properties) { { 'etag' => 'test-blob-etag', 'last_modified' => '2024-10-01T00:00:00Z', 'content_length' => 1024 } } + let(:signed_url) { 'http://signed.example.com/test-blob' } + + 
subject(:blob) { StorageCliBlob.new('test-blob', properties:, signed_url:) } + + it_behaves_like 'a blob' + + describe '#key' do + it 'returns the key of the blob' do + expect(blob.key).to eq('test-blob') + end + end + + describe 'download_urls' do + it 'returns the internal download URL of the blob' do + expect(blob.internal_download_url).to eq(signed_url) + end + + it 'returns the public download URL of the blob' do + expect(blob.public_download_url).to eq(signed_url) + end + + context 'when signed_url is not provided' do + subject(:blob_without_signed_url) { StorageCliBlob.new('test-blob', properties:) } + + it 'raises an error when accessing internal_download_url' do + expect { blob_without_signed_url.internal_download_url }.to raise_error(BlobstoreError, 'StorageCliBlob not configured with a signed URL') + end + + it 'raises an error when accessing public_download_url' do + expect { blob_without_signed_url.public_download_url }.to raise_error(BlobstoreError, 'StorageCliBlob not configured with a signed URL') + end + end + end + + describe '#attributes' do + it 'returns a hash of attributes for the blob' do + expect(blob.attributes).to eq( + etag: 'test-blob-etag', + last_modified: '2024-10-01T00:00:00Z', + created_at: nil, + content_length: 1024 + ) + end + + it 'returns specific attributes when requested' do + expect(blob.attributes(:etag)).to eq(etag: 'test-blob-etag') + end + + context 'when properties are not provided' do + subject(:blob_without_properties) { StorageCliBlob.new('test-blob', signed_url:) } + + it 'returns an empty hash for attributes' do + expect(blob_without_properties.attributes).to eq({ + etag: nil, + last_modified: nil, + created_at: nil, + content_length: nil + }) + end + + it 'returns nil for specific attributes' do + expect(blob_without_properties.attributes(:etag, :last_modified, :created_at, :content_length)).to eq( + etag: nil, + last_modified: nil, + created_at: nil, + content_length: nil + ) + end + end + end + end + end +end 
diff --git a/spec/unit/lib/cloud_controller/blobstore/storage_cli/storage_cli_client_spec.rb b/spec/unit/lib/cloud_controller/blobstore/storage_cli/storage_cli_client_spec.rb new file mode 100644 index 00000000000..08873e926f6 --- /dev/null +++ b/spec/unit/lib/cloud_controller/blobstore/storage_cli/storage_cli_client_spec.rb @@ -0,0 +1,27 @@ +require 'spec_helper' +require 'cloud_controller/blobstore/storage_cli/azure_storage_cli_client' + +module CloudController + module Blobstore + RSpec.describe StorageCliClient do + describe 'registry build and lookup' do + it 'builds the correct client' do + client_from_registry = StorageCliClient.build(connection_config: { provider: 'AzureRM' }, directory_key: 'dummy-key', root_dir: 'dummy-root') + expect(client_from_registry).to be_a(AzureStorageCliClient) + end + + it 'raises an error for an unregistered provider' do + expect do + StorageCliClient.build(connection_config: { provider: 'UnknownProvider' }, directory_key: 'dummy-key', root_dir: 'dummy-root') + end.to raise_error(RuntimeError, 'No storage CLI client registered for provider UnknownProvider') + end + + it 'raises an error if provider is missing' do + expect do + StorageCliClient.build(connection_config: {}, directory_key: 'dummy-key', root_dir: 'dummy-root') + end.to raise_error(RuntimeError, 'Missing connection_config[:provider]') + end + end + end + end +end