@@ -16,29 +16,82 @@ class << self
1616 attr_reader :registry
1717
# Associate a storage-cli implementation class with a provider name.
# Provider keys are normalized to strings so that symbol and string
# registrations resolve to the same registry entry.
def register(provider, klass)
  key = provider.to_s
  registry[key] = klass
end
2121
# Build a storage-cli client for the given resource type.
#
# Loads and validates the resource type's storage-cli config file,
# resolves the registered implementation class for the configured
# provider (tolerating case differences in the registered key), and
# instantiates it.
#
# @raise [RuntimeError] when resource_type is nil or no implementation
#   is registered for the configured provider
def build(directory_key:, root_dir:, resource_type: nil, min_size: nil, max_size: nil)
  raise 'Missing resource_type' if resource_type.nil?

  config = fetch_and_validate_config!(resource_type)
  provider = config['provider']

  name = provider.to_s
  # Try the provider name as given, then lower/upper-cased variants.
  klass = [name, name.downcase, name.upcase].filter_map { |candidate| registry[candidate] }.first
  raise "No storage CLI client registered for provider #{provider}" unless klass

  klass.new(provider:, directory_key:, root_dir:, resource_type:, min_size:, max_size:)
end
34+
# Load and parse the storage-cli JSON config for the given resource
# type, validating that it contains everything the CLI needs.
#
# @param resource_type [String, Symbol] blobstore resource type
# @return [Hash] the parsed config
# @raise [BlobstoreError] when the file cannot be read/parsed or fails
#   validation
def fetch_and_validate_config!(resource_type)
  path = config_path_for!(resource_type)

  json =
    begin
      Oj.load(File.read(path))
    rescue StandardError => e
      raise BlobstoreError.new("Failed to parse storage-cli config JSON at #{path}: #{e.message}")
    end

  validate_required_keys!(json, path)
  json
end
47+
# Resolve the configured storage-cli config file path for a resource
# type, verifying the file exists and is readable.
#
# @raise [BlobstoreError] for unknown resource types or unreadable paths
def config_path_for!(resource_type)
  config_keys = {
    'droplets' => :storage_cli_config_file_droplets,
    'buildpack_cache' => :storage_cli_config_file_droplets,
    'buildpacks' => :storage_cli_config_file_buildpacks,
    'packages' => :storage_cli_config_file_packages,
    'resource_pool' => :storage_cli_config_file_resource_pool
  }
  key = config_keys[resource_type.to_s]
  raise BlobstoreError.new("Unknown resource_type: #{resource_type}") unless key

  path = VCAP::CloudController::Config.config.get(key)
  unless path && File.file?(path) && File.readable?(path)
    raise BlobstoreError.new("storage-cli config file not found or not readable at: #{path.inspect}")
  end

  path
end
64+
# Ensure the parsed storage-cli config declares a provider and all
# required settings, each present and non-blank.
#
# NOTE(review): the required keys are Azure-flavoured (account_name,
# account_key) even though the registry supports multiple providers —
# confirm these apply to every registered provider.
#
# @param json [Hash] parsed config file contents
# @param path [String] config file path, used in error messages
# @raise [BlobstoreError] when the provider or any required key is
#   missing or blank
def validate_required_keys!(json, path)
  validate_provider!(json, path)
  required = %w[
    account_key
    account_name
    container_name
    environment
  ]
  missing = required.reject { |k| json.key?(k) && !json[k].to_s.strip.empty? }
  return if missing.empty?

  # Security: do NOT interpolate the config contents into the error —
  # it contains credentials (account_key) that would leak into logs.
  # Only the missing key names and the file path are reported.
  raise BlobstoreError.new("Missing required keys in config file #{path}: #{missing.join(', ')}")
end
78+
# Ensure the parsed config declares a non-blank 'provider' entry.
#
# @param json [Hash] parsed config file contents
# @param path [String] config file path, used in error messages
# @raise [BlobstoreError] when 'provider' is absent or blank
def validate_provider!(json, path)
  provider = json['provider']
  return unless provider.nil? || provider.to_s.strip.empty?

  # Security: do NOT dump the config contents into the error message —
  # it may contain credentials (e.g. account_key) that must not leak
  # into logs. Only the file path is reported.
  raise BlobstoreError.new("No provider specified in config file: #{path.inspect}")
end
3285 end
3386
34- def initialize ( connection_config :, directory_key :, resource_type :, root_dir :, min_size : nil , max_size : nil )
87+ def initialize ( provider :, directory_key :, resource_type :, root_dir :, min_size : nil , max_size : nil )
3588 @cli_path = cli_path
3689 @directory_key = directory_key
3790 @resource_type = resource_type . to_s
3891 @root_dir = root_dir
3992 @min_size = min_size || 0
4093 @max_size = max_size
41- @fork = connection_config . fetch ( :fork , false )
94+ @provider = provider
4295
4396 file_path = case @resource_type
4497 when 'droplets' , 'buildpack_cache'
@@ -114,28 +167,16 @@ def cp_to_blobstore(source_path, destination_key)
114167 end
115168
# Copy one blob to another key server-side via the storage CLI.
def cp_file_between_keys(source_key, destination_key)
  src = partitioned_key(source_key)
  dst = partitioned_key(destination_key)
  run_cli('copy', src, dst)
end
127172
# Delete every blob under the blobstore root.
#
# The optional argument (historically a page size) is accepted for
# interface compatibility but ignored: storage-cli has no bulk/paged
# deletion, so a single recursive delete is issued instead.
# (Azure SDK / API itself caps listings at 5000 entries per page.)
def delete_all(_ = nil)
  run_cli('delete-recursive', @root_dir)
end
135178
# Delete every blob under the given path prefix.
# storage-cli has no bulk-deletion primitive, so this relies on a
# single recursive delete of the partitioned path.
def delete_all_in_path(path)
  run_cli('delete-recursive', partitioned_key(path))
end
@@ -149,29 +190,19 @@ def delete_blob(blob)
149190 end
150191
# Look up a blob by key.
#
# @return [StorageCliBlob, nil] a wrapper carrying the blob's
#   properties and a one-hour signed GET URL, or nil when the blob
#   does not exist (no/empty properties).
def blob(key)
  props = properties(key)
  return if props.nil? || props.empty?

  url = sign_url(partitioned_key(key), verb: 'get', expires_in_seconds: 3600)
  StorageCliBlob.new(key, properties: props, signed_url: url)
end
164199
# List blobs under a prefix as StorageCliBlob instances.
# The second argument is accepted for interface compatibility only.
def files_for(prefix, _ignored_directory_prefixes = [])
  output, _status = run_cli('list', prefix)
  names = output.split("\n").filter_map do |line|
    stripped = line.strip
    stripped unless stripped.empty?
  end
  names.map { |name| StorageCliBlob.new(name) }
end
171204
# Ask storage-cli to create the backing bucket/container if it does
# not already exist.
def ensure_bucket_exists
  run_cli('ensure-bucket-exists')
end
177208
0 commit comments