 require 'heimdall_tools/asff_compatible_products/prowler'
 require 'heimdall_tools/asff_compatible_products/securityhub'
 
-
 module HeimdallTools
   DEFAULT_NIST_TAG = %w{SA-11 RA-5}.freeze
 
@@ -40,19 +39,19 @@ class ASFFMapper
     }.freeze
 
     PRODUCT_ARN_MAPPING = {
-      /arn:.+:securityhub:.+:.*:product\/aws\/firewall-manager/ => FirewallManager,
-      /arn:.+:securityhub:.+:.*:product\/aws\/securityhub/ => SecurityHub,
-      /arn:.+:securityhub:.+:.*:product\/prowler\/prowler/ => Prowler
+      %r{arn:.+:securityhub:.+:.*:product/aws/firewall-manager} => FirewallManager,
+      %r{arn:.+:securityhub:.+:.*:product/aws/securityhub} => SecurityHub,
+      %r{arn:.+:securityhub:.+:.*:product/prowler/prowler} => Prowler
     }.freeze
 
     def initialize(asff_json, securityhub_standards_json_array: nil, meta: nil)
       @meta = meta
 
       @supporting_docs = {}
-      @supporting_docs[SecurityHub] = SecurityHub.supporting_docs({ standards: securityhub_standards_json_array })
+      @supporting_docs[SecurityHub] = SecurityHub.supporting_docs({ standards: securityhub_standards_json_array })
 
       begin
-        asff_required_keys = %w(AwsAccountId CreatedAt Description GeneratorId Id ProductArn Resources SchemaVersion Severity Title Types UpdatedAt)
+        asff_required_keys = %w{AwsAccountId CreatedAt Description GeneratorId Id ProductArn Resources SchemaVersion Severity Title Types UpdatedAt}
         @report = JSON.parse(asff_json)
         if @report.length == 1 && @report.member?('Findings') && @report['Findings'].each { |finding| asff_required_keys.difference(finding.keys).none? }.all?
           # ideal case that is spec compliant
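
Note on the PRODUCT_ARN_MAPPING hunk above: switching the slash-delimited regexp literals to %r{} only avoids escaping the forward slashes inside the product ARNs; the patterns themselves are unchanged. A minimal, self-contained sketch of how such a mapping resolves a product ARN (the ARN and the stand-in classes below are made up for illustration):

# Stand-ins for the product-specific helper classes referenced by the mapping.
SecurityHub = Class.new
Prowler = Class.new

mapping = {
  %r{arn:.+:securityhub:.+:.*:product/aws/securityhub} => SecurityHub,
  %r{arn:.+:securityhub:.+:.*:product/prowler/prowler} => Prowler
}.freeze

arn = 'arn:aws:securityhub:us-east-1:123456789012:product/aws/securityhub' # sample ARN
key = mapping.keys.find { |regex| arn.match(regex) }
puts mapping[key] # => SecurityHub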
@@ -61,7 +60,7 @@ def initialize(asff_json, securityhub_standards_json_array: nil, meta: nil)
6160 # individual finding so have to add wrapping array
6261 @report = { 'Findings' => [ @report ] }
6362 else
64- raise " Not a findings file nor an individual finding"
63+ raise ' Not a findings file nor an individual finding'
6564 end
6665 rescue StandardError => e
6766 raise "Invalid ASFF file provided:\n Exception: #{ e } "
@@ -79,12 +78,10 @@ def external_product_handler(product, data, func, default)
         keywords = { encode: method(:encode) }
         keywords = keywords.merge(@supporting_docs[PRODUCT_ARN_MAPPING[arn || product]]) if @supporting_docs.member?(PRODUCT_ARN_MAPPING[arn || product])
         PRODUCT_ARN_MAPPING[arn || product].send(func, data, **keywords)
+      elsif default.is_a? Proc
+        default.call
       else
-        if default.is_a? Proc
-          default.call
-        else
-          default
-        end
+        default
       end
     end
 
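
The refactor above collapses a nested if inside the else branch into an elsif; behavior is unchanged: a Proc default is invoked lazily only when no product-specific handler applies, while any other default is returned as-is. A self-contained sketch of that fallback pattern (the with_fallback helper is illustrative, not the library's API):

# Call the handler when one exists; otherwise fall back to the default,
# evaluating it only if it is a Proc.
def with_fallback(handler, data, default)
  if handler
    handler.call(data)
  elsif default.is_a? Proc
    default.call
  else
    default
  end
end

puts with_fallback(->(d) { d.upcase }, 'finding', 'n/a')       # => FINDING
puts with_fallback(nil, 'finding', proc { 'computed lazily' }) # => computed lazily
puts with_fallback(nil, 'finding', 'n/a')                      # => n/a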
@@ -100,7 +97,7 @@ def impact(finding)
         imp = :INFORMATIONAL
       else
         # severity is required, but can be either 'label' or 'normalized' internally with 'label' being preferred. other values can be in here too such as the original severity rating.
-        default = Proc.new { finding['Severity'].key?('Label') ? finding['Severity']['Label'].to_sym : finding['Severity']['Normalized']/100.0 }
+        default = proc { finding['Severity'].key?('Label') ? finding['Severity']['Label'].to_sym : finding['Severity']['Normalized']/100.0 }
         imp = external_product_handler(finding['ProductArn'], finding, :finding_impact, default)
       end
       imp.is_a?(Symbol) ? IMPACT_MAPPING[imp] : imp
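
Replacing Proc.new { ... } with proc { ... } above is purely stylistic; both build the same lazy default. That default maps an ASFF Severity block to either a label symbol or a normalized score scaled into HDF's 0.0-1.0 impact range. A rough sketch of the mapping; the IMPACT_MAPPING values and sample severities here are illustrative, not taken from this excerpt:

IMPACT_MAPPING = { CRITICAL: 0.9, HIGH: 0.7, MEDIUM: 0.5, LOW: 0.3, INFORMATIONAL: 0.0 }.freeze # assumed values

# Prefer the Label; otherwise scale Normalized (0-100) down to 0.0-1.0.
severity_to_impact = proc do |severity|
  severity.key?('Label') ? severity['Label'].to_sym : severity['Normalized'] / 100.0
end

imp = severity_to_impact.call({ 'Label' => 'HIGH' })
puts imp.is_a?(Symbol) ? IMPACT_MAPPING[imp] : imp # => 0.7

imp = severity_to_impact.call({ 'Normalized' => 70 })
puts imp.is_a?(Symbol) ? IMPACT_MAPPING[imp] : imp # => 0.7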
@@ -140,7 +137,7 @@ def subfindings(finding)
 
       subfinding['code_desc'] = external_product_handler(finding['ProductArn'], finding, :subfindings_code_desc, '')
       subfinding['code_desc'] += '; ' unless subfinding['code_desc'].empty?
-      subfinding['code_desc'] += "Resources: [#{finding['Resources'].map { |r| "Type: #{encode(r['Type'])}, Id: #{encode(r['Id'])}#{', Partition: ' + encode(r['Partition']) if r.key?('Partition')}#{', Region: ' + encode(r['Region']) if r.key?('Region')}" }.join(', ')}]"
+      subfinding['code_desc'] += "Resources: [#{finding['Resources'].map { |r| "Type: #{encode(r['Type'])}, Id: #{encode(r['Id'])}#{", Partition: #{encode(r['Partition'])}" if r.key?('Partition')}#{", Region: #{encode(r['Region'])}" if r.key?('Region')}" }.join(', ')}]"
 
       subfinding['start_time'] = finding.key?('LastObservedAt') ? finding['LastObservedAt'] : finding['UpdatedAt']
 
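
The change above swaps string concatenation for nested interpolation when appending the optional Partition and Region fields; the rendered code_desc text is identical. A small sketch of the equivalence on a made-up resource hash:

r = { 'Type' => 'AwsEc2Instance', 'Id' => 'i-0abc', 'Region' => 'us-east-1' } # sample resource

# Old style: concatenation inside the interpolation.
old_style = "Id: #{r['Id']}#{', Region: ' + r['Region'] if r.key?('Region')}"
# New style: a nested interpolated string; same output.
new_style = "Id: #{r['Id']}#{", Region: #{r['Region']}" if r.key?('Region')}"

puts old_style              # => Id: i-0abc, Region: us-east-1
puts old_style == new_style # => true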
@@ -167,7 +164,7 @@ def to_hdf
         item['desc'] = encode(finding['Description'])
 
         item['descriptions'] = []
-        item['descriptions'] << desc_tags(finding['Remediation']['Recommendation'].map { |k, v| encode(v) }.join("\n"), 'fix') if finding.key?('Remediation') && finding['Remediation'].key?('Recommendation')
+        item['descriptions'] << desc_tags(finding['Remediation']['Recommendation'].map { |_k, v| encode(v) }.join("\n"), 'fix') if finding.key?('Remediation') && finding['Remediation'].key?('Recommendation')
 
         item['refs'] = []
         item['refs'] << { url: finding['SourceUrl'] } if finding.key?('SourceUrl')
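
Prefixing the unused block parameter with an underscore (|_k, v|) above only quiets the unused-variable lint; the Recommendation values are still joined in order. For example, on a made-up Remediation block:

remediation = { 'Recommendation' => { 'Text' => 'Enable MFA', 'Url' => 'https://example.com/fix' } } # sample data

fix = remediation['Recommendation'].map { |_k, v| v }.join("\n")
puts fix
# Enable MFA
# https://example.com/fix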
@@ -203,7 +200,7 @@ def to_hdf
           # add product name to id if any ids are the same across products
           item['id'] = product_groups.filter { |pg| pg != product }.values.any? { |ig| ig.keys.include?(id) } ? "[#{product_name}] #{id}" : id
 
-          item['title'] = "#{product_name}: #{group.map { |d| d['title'] }.uniq.join(";")}"
+          item['title'] = "#{product_name}: #{group.map { |d| d['title'] }.uniq.join(';')}"
 
           item['tags'] = { nist: group.map { |d| d['tags'][:nist] }.flatten.uniq }
 
@@ -216,16 +213,16 @@ def to_hdf
           item['refs'] = group.map { |d| d['refs'] }.flatten.compact.reject(&:empty?).uniq
 
           item['source_location'] = NA_HASH
-          item['code'] = JSON.pretty_generate({ "Findings": findings })
+          item['code'] = JSON.pretty_generate({ Findings: findings })
 
           item['results'] = group.map { |d| d['results'] }.flatten.uniq
 
           controls << item
         end
       end
 
-      results = HeimdallDataFormat.new(profile_name: @meta && @meta.key?('name') ? @meta['name'] : 'AWS Security Finding Format',
-                                       title: @meta && @meta.key?('title') ? @meta['title'] : "ASFF findings",
+      results = HeimdallDataFormat.new(profile_name: @meta&.key?('name') ? @meta['name'] : 'AWS Security Finding Format',
+                                       title: @meta&.key?('title') ? @meta['title'] : 'ASFF findings',
                                        controls: controls)
       results.to_hdf
     end
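
The final hunk trades @meta && @meta.key?(...) for the safe navigation operator: @meta&.key?('name') returns nil when @meta is nil, so the ternary still falls through to the default profile name and title. A quick sketch of the equivalence (the profile_name helper is illustrative, not part of the library):

def profile_name(meta)
  meta&.key?('name') ? meta['name'] : 'AWS Security Finding Format'
end

puts profile_name(nil)                     # => AWS Security Finding Format
puts profile_name({ 'title' => 'x' })      # => AWS Security Finding Format
puts profile_name({ 'name' => 'my scan' }) # => my scan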