@@ -1,4 +1,6 @@
import logging
+from pathlib import PurePath
+
import requests
from urllib.parse import urlencode
import base64
@@ -46,6 +48,74 @@
log = logging.getLogger("socketdev")
log.addHandler(logging.NullHandler())

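+# Supported manifest files per ecosystem: each label maps to the glob pattern used to find it.
+# Defined at module level and used by both find_files() and match_supported_files().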
+socket_globs = {
+    "npm": {
+        "package.json": {
+            "pattern": "package.json"
+        },
+        "package-lock.json": {
+            "pattern": "package-lock.json"
+        },
+        "npm-shrinkwrap.json": {
+            "pattern": "npm-shrinkwrap.json"
+        },
+        "yarn.lock": {
+            "pattern": "yarn.lock"
+        },
+        "pnpm-lock.yaml": {
+            "pattern": "pnpm-lock.yaml"
+        },
+        "pnpm-lock.yml": {
+            "pattern": "pnpm-lock.yml"
+        },
+        "pnpm-workspace.yaml": {
+            "pattern": "pnpm-workspace.yaml"
+        },
+        "pnpm-workspace.yml": {
+            "pattern": "pnpm-workspace.yml"
+        }
+    },
+    "pypi": {
+        "pipfile": {
+            "pattern": "pipfile"
+        },
+        "pyproject.toml": {
+            "pattern": "pyproject.toml"
+        },
+        "requirements.txt": {
+            "pattern": "*requirements.txt"
+        },
+        "requirements": {
+            "pattern": "requirements/*.txt"
+        },
+        "requirements-*.txt": {
+            "pattern": "requirements-*.txt"
+        },
+        "requirements_*.txt": {
+            "pattern": "requirements_*.txt"
+        },
+        "requirements.frozen": {
+            "pattern": "requirements.frozen"
+        },
+        "setup.py": {
+            "pattern": "setup.py"
+        }
+    },
+    "golang": {
+        "go.mod": {
+            "pattern": "go.mod"
+        },
+        "go.sum": {
+            "pattern": "go.sum"
+        }
+    },
+    "java": {
+        "pom.xml": {
+            "pattern": "pom.xml"
+        }
+    }
+}
+

def encode_key(token: str) -> None:
    """
@@ -287,125 +357,61 @@ def get_manifest_files(package: Package, packages: dict) -> str:
        return manifest_files

    @staticmethod
-    def create_sbom_output(diff: Diff) -> list:
-        sbom = []
-        for package_id in diff.packages:
-            package: Package
-            package = diff.packages[package_id]
-            manifest_files = Core.get_manifest_files(package, diff.packages)
-            item = {
-                "id": package.id,
-                "license": package.license,
-                "license_text": package.license_text,
-                "manifestFiles": manifest_files,
-                "score": package.score,
-                "size": package.size,
-                "ecosystem": package.type,
-                "alerts": package.alerts,
-                "direct": package.direct,
-                "name": package.name,
-                "version": package.version,
-                "author": package.author,
-                "url": package.url
-            }
-            sbom.append(item)
+    def create_sbom_output(diff: Diff) -> dict:
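+        # Instead of assembling the SBOM locally, request the CycloneDX export for this
+        # diff's full scan from the Socket API and return the parsed JSON.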
+        base_path = f"orgs/{org_slug}/export/cdx"
+        path = f"{base_path}/{diff.id}"
+        result = do_request(path=path)
+        try:
+            sbom = result.json()
+        except Exception as error:
+            log.error(f"Unable to get CycloneDX Output for {diff.id}")
+            log.error(error)
+            sbom = {}
        return sbom

    @staticmethod
-    def find_files(path: str, new_files: list = None) -> list:
+    def match_supported_files(path: str, files: list) -> list:
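+        # Keep only the entries in `files` whose names match one of the supported
+        # manifest patterns, normalizing Windows path separators first.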
+        matched = []
+        for ecosystem in socket_globs:
+            patterns = socket_globs[ecosystem]
+            for file_name in patterns:
+                pattern = patterns[file_name]["pattern"]
+                # path_pattern = f"**/{pattern}"
+                for file in files:
+                    if "\\" in file:
+                        file = file.replace("\\", "/")
+                    if PurePath(file).match(pattern):
+                        matched.append(file)
+        return matched
+
+    @staticmethod
+    def find_files(path: str, files: list = None) -> list:
        """
        Globs the path for supported manifest files.
        Note: Might move the source to a JSON file
        :param path: Str - path to where the manifest files are located
-        :param new_files:
+        :param files: override finding the manifest files using the glob matcher
        :return:
        """
-        socket_globs = {
-            "npm": {
-                "package.json": {
-                    "pattern": "package.json"
-                },
-                "package-lock.json": {
-                    "pattern": "package-lock.json"
-                },
-                "npm-shrinkwrap.json": {
-                    "pattern": "npm-shrinkwrap.json"
-                },
-                "yarn.lock": {
-                    "pattern": "yarn.lock"
-                },
-                "pnpm-lock.yaml": {
-                    "pattern": "pnpm-lock.yaml"
-                },
-                "pnpm-lock.yml": {
-                    "pattern": "pnpm-lock.yml"
-                },
-                "pnpm-workspace.yaml": {
-                    "pattern": "pnpm-workspace.yaml"
-                },
-                "pnpm-workspace.yml": {
-                    "pattern": "pnpm-workspace.yml"
-                }
-            },
-            "pypi": {
-                "pipfile": {
-                    "pattern": "pipfile"
-                },
-                "pyproject.toml": {
-                    "pattern": "pyproject.toml"
-                },
-                "requirements.txt": {
-                    "pattern": "*requirements.txt"
-                },
-                "requirements": {
-                    "pattern": "requirements/*.txt"
-                },
-                "requirements-*.txt": {
-                    "pattern": "requirements-*.txt"
-                },
-                "requirements_*.txt": {
-                    "pattern": "requirements_*.txt"
-                },
-                "requirements.frozen": {
-                    "pattern": "requirements.frozen"
-                },
-                "setup.py": {
-                    "pattern": "setup.py"
-                }
-            },
-            "golang": {
-                "go.mod": {
-                    "pattern": "go.mod"
-                },
-                "go.sum": {
-                    "pattern": "go.sum"
-                }
-            },
-            "java": {
-                "pom.xml": {
-                    "pattern": "pom.xml"
-                }
-            }
-        }
        all_files = []
        for ecosystem in socket_globs:
            patterns = socket_globs[ecosystem]
            for file_name in patterns:
                pattern = patterns[file_name]["pattern"]
                file_path = f"{path}/**/{pattern}"
-                files = glob(file_path, recursive=True)
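+                # If the caller supplied an explicit file list, filter it against the
+                # supported patterns instead of globbing the workspace.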
+                if files is None or len(files) == 0:
+                    files = glob(file_path, recursive=True)
+                else:
+                    files = Core.match_supported_files(path, files)
                for file in files:
-                    if "/" in file:
-                        _, base_name = file.rsplit("/", 1)
-                    else:
-                        base_name = file
-                    if new_files is not None and len(new_files) > 0 and base_name not in new_files:
-                        continue
                    if platform.system() == "Windows":
                        file = file.replace("\\", "/")
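+                    # Paths from an override list may be relative; prefix them with the scanned path.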
+                    if path not in file:
+                        file = f"{path}/{file}"
                    found_path, file_name = file.rsplit("/", 1)
                    details = (found_path, file_name)
-                    all_files.append(details)
+                    if details not in all_files:
+                        all_files.append(details)
        return all_files

    @staticmethod
@@ -480,7 +486,13 @@ def get_full_scan(full_scan_id: str) -> FullScan:
        return full_scan

    @staticmethod
-    def create_new_diff(path: str, params: FullScanParams, workspace: str, new_files: list = None) -> Diff:
+    def create_new_diff(
+            path: str,
+            params: FullScanParams,
+            workspace: str,
+            new_files: list = None,
+            no_change: bool = False
+    ) -> Diff:
        """
        1. Get the head full scan. If it isn't present because this repo doesn't exist yet return an Empty full scan.
        2. Create a new Full scan for the current run
@@ -490,9 +502,14 @@ def create_new_diff(path: str, params: FullScanParams, workspace: str, new_files
        :param params: FullScanParams - Query params for the Full Scan endpoint
        :param workspace: str - Path for workspace
        :param new_files:
+        :param no_change:
        :return:
        """
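+        # Short-circuit when the caller indicates nothing changed and return an empty diff.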
+        if no_change:
+            return Diff()
        files = Core.find_files(path, new_files)
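+        # No supported manifest files were found (or matched), so there is nothing to scan.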
+        if files is None or len(files) == 0:
+            return Diff()
        try:
            head_full_scan_id = Core.get_head_scan_for_repo(params.repo)
            if head_full_scan_id is None or head_full_scan_id == "":
@@ -505,17 +522,15 @@ def create_new_diff(path: str, params: FullScanParams, workspace: str, new_files
            log.info(f"Total time to get head full-scan {total_head_time:.2f}")
        except APIResourceNotFound:
            head_full_scan = []
-        if files is not None and len(files) > 0:
-            new_scan_start = time.time()
-            new_full_scan = Core.create_full_scan(files, params, workspace)
-            new_full_scan.packages = Core.create_sbom_dict(new_full_scan.sbom_artifacts)
-            new_scan_end = time.time()
-            total_new_time = new_scan_end - new_scan_start
-            log.info(f"Total time to get new full-scan {total_new_time:.2f}")
-            diff_report = Core.compare_sboms(new_full_scan.sbom_artifacts, head_full_scan)
-            diff_report.packages = new_full_scan.packages
-        else:
-            diff_report = Diff()
+        new_scan_start = time.time()
+        new_full_scan = Core.create_full_scan(files, params, workspace)
+        new_full_scan.packages = Core.create_sbom_dict(new_full_scan.sbom_artifacts)
+        new_scan_end = time.time()
+        total_new_time = new_scan_end - new_scan_start
+        log.info(f"Total time to get new full-scan {total_new_time:.2f}")
+        diff_report = Core.compare_sboms(new_full_scan.sbom_artifacts, head_full_scan)
+        diff_report.packages = new_full_scan.packages
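+        # Keep the new full-scan id on the diff so create_sbom_output() can fetch its CycloneDX export.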
+        diff_report.id = new_full_scan.id
        return diff_report

    @staticmethod