|
4 | 4 | from .BaseSource import BaseSourceClass |
5 | 5 | from .DeDuplication import Deduplication |
6 | 6 | from .CentralScheduling import Scheduling |
| 7 | +import threading |
| 8 | +from psutil import cpu_percent, virtual_memory |
7 | 9 | from uuid import uuid4 |
8 | 10 |
|
9 | 11 |
|
def Parse(self, source=None, config=None):
    """Parse the environment's sources and schedule the results for detection

    Builds the configuration set, skips kafka prototypes and configurations
    whose execution environment is offline, and runs every remaining source in
    its own daemon thread via :meth:`ParseSource`. Thread creation is throttled
    while system CPU or memory sits at/above the configured
    ``NodeInformation/ResourceMax`` percentage, and the method blocks until all
    sourcing threads have drained.

    Args:
        source (str): If set, restricts sourcing to the named source plugin
        config (str): If set, restricts sourcing to the named configuration

    Returns:
        bool: True once all sourcing threads have completed

    """
    self.ioc.getLogger().trace("Starting Parse of Environment", trace=True)
    Configuration = self.generate_config_set(source=source, config=config)
    ScanPool = []
    # hoist the invariant resource ceiling out of the throttle loop
    resource_max = int(self.ioc.getConfig().get('NodeInformation', 'ResourceMax'))
    total = len(Configuration)
    i = 0
    while i < total:
        # ensure we don't swamp the system resources; cpu_percent(interval=.1)
        # blocks ~100ms, so this throttle loop also cannot spin hot
        if cpu_percent(interval=.1) >= resource_max or virtual_memory().percent >= resource_max:
            self.ioc.getLogger().trace("Scan sleeping; System resource maximum reached", verbose=True)
            continue
        conf = Configuration[i]
        i += 1
        # ensure no kafka prototypes come into sourcing
        if conf.get('source') == 'kafka':
            continue
        # ensure there is an execution environment
        server, _ = self.scheduler.determineExecutionServer(conf.get('exe_env', 'general'))
        if not server:
            self.ioc.getLogger().warning(
                'configuration skipped -- execution environment offline',
                additional={
                    'execution_environment': conf.get('exe_env', 'general'),
                    'configuration': conf.get('name')
                },
                notify=True
            )
            continue
        inst = self.impTool.load(conf.get('source', str(uuid4())))
        if not isinstance(inst, BaseSourceClass):
            self.ioc.getLogger().error("Invalid Source [{0}]".format(conf.get('source')), notify=False)
            del inst
            continue
        t = threading.Thread(
            target=self.ParseSource,
            args=(
                self.ioc,
                inst,
                conf,
                self.dedup,
                self.scheduler,
            ),
            name="GREASE SOURCING THREAD [{0}]".format(conf.get('name'))
        )
        t.daemon = True
        t.start()
        ScanPool.append(t)
    # wait for threads to finish out
    while ScanPool:
        self.ioc.getLogger().trace("Total current scan threads [{0}]".format(len(ScanPool)), trace=True)
        # join briefly instead of busy-polling so the wait loop yields the CPU
        for thread in ScanPool:
            thread.join(timeout=0.1)
        # NOTE: isAlive() was removed in Python 3.9; is_alive() is the supported name
        ScanPool = [thread for thread in ScanPool if thread.is_alive()]
        self.ioc.getLogger().trace("Total current scan threads [{0}]".format(len(ScanPool)), trace=True)
    self.ioc.getLogger().trace("Scanning Complete", trace=True)
    return True
110 | 122 |
|
@staticmethod
def ParseSource(ioc, source, configuration, deduplication, scheduler):
    """Parses an individual source and attempts to schedule it for detection

    In mock mode (``Sourcing/mock`` config truthy) the source's mock data is
    used directly; otherwise the source is parsed and its data deduplicated
    before being handed to central scheduling. Failures are logged, never
    raised, since this runs inside a daemon thread.

    Args:
        ioc (GreaseContainer): IoC Instance
        source (BaseSourceClass): Source to parse
        configuration (dict): Prototype configuration to use
        deduplication (Deduplication): Dedup engine instance
        scheduler (Scheduling): Central Scheduling instance

    Returns:
        None: Meant to be run in a thread

    """
    try:
        if ioc.getConfig().get('Sourcing', 'mock'):
            # mock mode short-circuits real sourcing for testing
            data = source.mock_data(configuration)
        elif source.parse_source(configuration):
            # deduplicate data before scheduling it for detection
            data = deduplication.Deduplicate(
                data=source.get_data(),
                source=configuration.get('source'),
                configuration=configuration.get('name', str(uuid4())),
                threshold=source.deduplication_strength,
                expiry_hours=source.deduplication_expiry,
                expiry_max=source.deduplication_expiry_max,
                collection='Dedup_Sourcing',
                field_set=source.field_set
            )
        else:
            ioc.getLogger().warning(
                "Source [{0}] parsing failed".format(configuration.get('source')),
                notify=False
            )
            data = []
        if not data:
            ioc.getLogger().trace("Length of data was empty; was not scheduled", trace=True)
        elif scheduler.scheduleDetection(configuration.get('source'), configuration.get('name'), data):
            ioc.getLogger().info(
                "Data scheduled for detection from source [{0}]".format(configuration.get('source')),
                trace=True
            )
        else:
            ioc.getLogger().error("Scheduling failed for source document!", notify=False)
    except Exception as e:
        # Exception, not BaseException: KeyboardInterrupt/SystemExit must still propagate
        ioc.getLogger().error(
            "Failed parsing message got exception! Configuration [{0}] Got [{1}]".format(configuration, e)
        )
| 180 | + |
111 | 181 | def generate_config_set(self, source=None, config=None): |
112 | 182 | """Examines configuration and returns list of configs to parse |
113 | 183 |
|
|
0 commit comments