Commit d63aef5

Codebase: Yapf the codebase again
1 parent 747fe86 commit d63aef5

17 files changed: +117 -113 lines changed

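This commit is a pure yapf formatting pass: keyword arguments gain spaces around the '=', long calls are re-wrapped against a wide column limit, and single-element tuples are written as (x, ). The repository's actual .style.yapf is not part of this diff, so the values in the sketch below are inferred from the hunks rather than authoritative; a minimal sketch using yapf's Python API:

# Sketch only: style values are assumptions inferred from this diff, not the
# project's real .style.yapf (which this commit does not show).
from yapf.yapflib.yapf_api import FormatCode

snippet = (
    "requirements.IntRequirement(name='pid',\n"
    "                            description='Process ID to include',\n"
    "                            optional=True)\n"
)

formatted, changed = FormatCode(
    snippet,
    style_config='{based_on_style: pep8, column_limit: 120, '
                 'spaces_around_default_or_named_assign: true}')
print(formatted)  # keyword arguments come back as name = 'pid', optional = True

Running yapf over the tree with one agreed style is what produces whitespace-only hunks like the ones below.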
volatility/framework/layers/vmware.py

Lines changed: 3 additions & 3 deletions
@@ -91,9 +91,9 @@ def _read_header(self) -> None:
         if tags[("regionsCount", ())][1] == 0:
             raise VmwareFormatException(self.name, "VMware VMEM is not split into regions")
         for region in range(tags[("regionsCount", ())][1]):
-            offset = tags[("regionPPN", (region,))][1] * self._page_size
-            mapped_offset = tags[("regionPageNum", (region,))][1] * self._page_size
-            length = tags[("regionSize", (region,))][1] * self._page_size
+            offset = tags[("regionPPN", (region, ))][1] * self._page_size
+            mapped_offset = tags[("regionPageNum", (region, ))][1] * self._page_size
+            length = tags[("regionSize", (region, ))][1] * self._page_size
             self._segments.append((offset, mapped_offset, length, length))
 
     @property

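The only change in this file is the space yapf inserts between a one-element tuple's trailing comma and the closing parenthesis (presumably the SPACE_BETWEEN_ENDING_COMMA_AND_CLOSING_BRACKET knob; the knob name is an inference, the commit itself doesn't say). (region,) and (region, ) are the same value, so the hunk is behaviour-neutral:

# Quick check: both spellings of a one-element tuple parse to an identical AST.
import ast

old = 'offset = tags[("regionPPN", (region,))][1] * self._page_size'
new = 'offset = tags[("regionPPN", (region, ))][1] * self._page_size'

assert ast.dump(ast.parse(old)) == ast.dump(ast.parse(new))
assert (1,) == (1, )  # the tuples themselves compare equal as well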
volatility/framework/plugins/layerwriter.py

Lines changed: 5 additions & 5 deletions
@@ -80,7 +80,7 @@ def write_layer(
     def _generator(self):
         if self.config['list']:
             for name in self.context.layers:
-                yield 0, (name,)
+                yield 0, (name, )
         else:
             import pdb
             pdb.set_trace()
@@ -94,7 +94,7 @@ def _generator(self):
             for name in self.config['layers']:
                 # Check the layer exists and validate the output file
                 if name not in self.context.layers:
-                    yield 0, ('Layer Name {} does not exist'.format(name),)
+                    yield 0, ('Layer Name {} does not exist'.format(name), )
                 else:
                     output_name = self.config.get('output', ".".join([name, "raw"]))
                     try:
@@ -106,14 +106,14 @@ def _generator(self):
                                                     progress_callback = self._progress_callback)
                         file_handle.close()
                     except IOError as excp:
-                        yield 0, ('Layer cannot be written to {}: {}'.format(self.config['output_name'], excp),)
+                        yield 0, ('Layer cannot be written to {}: {}'.format(self.config['output_name'], excp), )
 
-                    yield 0, ('Layer has been written to {}'.format(output_name),)
+                    yield 0, ('Layer has been written to {}'.format(output_name), )
 
     def _generate_layers(self):
         """List layer names from this run"""
         for name in self.context.layers:
-            yield (0, (name,))
+            yield (0, (name, ))
 
     def run(self):
         if self.config['list']:

volatility/framework/plugins/mac/socket_filters.py

Lines changed: 5 additions & 4 deletions
@@ -39,10 +39,11 @@ def _generator(self):
 
         handlers = mac.MacUtilities.generate_kernel_handler_info(self.context, self.config['primary'], kernel, mods)
 
-        members_to_check = ["sf_unregistered", "sf_attach", "sf_detach", "sf_notify", "sf_getpeername",
-                            "sf_getsockname",
-                            "sf_data_in", "sf_data_out", "sf_connect_in", "sf_connect_out", "sf_bind", "sf_setoption",
-                            "sf_getoption", "sf_listen", "sf_ioctl"]
+        members_to_check = [
+            "sf_unregistered", "sf_attach", "sf_detach", "sf_notify", "sf_getpeername", "sf_getsockname", "sf_data_in",
+            "sf_data_out", "sf_connect_in", "sf_connect_out", "sf_bind", "sf_setoption", "sf_getoption", "sf_listen",
+            "sf_ioctl"
+        ]
 
         filter_list = kernel.object_from_symbol(symbol_name = "sock_filter_head")
 
volatility/framework/plugins/timeliner.py

Lines changed: 1 addition & 1 deletion
@@ -188,7 +188,7 @@ def run(self):
 
                 if isinstance(plugin, TimeLinerInterface):
                     if not len(filter_list) or any(
-                        [filter in plugin.__module__ + '.' + plugin.__class__.__name__ for filter in filter_list]):
+                            [filter in plugin.__module__ + '.' + plugin.__class__.__name__ for filter in filter_list]):
                         plugins_to_run.append(plugin)
             except exceptions.UnsatisfiedException as excp:
                 # Remove the failed plugin from the list and continue

volatility/framework/plugins/windows/cachedump.py

Lines changed: 1 addition & 1 deletion
@@ -55,7 +55,7 @@ def parse_cache_entry(self, cache_data):
         (uname_len, domain_len) = unpack("<HH", cache_data[:4])
         if len(cache_data[60:62]) == 0:
             return (uname_len, domain_len, 0, '', '')
-        (domain_name_len,) = unpack("<H", cache_data[60:62])
+        (domain_name_len, ) = unpack("<H", cache_data[60:62])
         ch = cache_data[64:80]
         enc_data = cache_data[96:]
         return (uname_len, domain_len, domain_name_len, enc_data, ch)

volatility/framework/plugins/windows/dlllist.py

Lines changed: 2 additions & 2 deletions
@@ -72,8 +72,8 @@ def dump_pe(cls,
         if layer_name is None:
             layer_name = dll_entry.vol.layer_name
 
-        file_handle = open_method("{}{}.{:#x}.{:#x}.dmp".format(prefix, ntpath.basename(name),
-                                                                dll_entry.vol.offset, dll_entry.DllBase))
+        file_handle = open_method("{}{}.{:#x}.{:#x}.dmp".format(prefix, ntpath.basename(name), dll_entry.vol.offset,
+                                                                dll_entry.DllBase))
 
         dos_header = context.object(pe_table_name + constants.BANG + "_IMAGE_DOS_HEADER",
                                     offset = dll_entry.DllBase,

volatility/framework/plugins/windows/dumpfiles.py

Lines changed: 43 additions & 46 deletions
@@ -21,6 +21,7 @@
     "vacb": "SharedCacheMap",
 }
 
+
 class DumpFiles(interfaces.plugins.PluginInterface):
     """Dumps cached file contents from Windows memory samples."""
 
@@ -31,26 +32,25 @@ class DumpFiles(interfaces.plugins.PluginInterface):
     def get_requirements(cls) -> List[interfaces.configuration.RequirementInterface]:
         # Since we're calling the plugin, make sure we have the plugin's requirements
         return [
-            requirements.TranslationLayerRequirement(name='primary',
-                                                     description='Memory layer for the kernel',
-                                                     architectures=["Intel32", "Intel64"]),
-            requirements.SymbolTableRequirement(name="nt_symbols", description="Windows kernel symbols"),
-            requirements.IntRequirement(name='pid',
-                                        description="Process ID to include (all other processes are excluded)",
-                                        optional=True),
-            requirements.IntRequirement(name='virtaddr',
-                                        description="Dump a single _FILE_OBJECT at this virtual address",
-                                        optional=True),
-            requirements.IntRequirement(name='physaddr',
-                                        description="Dump a single _FILE_OBJECT at this physical address",
-                                        optional=True),
-            requirements.VersionRequirement(name='pslist', component=pslist.PsList, version=(2, 0, 0)),
-            requirements.VersionRequirement(name='handles', component=handles.Handles, version=(1, 0, 0))
+            requirements.TranslationLayerRequirement(name = 'primary',
+                                                     description = 'Memory layer for the kernel',
+                                                     architectures = ["Intel32", "Intel64"]),
+            requirements.SymbolTableRequirement(name = "nt_symbols", description = "Windows kernel symbols"),
+            requirements.IntRequirement(name = 'pid',
+                                        description = "Process ID to include (all other processes are excluded)",
+                                        optional = True),
+            requirements.IntRequirement(name = 'virtaddr',
+                                        description = "Dump a single _FILE_OBJECT at this virtual address",
+                                        optional = True),
+            requirements.IntRequirement(name = 'physaddr',
+                                        description = "Dump a single _FILE_OBJECT at this physical address",
+                                        optional = True),
+            requirements.VersionRequirement(name = 'pslist', component = pslist.PsList, version = (2, 0, 0)),
+            requirements.VersionRequirement(name = 'handles', component = handles.Handles, version = (1, 0, 0))
         ]
 
     @classmethod
-    def dump_file_producer(cls,
-                           file_object: interfaces.objects.ObjectInterface,
+    def dump_file_producer(cls, file_object: interfaces.objects.ObjectInterface,
                            memory_object: interfaces.objects.ObjectInterface,
                            open_method: Type[interfaces.plugins.FileHandlerInterface],
                            layer: interfaces.layers.DataLayerInterface,
@@ -86,14 +86,11 @@ def dump_file_producer(cls,
             vollog.debug("Stored {}".format(filedata.preferred_filename))
             return filedata
         except exceptions.InvalidAddressException:
-            vollog.debug("Unable to dump file at {0:#x}".format(
-                file_object.vol.offset))
+            vollog.debug("Unable to dump file at {0:#x}".format(file_object.vol.offset))
             return None
 
     @classmethod
-    def process_file_object(cls,
-                            context: interfaces.context.ContextInterface,
-                            primary_layer_name: str,
+    def process_file_object(cls, context: interfaces.context.ContextInterface, primary_layer_name: str,
                             open_method: Type[interfaces.plugins.FileHandlerInterface],
                             file_obj: interfaces.objects.ObjectInterface) -> Tuple:
         """Given a FILE_OBJECT, dump data to separate files for each of the three file caches.
@@ -153,10 +150,8 @@ def process_file_object(cls,
         for memory_object, layer, extension in dump_parameters:
             cache_name = EXTENSION_CACHE_MAP[extension]
             desired_file_name = "file.{0:#x}.{1:#x}.{2}.{3}.{4}".format(file_obj.vol.offset,
-                                                                        memory_object.vol.offset,
-                                                                        cache_name,
-                                                                        ntpath.basename(obj_name),
-                                                                        extension)
+                                                                        memory_object.vol.offset, cache_name,
+                                                                        ntpath.basename(obj_name), extension)
 
             file_handle = DumpFiles.dump_file_producer(file_obj, memory_object, open_method, layer, desired_file_name)
 
@@ -165,8 +160,10 @@ def process_file_object(cls,
                 file_handle.close()
                 file_output = file_handle.preferred_filename
 
-            yield (cache_name, format_hints.Hex(file_obj.vol.offset),
-                   ntpath.basename(obj_name),  # temporary, so its easier to visualize output
+            yield (
+                cache_name,
+                format_hints.Hex(file_obj.vol.offset),
+                ntpath.basename(obj_name),  # temporary, so its easier to visualize output
                 file_output)
 
     def _generator(self, procs: List, offsets: List):
@@ -176,13 +173,13 @@ def _generator(self, procs: List, offsets: List):
         # private variables, so we need an instance (for now, anyway). We _could_ call Handles._generator()
         # to do some of the other work that is duplicated here, but then we'd need to parse the TreeGrid
         # results instead of just dealing with them as direct objects here.
-        handles_plugin = handles.Handles(context=self.context, config_path=self._config_path)
-        type_map = handles_plugin.get_type_map(context=self.context,
-                                               layer_name=self.config["primary"],
-                                               symbol_table=self.config["nt_symbols"])
-        cookie = handles_plugin.find_cookie(context=self.context,
-                                            layer_name=self.config["primary"],
-                                            symbol_table=self.config["nt_symbols"])
+        handles_plugin = handles.Handles(context = self.context, config_path = self._config_path)
+        type_map = handles_plugin.get_type_map(context = self.context,
+                                               layer_name = self.config["primary"],
+                                               symbol_table = self.config["nt_symbols"])
+        cookie = handles_plugin.find_cookie(context = self.context,
+                                            layer_name = self.config["primary"],
+                                            symbol_table = self.config["nt_symbols"])
 
         for proc in procs:
 
@@ -198,7 +195,8 @@ def _generator(self, procs: List, offsets: List):
                         obj_type = entry.get_object_type(type_map, cookie)
                         if obj_type == "File":
                             file_obj = entry.Body.cast("_FILE_OBJECT")
-                            for result in self.process_file_object(self.context, self.config["primary"], self.open, file_obj):
+                            for result in self.process_file_object(self.context, self.config["primary"], self.open,
+                                                                   file_obj):
                                 yield (0, result)
             except exceptions.InvalidAddressException:
                 vollog.log(constants.LOGLEVEL_VVV,
@@ -221,7 +219,8 @@ def _generator(self, procs: List, offsets: List):
                     if not file_obj.is_valid():
                         continue
 
-                    for result in self.process_file_object(self.context, self.config["primary"], self.open, file_obj):
+                    for result in self.process_file_object(self.context, self.config["primary"], self.open,
+                                                           file_obj):
                         yield (0, result)
             except exceptions.InvalidAddressException:
                 vollog.log(constants.LOGLEVEL_VVV,
@@ -237,14 +236,13 @@ def _generator(self, procs: List, offsets: List):
                     layer_name = self.context.layers[layer_name].config["memory_layer"]
 
                 file_obj = self.context.object(self.config["nt_symbols"] + constants.BANG + "_FILE_OBJECT",
-                                               layer_name=layer_name,
-                                               native_layer_name=self.config["primary"],
-                                               offset=offset)
+                                               layer_name = layer_name,
+                                               native_layer_name = self.config["primary"],
+                                               offset = offset)
                 for result in self.process_file_object(self.context, self.config["primary"], self.open, file_obj):
                     yield (0, result)
             except exceptions.InvalidAddressException:
-                vollog.log(constants.LOGLEVEL_VVV,
-                           "Cannot extract file at {0:#x}".format(offset))
+                vollog.log(constants.LOGLEVEL_VVV, "Cannot extract file at {0:#x}".format(offset))
 
     def run(self):
         # a list of tuples (<int>, <bool>) where <int> is the address and <bool> is True for virtual.
@@ -261,8 +259,7 @@ def run(self):
             procs = pslist.PsList.list_processes(self.context,
                                                  self.config["primary"],
                                                  self.config["nt_symbols"],
-                                                 filter_func=filter_func)
+                                                 filter_func = filter_func)
 
-        return renderers.TreeGrid(
-            [("Cache", str), ("FileObject", format_hints.Hex), ("FileName", str), ("Result", str)],
-            self._generator(procs, offsets))
+        return renderers.TreeGrid([("Cache", str), ("FileObject", format_hints.Hex), ("FileName", str),
+                                   ("Result", str)], self._generator(procs, offsets))

volatility/framework/plugins/windows/handles.py

Lines changed: 2 additions & 2 deletions
@@ -90,8 +90,8 @@ def _get_item(self, handle_table_entry, handle_value):
                 if not has_capstone:
                     raise AttributeError("Unable to find the SAR value for decoding handle table pointers")
             else:
-                raise exceptions.MissingModuleException("capstone",
-                                                        "Unable to find the SAR value for decoding handle table pointers")
+                raise exceptions.MissingModuleException(
+                    "capstone", "Unable to find the SAR value for decoding handle table pointers")
 
         offset = self._decode_pointer(handle_table_entry.LowValue, magic)
         # print("LowValue: {0:#x} Magic: {1:#x} Offset: {2:#x}".format(handle_table_entry.InfoTable, magic, offset))

volatility/framework/plugins/windows/malfind.py

Lines changed: 2 additions & 2 deletions
@@ -105,8 +105,8 @@ def list_injections(
                 continue
 
             if (vad.get_private_memory() == 1
-                and vad.get_tag() == "VadS") or (vad.get_private_memory() == 0
-                                                 and protection_string != "PAGE_EXECUTE_WRITECOPY"):
+                    and vad.get_tag() == "VadS") or (vad.get_private_memory() == 0
+                                                     and protection_string != "PAGE_EXECUTE_WRITECOPY"):
                 if cls.is_vad_empty(proc_layer, vad):
                     continue
 
volatility/framework/plugins/windows/netscan.py

Lines changed: 5 additions & 3 deletions
@@ -185,7 +185,7 @@ def determine_tcpip_version(cls, context: interfaces.context.ContextInterface, l
         }
 
         # special use case: Win10_18363 is not recognized by windows.info as 18363
-        # because all kernel file headers and debug structures report 18363 as 
+        # because all kernel file headers and debug structures report 18363 as
         # "10.0.18362.1198" with the last part being incremented. However, we can use
         # os_distinguisher to differentiate between 18362 and 18363
         if vers_minor_version == 18362 and is_18363_or_later:
@@ -202,9 +202,11 @@ def determine_tcpip_version(cls, context: interfaces.context.ContextInterface, l
             # no match on filename means that we possibly have a version newer than those listed here.
             # try to grab the latest supported version of the current image NT version. If that symbol
             # version does not work, support has to be added manually.
-            current_versions = [key for key in list(version_dict.keys()) if key[0] == nt_major_version and key[1] == nt_minor_version]
+            current_versions = [
+                key for key in list(version_dict.keys()) if key[0] == nt_major_version and key[1] == nt_minor_version
+            ]
             current_versions.sort()
-            
+
             if current_versions:
                 latest_version = current_versions[-1]

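Since every hunk above is formatting-only, a quick sanity check is to confirm that each touched module parses to the same AST at the parent commit and at this one. A sketch, assuming it runs inside a checkout containing revisions 747fe86 and d63aef5:

# Sketch: verify a formatting-only commit leaves every module's AST unchanged.
import ast
import subprocess

def show(revision: str, path: str) -> str:
    """Return a file's contents at the given revision via `git show`."""
    return subprocess.run(["git", "show", f"{revision}:{path}"],
                          capture_output=True, text=True, check=True).stdout

def same_ast(a: str, b: str) -> bool:
    """True when the two sources parse to the same abstract syntax tree."""
    return ast.dump(ast.parse(a)) == ast.dump(ast.parse(b))

changed = subprocess.run(["git", "diff", "--name-only", "747fe86", "d63aef5"],
                         capture_output=True, text=True, check=True).stdout.split()

for path in changed:
    if path.endswith(".py"):
        ok = same_ast(show("747fe86", path), show("d63aef5", path))
        print(path, "AST unchanged" if ok else "AST DIFFERS")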