 __license__ = "GNU Affero General Public License http://www.gnu.org/licenses/agpl.html"
 __copyright__ = "Copyright (C) 2020 The OctoPrint Project - Released under terms of the AGPLv3 License"

+import glob
+import json
 import os
 import re
 import threading
+import time
 from collections import defaultdict

 # noinspection PyCompatibility
@@ -18,6 +21,9 @@
 from octoprint.access import ADMIN_GROUP, USER_GROUP
 from octoprint.filemanager import get_file_type

+WIZARD_VERSION = 1  # bump on addition of critical checks
+
+CHECKS_VERSION = 1  # bump on any change to the checks
 CHECKS = {
     "travel_speed": {
         "pattern": "{travel_speed}",
@@ -34,18 +40,21 @@ class FileCheckPlugin(
     octoprint.plugin.EventHandlerPlugin,
     octoprint.plugin.SettingsPlugin,
     octoprint.plugin.SimpleApiPlugin,
+    octoprint.plugin.TemplatePlugin,
+    octoprint.plugin.WizardPlugin,
 ):
     def __init__(self):
         self._executor = ThreadPoolExecutor()

-        self._native_grep_available = True
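+        # assume native grep is unavailable until initialize() has verified it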
+        self._native_grep_available = False

         self._full_check_lock = threading.RLock()
         self._check_result = {}

     def initialize(self):
         try:
             sarge.run(["grep", "-q", "--version"])
+            self._native_grep_available = True
         except Exception as exc:
             if "Command not found" in str(exc):
                 self._native_grep_available = False
@@ -77,44 +86,29 @@ def on_event(self, event, payload):
                     self._validate_file, payload["origin"], payload["path"], file_type
                 )

-        elif event == octoprint.events.Events.FILE_REMOVED:
-            dirty = True
-            with self._full_check_lock:
-                for check in self._check_result:
-                    current = len(self._check_result[check])
-                    self._check_result[check] = [
-                        path
-                        for path in self._check_result[check]
-                        if path != f"{payload['storage']}:{payload['path']}"
-                    ]
-                    dirty = dirty or len(self._check_result[check]) < current
-            if dirty:
-                self._trigger_check_update()
-
-        elif event == octoprint.events.Events.FOLDER_REMOVED:
-            dirty = False
-            with self._full_check_lock:
-                for check in self._check_result:
-                    current = len(self._check_result[check])
-                    self._check_result[check] = [
-                        path
-                        for path in self._check_result[check]
-                        if not path.startswith(f"{payload['storage']}:{payload['path']}/")
-                    ]
-                    dirty = dirty or len(self._check_result[check]) < current
-            if dirty:
-                self._trigger_check_update()
-
     ##~~ SimpleApiPlugin API

     def on_api_get(self, request):
         if not octoprint.access.permissions.Permissions.PLUGIN_FILE_CHECK_RUN.can():
             return flask.make_response("Insufficient rights", 403)

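+        # report when the last full check ran and whether it used the current CHECKS_VERSION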
+        last_check_info = self._load_last_check_info()
+
         response = {
             "native_grep": self._native_grep_available,
-            "check_result": self._check_result,
+            "last_full_check": {
+                "timestamp": last_check_info.get("timestamp"),
+                "current": last_check_info.get("version") == CHECKS_VERSION,
+            },
         }
+
+        if octoprint.access.permissions.Permissions.FILES_LIST.can():
+            # only return the check result if the user has permissions
+            # to see a file list, otherwise we might leak data
+            response[
+                "check_result"
+            ] = self._gather_from_local_metadata()  # TODO: caching?
+
         return flask.jsonify(**response)

     def get_api_commands(self):
@@ -131,6 +125,39 @@ def on_api_command(self, command, data):
             headers={"Location": flask.url_for("index") + "api/plugin/file_check"},
         )

+    ##~~ TemplatePlugin API
+
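+    # the grep related wizard and settings templates are only registered when
+    # native grep is available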
+    def get_template_configs(self):
+        if not self._native_grep_available:
+            return []
+
+        return [
+            dict(
+                type="wizard",
+                template="file_check_wizard_grep.jinja2",
+                custom_bindings=True,
+            ),
+            dict(
+                type="settings",
+                template="file_check_settings_grep.jinja2",
+                custom_bindings=True,
+            ),
+        ]
+
+    ##~~ WizardPlugin API
+
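+    # the wizard is required when native grep is available, the stored check info does
+    # not match the current CHECKS_VERSION, and this is not the server's first run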
+    def is_wizard_required(self):
+        last_check_info = self._load_last_check_info()
+        first_run = self._settings.global_get_boolean(["server", "firstRun"])
+        return (
+            self._native_grep_available
+            and last_check_info.get("version") != CHECKS_VERSION
+            and not first_run
+        )
+
+    def get_wizard_version(self):
+        return WIZARD_VERSION
+
     ##~~ Additional permissions hook

     def get_additional_permissions(self):
@@ -177,33 +204,26 @@ def get_update_information(self):

     def _start_full_check(self):
         with self._full_check_lock:
-            self._check_result = None
             job = self._executor.submit(self._check_all_files)
             job.add_done_callback(self._full_check_done)

     def _full_check_done(self, future):
         try:
-            result = future.result()
+            future.result()
         except Exception:
             self._logger.exception("Full check failed")
             return
-
-        path_to_checks = defaultdict(list)
-        for check, matches in result.items():
-            for match in matches:
-                path_to_checks[match].append(check)
-
         self._trigger_check_update()
     def _check_all_files(self):
-        with self._full_check_lock:
-            if not self._native_grep_available:
-                return {}
+        if not self._native_grep_available:
+            return {}

+        with self._full_check_lock:
             path = self._settings.global_get_basefolder("uploads")
             self._logger.info(f"Running check on all files in {path} (local storage)")

-            full_check_result = {}
+            full_check_result = defaultdict(list)
             for check, params in CHECKS.items():
                 self._logger.info(f"Running check {check}")
                 pattern = params["pattern"]
@@ -224,13 +244,56 @@ def _check_all_files(self):
                 if result.returncode == 0:
                     for line in result.stdout.text.splitlines():
                         p, _ = line.split(":", 1)
-                        matches.append("local:" + p.replace(path + os.path.sep, ""))
+                        match = p.replace(path + os.path.sep, "")
+                        if get_file_type(match)[-1] == "gcode":
+                            matches.append(match)

                 self._logger.info(f"... got {len(matches)} matches")
-                full_check_result[check] = matches
+                for match in matches:
+                    full_check_result[match].append(check)
+
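+            # persist the per-file results as storage metadata instead of keeping
+            # them in an in-memory check result dict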
+            for f, checks in full_check_result.items():
+                self._save_to_metadata("local", f, checks)
+            self._save_last_check_info()
+
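+    # persist the CHECKS_VERSION and timestamp of the last full check to
+    # last_check_info.json in the plugin's data folder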
+    def _save_last_check_info(self):
+        data = {
+            "version": CHECKS_VERSION,
+            "timestamp": int(time.time()),
+        }
+
+        try:
+            with open(
+                os.path.join(self.get_plugin_data_folder(), "last_check_info.json"),
+                "w",
+                encoding="utf-8",
+            ) as f:
+                json.dump(data, f)
+        except Exception:
+            self._logger.exception(
+                "Could not save information about last full file check"
+            )
+            return
+
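+    # load the persisted last check info, falling back to {} if it is missing or invalid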
+    def _load_last_check_info(self):
+        path = os.path.join(self.get_plugin_data_folder(), "last_check_info.json")
+        if not os.path.isfile(path):
+            return {}
+
+        try:
+            with open(
+                path,
+                encoding="utf-8",
+            ) as f:
+                data = json.load(f)
+            if isinstance(data, dict) and "version" in data and "timestamp" in data:
+                return data
+        except Exception:
+            self._logger.exception(
+                "Could not load information about last full file check"
+            )

-            self._check_result = full_check_result
-            return full_check_result
+        return {}

     def _validate_file(self, storage, path, file_type):
         try:
@@ -254,6 +317,7 @@ def _validate_file(self, storage, path, file_type):
                     types.append(check)

         if types:
+            self._save_to_metadata(storage, path, types)
             self._notify(storage, path, types)

     def _search_through_file(self, path, pattern, incl_comments=False, regex=False):
@@ -308,23 +372,56 @@ def _notify(self, storage, path, types):
                 f" {t}, see https://faq.octoprint.org/file-check-{t.replace('_', '-')} for details"
             )

-        with self._full_check_lock:
-            for t in types:
-                if t not in self._check_result:
-                    self._check_result[t] = []
-                if path not in self._check_result[t]:
-                    self._check_result[t].append(f"{storage}:{path}")
-
         self._plugin_manager.send_plugin_message(
             self._identifier,
             {"action": "notify", "storage": storage, "path": path, "types": types},
         )

     def _trigger_check_update(self):
         self._plugin_manager.send_plugin_message(
-            self._identifier, dict(action="check_update")
+            self._identifier, {"action": "check_update"}
+        )
+
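+    # record the positive checks for a file in the file manager's additional metadata,
+    # together with the CHECKS_VERSION they were produced with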
+    def _save_to_metadata(self, storage, path, positive_checks):
+        metadata = {
+            "version": CHECKS_VERSION,
+            "checks": positive_checks,
+        }
+        self._file_manager.set_additional_metadata(
+            storage, path, "file_check", metadata, overwrite=True
         )

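+    # collect the stored file_check results for all files on local storage by scanning
+    # the .metadata.json files below the uploads folder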
+    def _gather_from_local_metadata(self):
+        uploads = self._settings.global_get_basefolder("uploads")
+
+        result = {}
+        for path in glob.glob(
+            os.path.join(uploads, "**", ".metadata.json"), recursive=True
+        ):
+            internal_path = path[len(uploads) + 1 : -len(".metadata.json")]
+            from_metadata = self._gather_metadata_from_file(path)
+            result.update(
+                {f"local:{internal_path}{k}": v for k, v in from_metadata.items()}
+            )
+        return result
+
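+    # pull the file_check entries with non-empty checks out of a single .metadata.json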
+    def _gather_metadata_from_file(self, path):
+        with open(path, encoding="utf-8") as f:
+            metadata = json.load(f)
+
+        if not isinstance(metadata, dict):
+            return {}
+
+        result = {}
+        for key, value in metadata.items():
+            if (
+                "file_check" in value
+                and isinstance(value["file_check"], dict)
+                and len(value["file_check"].get("checks", []))
+            ):
+                result[key] = value["file_check"]["checks"]
+        return result
+

 __plugin_name__ = "File Check"
 __plugin_pythoncompat__ = ">3.7,<4"