Commit d738385

feat: add r1fs delete file endpoint (#289)
* feat: add r1fs delete file endpoint
* fix: r1fs API logging
* chore: inc version
1 parent bfab067 commit d738385

2 files changed: +197 −21 lines


extensions/business/r1fs/r1fs_manager_api.py

Lines changed: 196 additions & 20 deletions
@@ -39,12 +39,61 @@ def on_init(self):
   def _log_request_response(self, endpoint_name: str, request_data: dict = None, response_data: dict = None):
     """Helper method to log requests and responses when verbose mode is enabled"""
     if hasattr(self, 'cfg_r1fs_verbose') and self.cfg_r1fs_verbose > 10:
-      if request_data:
-        self.P(f"=== {endpoint_name} ENDPOINT ===", color='y')
-        self.P(f"REQUEST: {self.json.dumps(request_data, indent=2)}", color='c')
-      if response_data:
-        self.P(f"RESPONSE: {self.json.dumps(response_data, indent=2)}", color='g')
-      self.P(f"=== END {endpoint_name} ===", color='y')
+      if request_data is not None:
+        sanitized_request = self._sanitize_payload(request_data)
+        self.P(f"[{endpoint_name}] request: {self.json.dumps(sanitized_request)}", color='c')
+      if response_data is not None:
+        sanitized_response = self._sanitize_payload(response_data)
+        self.P(f"[{endpoint_name}] response: {self.json.dumps(sanitized_response)}", color='g')
+
+  def _sanitize_payload(self, payload, max_length: int = 64, depth: int = 0, key_path: str = ""):
+    """
+    Sanitize payloads before logging to avoid leaking secrets or large contents.
+    """
+    sensitive_tokens = (
+      "secret", "key", "token", "pass", "pwd", "credential", "auth",
+      "signature", "base64", "content", "body", "payload", "data", "yaml",
+      "json", "pickle"
+    )
+
+    if payload is None:
+      return None
+
+    if depth >= 3:
+      return "[truncated]"
+
+    if isinstance(payload, dict):
+      sanitized = {}
+      for key, value in payload.items():
+        child_path = f"{key_path}.{key}" if key_path else str(key)
+        sanitized[key] = self._sanitize_payload(value, max_length, depth + 1, child_path)
+      return sanitized
+
+    if isinstance(payload, (list, tuple, set)):
+      sanitized_iterable = [
+        self._sanitize_payload(value, max_length, depth + 1, f"{key_path}.{idx}")
+        for idx, value in enumerate(payload)
+      ]
+      return sanitized_iterable
+
+    if isinstance(payload, bytes):
+      return f"[bytes len={len(payload)}]"
+
+    if isinstance(payload, str):
+      lower_path = key_path.lower()
+      if any(token in lower_path for token in sensitive_tokens):
+        return "***"
+      if len(payload) > max_length:
+        return f"{payload[:max_length]}... (len={len(payload)})"
+      return payload
+
+    if isinstance(payload, (int, float, bool)):
+      return payload
+
+    if any(token in key_path.lower() for token in sensitive_tokens):
+      return "***"
+
+    return f"[{payload.__class__.__name__}]"
 
 
   @BasePlugin.endpoint(method="get", require_token=False)
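The _sanitize_payload helper added above masks values by key path rather than by inspecting the value itself: any leaf reached through a key containing a token such as "secret" or "base64" becomes "***", and long strings are truncated. The following standalone sketch restates that idea for illustration only; the sanitize() function and its shortened token list are hypothetical and not part of the commit.

# Illustrative, simplified restatement of the key-path redaction idea (not the committed helper).
SENSITIVE_TOKENS = ("secret", "key", "token", "pass", "base64", "content")
MAX_LEN = 64

def sanitize(value, key_path=""):
  # Recurse into dicts, carrying the dotted key path down to the leaf values.
  if isinstance(value, dict):
    return {k: sanitize(v, f"{key_path}.{k}" if key_path else str(k)) for k, v in value.items()}
  if isinstance(value, str):
    # Mask strings reached through a sensitive-looking key, truncate overly long ones.
    if any(tok in key_path.lower() for tok in SENSITIVE_TOKENS):
      return "***"
    if len(value) > MAX_LEN:
      return f"{value[:MAX_LEN]}... (len={len(value)})"
  return value

print(sanitize({"cid": "QmExample", "secret": "hunter2", "file_base64_str": "A" * 200}))
# -> {'cid': 'QmExample', 'secret': '***', 'file_base64_str': '***'}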
@@ -93,11 +142,12 @@ def add_file(self, file_path: str, body_json: any = None, secret: str = None, no
     }
     self._log_request_response("ADD_FILE", request_data=request_data)
 
-    self.P(f"Starting add_file with uploaded file at: {file_path}")
-    self.P(f"Body: {self.json.dumps(body_json, indent=2)}")
-
+    self.P(f"Starting add_file for {file_path}")
+    body_json = body_json or {}
+    if not isinstance(body_json, dict):
+      body_json = {}
     secret = body_json.get('secret', None)
-    self.P(f"Extracted secret: {secret}")
+    self.P(f"Secret provided: {'yes' if secret else 'no'}")
 
     cid = self.r1fs.add_file(file_path=file_path, secret=secret, nonce=nonce)
 
@@ -139,7 +189,7 @@ def get_file(self, cid: str, secret: str = None):
     }
     self._log_request_response("GET_FILE", request_data=request_data)
 
-    self.P(f"Retrieving file with CID='{cid}', secret='{secret}'...")
+    self.P(f"Retrieving file with CID='{cid}', secret_provided={'yes' if secret else 'no'}")
 
     fn = self.r1fs.get_file(cid=cid, secret=secret)
 
@@ -161,7 +211,7 @@ def get_file(self, cid: str, secret: str = None):
       'meta': meta
     }
 
-    self.P(f"response: {self.json.dumps(response, indent=2)}")
+    self.P(f"GET_FILE completed, file_path set: {bool(fn)}")
 
     # Log response
     self._log_request_response("GET_FILE", response_data=response)
@@ -196,7 +246,7 @@ def add_file_base64(self, file_base64_str: str, filename: str = None, secret: st
     }
     self._log_request_response("ADD_FILE_BASE64", request_data=request_data)
 
-    self.P(f"New base64 File={file_base64_str}")
+    self.P(f"Received base64 payload length={len(file_base64_str) if file_base64_str else 0}")
     if not filename:
       filename = self.r1fs._get_unique_or_complete_upload_name()
 
@@ -254,7 +304,7 @@ def get_file_base64(self, cid: str, secret: str = None): # first parameter must
     filename = file.split('/')[-1] if file else None
     self.P(f"File retrieved: {file}")
     file_base64 = self.diskapi_load_r1fs_file(file, verbose=True, to_base64=True)
-    self.P("file retrieved: {}".format(file_base64))
+    self.P(f"Encoded payload length={len(file_base64) if file_base64 else 0}")
 
     data = {
       "file_base64_str": file_base64,
@@ -299,7 +349,8 @@ def add_yaml(self, data: dict, fn: str = None, secret: str = None, nonce: int =
     }
     self._log_request_response("ADD_YAML", request_data=request_data)
 
-    self.P(f"Adding data={data} to yaml, secret='{secret}'", color='g')
+    yaml_keys = list(data.keys()) if isinstance(data, dict) else type(data).__name__
+    self.P(f"Adding YAML payload with keys={yaml_keys}, secret_provided={'yes' if secret else 'no'}", color='g')
 
     cid = self.r1fs.add_yaml(data=data, fn=fn, secret=secret)
     self.P(f"Cid='{cid}'")
@@ -335,14 +386,14 @@ def get_yaml(self, cid: str, secret: str = None):
     # Log request
     request_data = {
       'cid': cid,
-      'secret': secret
+      'secret': "***" if secret else None,
     }
     self._log_request_response("GET_YAML", request_data=request_data)
 
-    self.P(f"Retrieving file with CID='{cid}', secret='{secret}'...")
+    self.P(f"Retrieving YAML with CID='{cid}', secret_provided={'yes' if secret else 'no'}")
 
     fn = self.r1fs.get_file(cid=cid, secret=secret)
-    self.P(f"fn: {fn}")
+    self.P(f"Retrieved file path: {fn}")
 
     if fn is None:
       error_msg = f"Failed to retrieve file with CID '{cid}'. The file may not exist or the IPFS download failed."
@@ -352,7 +403,8 @@ def get_yaml(self, cid: str, secret: str = None):
 
     if fn.endswith('.yaml') or fn.endswith('.yml'):
       file_data = self.diskapi_load_yaml(fn, verbose=False)
-      self.P(f"File found: {file_data}")
+      summary = list(file_data.keys()) if isinstance(file_data, dict) else type(file_data).__name__
+      self.P(f"Parsed YAML payload summary: {summary}")
 
     else:
       self.P(f"Error retrieving file: {fn}")
@@ -365,7 +417,11 @@ def get_yaml(self, cid: str, secret: str = None):
     }
 
     # Log response
-    self._log_request_response("GET_YAML", response_data=data)
+    response_summary = {
+      'file_data_type': type(file_data).__name__,
+      'file_data_keys': list(file_data.keys()) if isinstance(file_data, dict) else None
+    }
+    self._log_request_response("GET_YAML", response_data=response_summary)
 
     return data
 
@@ -534,5 +590,125 @@ def calculate_pickle_cid(self, data: object, nonce: int, fn: str = None, secret:
 
     return data
 
+
+  @BasePlugin.endpoint(method="post", require_token=False)
+  def delete_file(
+    self,
+    cid: str,
+    unpin_remote: bool = True,
+    run_gc: bool = False,
+    cleanup_local_files: bool = False
+  ):
+    """
+    Delete a file from R1FS by unpinning it locally and optionally on the relay.
+
+    This endpoint removes a file from the decentralized file system by unpinning it.
+    The file is marked for garbage collection and will be removed when GC runs.
+
+    Args:
+      cid (str): Content Identifier of the file to delete
+      unpin_remote (bool, optional): Whether to also unpin from the relay. Default is True
+      run_gc (bool, optional): Whether to run garbage collection immediately. Default is False
+      cleanup_local_files (bool, optional): Whether to remove local downloaded files. Default is False
+
+    Returns:
+      dict: Response containing success status and message:
+        - success: Boolean indicating if deletion was successful
+        - message: Status message
+        - cid: The CID that was deleted
+    """
+    # Log request
+    request_data = {
+      'cid': cid,
+      'unpin_remote': unpin_remote,
+      'run_gc': run_gc,
+      'cleanup_local_files': cleanup_local_files
+    }
+    self._log_request_response("DELETE_FILE", request_data=request_data)
+
+    self.P(f"Deleting file with CID='{cid}', unpin_remote={unpin_remote}, run_gc={run_gc}")
+
+    success = self.r1fs.delete_file(
+      cid=cid,
+      unpin_remote=unpin_remote,
+      run_gc=run_gc,
+      cleanup_local_files=cleanup_local_files,
+      show_logs=True,
+      raise_on_error=False
+    )
+
+    if success:
+      message = f"File {cid} deleted successfully"
+      self.P(message, color='g')
+    else:
+      message = f"Failed to delete file {cid}"
+      self.P(message, color='r')
+
+    response = {
+      "success": success,
+      "message": message,
+      "cid": cid
+    }
+
+    # Log response
+    self._log_request_response("DELETE_FILE", response_data=response)
+
+    return response
+
+
+  @BasePlugin.endpoint(method="post", require_token=False)
+  def delete_files(
+    self,
+    cids: list,
+    unpin_remote: bool = True,
+    run_gc_after_all: bool = True,
+    cleanup_local_files: bool = False
+  ):
+    """
+    Delete multiple files from R1FS in bulk.
+
+    This endpoint removes multiple files from the decentralized file system by
+    unpinning them. More efficient than calling delete_file repeatedly as it
+    can run garbage collection once at the end.
+
+    Args:
+      cids (list): List of Content Identifiers to delete
+      unpin_remote (bool, optional): Whether to also unpin from the relay. Default is True
+      run_gc_after_all (bool, optional): Whether to run GC once after all deletions. Default is True
+      cleanup_local_files (bool, optional): Whether to remove local downloaded files. Default is False
+
+    Returns:
+      dict: Response containing deletion results:
+        - success: List of successfully deleted CIDs
+        - failed: List of CIDs that failed to delete
+        - total: Total number of CIDs processed
+        - success_count: Number of successful deletions
+        - failed_count: Number of failed deletions
+    """
+    # Log request
+    request_data = {
+      'cids': cids,
+      'unpin_remote': unpin_remote,
+      'run_gc_after_all': run_gc_after_all,
+      'cleanup_local_files': cleanup_local_files
+    }
+    self._log_request_response("DELETE_FILES", request_data=request_data)
+
+    self.P(f"Bulk deleting {len(cids)} files, unpin_remote={unpin_remote}, run_gc_after_all={run_gc_after_all}")
+
+    result = self.r1fs.delete_files(
+      cids=cids,
+      unpin_remote=unpin_remote,
+      run_gc_after_all=run_gc_after_all,
+      cleanup_local_files=cleanup_local_files,
+      show_logs=True,
+      raise_on_error=False
+    )
+
+    # Log response
+    self._log_request_response("DELETE_FILES", response_data=result)
+
+    return result
+
   #########################################################################
   #########################################################################
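The new delete_file and delete_files endpoints accept POST requests with the parameters documented in their docstrings above. A hedged usage sketch with the requests library follows; the base URL, port, and route naming are assumptions here and should be adjusted to the actual R1FS manager deployment.

import requests

BASE_URL = "http://localhost:8080"  # assumed address of the R1FS manager API

# Delete a single file by CID; unpin_remote=True and run_gc=False mirror the defaults above.
resp = requests.post(
  f"{BASE_URL}/delete_file",
  json={"cid": "QmExampleCid", "unpin_remote": True, "run_gc": False},
)
print(resp.json())  # per the docstring: {"success": ..., "message": ..., "cid": ...}

# Bulk-delete several CIDs, running garbage collection once after all deletions.
resp = requests.post(
  f"{BASE_URL}/delete_files",
  json={"cids": ["QmCidOne", "QmCidTwo"], "run_gc_after_all": True},
)
print(resp.json())  # per the docstring: success, failed, total, success_count, failed_count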

ver.py

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-__VER__ = '2.9.823'
+__VER__ = '2.9.824'
