118118{payload_b64}
119119__DEEPAGENTS_EDIT_EOF__
120120"""
121+ # Keep a trailing newline after the __DEEPAGENTS_EDIT_EOF__ marker so the
122+ # here-document is terminated correctly; some integrations tolerate its absence.
123+
121124"""Server-side file edit via `execute()`.
122125
123126Reads the file, performs string replacement, and writes back — all on the
203206_READ_COMMAND_TEMPLATE = """python3 -c "
204207import os, sys, base64, json
205208
209+ MAX_OUTPUT_BYTES = 500 * 1024
210+ MAX_BINARY_BYTES = 500 * 1024
211+ TRUNCATION_MSG = '\\n\\n' + (
212+ '[Output was truncated due to size limits. '
213+ 'This paginated read result exceeded the sandbox stdout limit. '
214+ 'Continue reading with a larger offset or smaller limit to inspect the rest of the file.]'
215+ )
216+
206217path = base64.b64decode('{path_b64}').decode('utf-8')
207218
208219if not os.path.isfile(path):
213224 print(json.dumps({{'encoding': 'utf-8', 'content': 'System reminder: File exists but has empty contents'}}))
214225 sys.exit(0)
215226
227+ file_type = '{file_type}'
228+ if file_type != 'text':
229+ file_size = os.path.getsize(path)
230+ if file_size > MAX_BINARY_BYTES:
231+ print(json.dumps({{'error': 'Binary file exceeds maximum preview size of ' + str(MAX_BINARY_BYTES) + ' bytes'}}))
232+ sys.exit(0)
233+ with open(path, 'rb') as f:
234+ raw = f.read()
235+ print(json.dumps({{'encoding': 'base64', 'content': base64.b64encode(raw).decode('ascii')}}))
236+ sys.exit(0)
237+
216238with open(path, 'rb') as f:
217- raw = f.read()
239+ raw_prefix = f.read(8192 )
218240
219241try:
220- text = raw .decode('utf-8')
242+ raw_prefix .decode('utf-8')
221243except UnicodeDecodeError:
244+ with open(path, 'rb') as f:
245+ raw = f.read()
222246 print(json.dumps({{'encoding': 'base64', 'content': base64.b64encode(raw).decode('ascii')}}))
223247 sys.exit(0)
224248
225- file_type = '{file_type}'
226- if file_type == 'text':
227- lines = text.splitlines()
228- offset = {offset}
229- limit = {limit}
230- if offset >= len(lines):
231- print(json.dumps({{'error': 'Line offset ' + str(offset) + ' exceeds file length (' + str(len(lines)) + ' lines)'}}))
232- sys.exit(0)
233- text = chr(10).join(lines[offset:offset + limit])
249+ offset = {offset}
250+ limit = {limit}
251+ line_count = 0
252+ returned_lines = 0
253+ truncated = False
254+ parts = []
255+ current_bytes = 0
256+ msg_bytes = len(TRUNCATION_MSG.encode('utf-8'))
257+ effective_limit = MAX_OUTPUT_BYTES - msg_bytes
258+
259+ with open(path, 'r', encoding='utf-8', newline=None) as f:
260+ for raw_line in f:
261+ line_count += 1
262+ if line_count <= offset:
263+ continue
264+ if returned_lines >= limit:
265+ break
266+
267+         line = raw_line.rstrip('\\n').rstrip('\\r')
268+         piece = line if returned_lines == 0 else '\\n' + line
269+ piece_bytes = len(piece.encode('utf-8'))
270+ if current_bytes + piece_bytes > effective_limit:
271+ truncated = True
272+ remaining_bytes = effective_limit - current_bytes
273+ if remaining_bytes > 0:
274+ prefix = piece.encode('utf-8')[:remaining_bytes].decode('utf-8', errors='ignore')
275+ if prefix:
276+ parts.append(prefix)
277+ current_bytes += len(prefix.encode('utf-8'))
278+ break
279+
280+ parts.append(piece)
281+ current_bytes += piece_bytes
282+ returned_lines += 1
283+
284+ if returned_lines == 0 and not truncated:
285+ print(json.dumps({{'error': 'Line offset ' + str(offset) + ' exceeds file length (' + str(line_count) + ' lines)'}}))
286+ sys.exit(0)
287+
288+ text = ''.join(parts)
289+ if truncated:
290+ text += TRUNCATION_MSG
234291
235292print(json.dumps({{'encoding': 'utf-8', 'content': text}}))
236293" 2>&1"""
@@ -327,7 +384,11 @@ def read(
327384
328385 Runs a Python script on the sandbox via `execute()` that reads the
329386 file, detects encoding, and applies offset/limit pagination for text
330- files. Only the requested page is returned over the wire.
387+ files. Only the requested page is returned over the wire, and text
388+ output is capped to about 500 KiB to avoid backend stdout/log transport
389+ failures. When that cap is exceeded, the returned content is truncated
390+ with guidance to continue pagination using a different `offset` or
391+ smaller `limit`.
331392
332393 Binary files (non-UTF-8) are returned base64-encoded without
333394 pagination.
@@ -384,9 +445,6 @@ def write(
384445 ) -> WriteResult :
385446 """Create a new file, failing if it already exists.
386447
387- Runs a small preflight command to check existence and create parent
388- directories, then transfers content via `upload_files()`.
389-
390448 Args:
391449 file_path: Absolute path for the new file.
392450 content: UTF-8 text content to write.
@@ -395,31 +453,23 @@ def write(
395453 `WriteResult` with `path` on success or `error` on failure.
396454 """
397455 # Existence check + mkdir. There is a TOCTOU window between this check
398- # and the upload below — a concurrent process could create the file in
456+ # and the upload below - a concurrent process could create the file in
399457 # between. This is an inherent limitation of splitting the operation;
400- # the risk is minimal in single-agent sandbox environments.
401458 path_b64 = base64 .b64encode (file_path .encode ("utf-8" )).decode ("ascii" )
402459 check_cmd = _WRITE_CHECK_TEMPLATE .format (path_b64 = path_b64 )
403- try :
404- result = self .execute (check_cmd )
405- except Exception as exc : # noqa: BLE001 # defense-in-depth for buggy subclass execute()
406- msg = f"Failed to write file '{ file_path } ': { exc } "
407- return WriteResult (error = msg )
408-
460+ result = self .execute (check_cmd )
409461 if result .exit_code != 0 or "Error:" in result .output :
410462 error_msg = result .output .strip () or f"Failed to write file '{ file_path } '"
411463 return WriteResult (error = error_msg )
412464
413- # Transfer content via upload_files()
414- try :
415- responses = self .upload_files ([(file_path , content .encode ("utf-8" ))])
416- except Exception as exc : # noqa: BLE001 # defense-in-depth for buggy subclass upload_files()
417- msg = f"Failed to write file '{ file_path } ': { exc } "
418- return WriteResult (error = msg )
465+ responses = self .upload_files ([(file_path , content .encode ("utf-8" ))])
419466 if not responses :
420- return WriteResult (error = f"Failed to write file '{ file_path } ': upload returned no response" )
421- if responses [0 ].error :
422- return WriteResult (error = f"Failed to write file '{ file_path } ': { responses [0 ].error } " )
467+ # Contract violation: upload_files() must return exactly one response per file.
468+ msg = f"Responses was expected to return 1 result, but it returned { len (responses )} with type { type (responses )} "
469+ raise AssertionError (msg )
470+ response = responses [0 ]
471+ if response .error :
472+ return WriteResult (error = f"Failed to write file '{ file_path } ': { response .error } " )
423473
424474 return WriteResult (path = file_path )
425475
@@ -678,6 +728,9 @@ def upload_files(self, files: list[tuple[str, bytes]]) -> list[FileUploadRespons
678728
679729 Implementations must support partial success - catch exceptions per-file
680730 and return errors in `FileUploadResponse` objects rather than raising.
731+
732+ `upload_files()` is also responsible for ensuring that each destination's
733+ parent directories exist (when permissions allow writing to that directory).
681734 """
682735
683736 @abstractmethod
0 commit comments