Skip to content

Commit 00c29e2

Browse files
authored
Merge pull request #35 from Textualize/headers
Headers
2 parents 54b455a + 01a3980 commit 00c29e2

File tree

3 files changed

+29
-12
lines changed

3 files changed

+29
-12
lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[tool.poetry]
22
name = "toolong"
3-
version = "1.2.0"
3+
version = "1.2.1"
44
description = "A terminal log file viewer / tailer / analyzer"
55
authors = ["Will McGugan <will@textualize.io>"]
66
license = "MIT"

src/toolong/log_file.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -214,16 +214,15 @@ def scan_timestamps(
214214
scan_time = monotonic()
215215
scan = self.timestamp_scanner.scan
216216
line_no = 0
217-
timestamp = self.get_create_time() or datetime.now()
218217
position = 0
219218
results: list[tuple[int, int, float]] = []
220219
append = results.append
221220
get_length = results.__len__
222221
while line_bytes := log_mmap.readline():
223222
line = line_bytes.decode("utf-8", errors="replace")
224-
timestamp = scan(line) or timestamp
225-
append((line_no, position, timestamp.timestamp()))
223+
timestamp = scan(line)
226224
position += len(line_bytes)
225+
append((line_no, position, timestamp.timestamp() if timestamp else 0.0))
227226
line_no += 1
228227
if (
229228
results

src/toolong/log_lines.py

Lines changed: 26 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -342,8 +342,7 @@ def run_scan(self, save_merge: str | None = None) -> None:
342342
def merge_log_files(self) -> None:
343343
worker = get_current_worker()
344344
self._merge_lines = []
345-
merge_lines: list[tuple[float, int, LogFile]] = self._merge_lines
346-
append_meta = merge_lines.append
345+
merge_lines = self._merge_lines
347346

348347
for log_file in self.log_files:
349348
try:
@@ -363,13 +362,17 @@ def merge_log_files(self) -> None:
363362
for log_file in self.log_files:
364363
if not log_file.is_open:
365364
continue
366-
append = self._line_breaks[log_file].append
365+
line_breaks = self._line_breaks[log_file]
366+
append = line_breaks.append
367+
meta: list[tuple[float, int, LogFile]] = []
368+
append_meta = meta.append
367369
for timestamps in log_file.scan_timestamps():
368370
break_position = 0
371+
369372
for line_no, break_position, timestamp in timestamps:
370-
if break_position:
371-
append_meta((timestamp, line_no, log_file))
372-
append(break_position)
373+
append_meta((timestamp, line_no, log_file))
374+
append(break_position)
375+
append(log_file.size)
373376

374377
self.post_message(
375378
ScanProgress(
@@ -383,6 +386,20 @@ def merge_log_files(self) -> None:
383386
)
384387
return
385388

389+
# Header may be missing a timestamp, so we will attempt to backfill timestamps
390+
seconds = 0.0
391+
for offset, (seconds, line_no, log_file) in enumerate(meta):
392+
if seconds:
393+
for index, (_seconds, line_no, log_file) in zip(
394+
range(offset), meta
395+
):
396+
meta[index] = (seconds, line_no, log_file)
397+
break
398+
if offset > 10:
399+
# May be pointless to scan the entire thing
400+
break
401+
self._merge_lines.extend(meta)
402+
386403
position += log_file.size
387404

388405
merge_lines.sort(key=itemgetter(0, 1))
@@ -447,11 +464,12 @@ def get_log_file_from_index(self, index: int) -> tuple[LogFile, int]:
447464
def index_to_span(self, index: int) -> tuple[LogFile, int, int]:
448465
log_file, index = self.get_log_file_from_index(index)
449466
line_breaks = self._line_breaks.setdefault(log_file, [])
467+
scan_start = 0 if self._merge_lines else self._scan_start
450468
if not line_breaks:
451-
return (log_file, self._scan_start, self._scan_start)
469+
return (log_file, scan_start, self._scan_start)
452470
index = clamp(index, 0, len(line_breaks))
453471
if index == 0:
454-
return (log_file, self._scan_start, line_breaks[0])
472+
return (log_file, scan_start, line_breaks[0])
455473
start = line_breaks[index - 1]
456474
end = (
457475
line_breaks[index]

0 commit comments

Comments (0)