@@ -80,6 +80,41 @@
80 | 80 | _LOGGER = logging.getLogger(__name__) |
81 | 81 |
82 | 82 |
| 83 | +def _collect_records(data: str) -> dict[int, dict[int, tuple[datetime, int]]]: |
| 84 | + """Collect logs from a cache data string.""" |
| 85 | + logs: dict[int, dict[int, tuple[datetime, int]]] = {} |
| 86 | + log_data = data.split("|") |
| 87 | + for log_record in log_data: |
| 88 | + log_fields = log_record.split(":") |
| 89 | + if len(log_fields) == 4: |
| 90 | + address = int(log_fields[0]) |
| 91 | + slot = int(log_fields[1]) |
| 92 | + pulses = int(log_fields[3]) |
| 93 | + # Parse the zero-padded timestamp; fall back to a manual split |
| 94 | + try: |
| 95 | + timestamp = datetime.strptime( |
| 96 | + log_fields[2], "%Y-%m-%d-%H-%M-%S" |
| 97 | + ).replace(tzinfo=UTC) |
| 98 | + except ValueError: |
| 99 | + parts = log_fields[2].split("-") |
| 100 | + if len(parts) != 6: |
| 101 | + continue |
| 102 | + timestamp = datetime( |
| 103 | + year=int(parts[0]), |
| 104 | + month=int(parts[1]), |
| 105 | + day=int(parts[2]), |
| 106 | + hour=int(parts[3]), |
| 107 | + minute=int(parts[4]), |
| 108 | + second=int(parts[5]), |
| 109 | + tzinfo=UTC, |
| 110 | + ) |
| 111 | + if logs.get(address) is None: |
| 112 | + logs[address] = {} |
| 113 | + logs[address][slot] = (timestamp, pulses) |
| 114 | + |
| 115 | + return logs |
| 116 | + |
| 117 | + |
83 | 118 | def raise_calibration_missing(func: FuncT) -> FuncT: |
84 | 119 | """Validate energy calibration settings are available.""" |
85 | 120 |
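The new `_collect_records` helper makes the cache format easy to exercise in isolation. A minimal usage sketch, assuming the helper is importable from this module and using a made-up cache string (records separated by `|`, fields separated by `:` as `address:slot:timestamp:pulses`):

```python
from datetime import UTC, datetime

# Made-up cache payload: two well-formed records plus one malformed entry
# that _collect_records skips because it does not split into four fields.
sample = (
    "10:0:2024-01-15-12-00-00:1234"
    "|10:1:2024-01-15-13-00-00:1500"
    "|garbage"
)

logs = _collect_records(sample)
assert logs == {
    10: {
        0: (datetime(2024, 1, 15, 12, 0, 0, tzinfo=UTC), 1234),
        1: (datetime(2024, 1, 15, 13, 0, 0, tzinfo=UTC), 1500),
    }
}
```

The manual-split fallback covers timestamps that `strptime` rejects, and a timestamp that does not split into exactly six fields causes the record to be dropped via `continue` rather than raising.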
|
@@ -616,40 +651,11 @@ async def _energy_log_records_load_from_cache(self) -> bool: # noqa: PLR0912 |
616 | 651 | "Failed to restore energy log records from cache for node %s", self.name |
617 | 652 | ) |
618 | 653 | return False |
619 | | - restored_logs: dict[int, dict[int, tuple[datetime, int]]] = {} |
620 | 654 | if cache_data == "": |
621 | 655 | _LOGGER.debug("Cache-record is empty") |
622 | 656 | return False |
623 | 657 |
624 | | - log_data = cache_data.split("|") |
625 | | - for log_record in log_data: |
626 | | - log_fields = log_record.split(":") |
627 | | - if len(log_fields) == 4: |
628 | | - address = int(log_fields[0]) |
629 | | - slot = int(log_fields[1]) |
630 | | - pulses = int(log_fields[3]) |
631 | | - # Parse zero-padded timestamp, fallback to manual split |
632 | | - try: |
633 | | - timestamp = datetime.strptime( |
634 | | - log_fields[2], "%Y-%m-%d-%H-%M-%S" |
635 | | - ).replace(tzinfo=UTC) |
636 | | - except ValueError: |
637 | | - parts = log_fields[2].split("-") |
638 | | - if len(parts) != 6: |
639 | | - continue |
640 | | - timestamp = datetime( |
641 | | - year=int(parts[0]), |
642 | | - month=int(parts[1]), |
643 | | - day=int(parts[2]), |
644 | | - hour=int(parts[3]), |
645 | | - minute=int(parts[4]), |
646 | | - second=int(parts[5]), |
647 | | - tzinfo=UTC, |
648 | | - ) |
649 | | - if restored_logs.get(address) is None: |
650 | | - restored_logs[address] = {} |
651 | | - restored_logs[address][slot] = (timestamp, pulses) |
652 | | - |
| 658 | + restored_logs = _collect_records(cache_data) |
653 | 659 | # Sort and prune the records loaded from cache |
654 | 660 | sorted_logs: dict[int, dict[int, tuple[datetime, int]]] = {} |
655 | 661 | skip_before = datetime.now(tz=UTC) - timedelta(hours=MAX_LOG_HOURS) |
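
The hunk ends where the sort-and-prune step begins. For orientation only, a hypothetical sketch of pruning against `skip_before`; the real implementation is not shown here, `_prune_logs` is an invented name, and `MAX_LOG_HOURS` is stubbed with an illustrative value:

```python
from datetime import UTC, datetime, timedelta

MAX_LOG_HOURS = 7 * 24  # illustrative stand-in; the real constant lives in the module


def _prune_logs(
    restored: dict[int, dict[int, tuple[datetime, int]]],
) -> dict[int, dict[int, tuple[datetime, int]]]:
    """Drop cached records older than MAX_LOG_HOURS, keeping the nested shape."""
    skip_before = datetime.now(tz=UTC) - timedelta(hours=MAX_LOG_HOURS)
    pruned: dict[int, dict[int, tuple[datetime, int]]] = {}
    for address, slots in restored.items():
        kept = {
            slot: record
            for slot, record in slots.items()
            if record[0] >= skip_before  # record is (timestamp, pulses)
        }
        if kept:
            pruned[address] = kept
    return pruned
```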