Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions docs/internals/frontends.rst
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,11 @@ On POSIX systems, you can usually set environment variables to choose a UTF-8 lo
export LANG=en_US.UTF-8
export LC_CTYPE=en_US.UTF-8

.. note::

`Pydantic <https://docs.pydantic.dev/>`_ models that can parse Borg's JSON
log lines and stdout objects are available in ``src/borg/public/cli_api/v1.py``,
making the job easier for frontends.

Logging
-------
Expand Down
1 change: 1 addition & 0 deletions requirements.d/development.txt
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,4 @@ pytest-cov
pytest-benchmark
Cython
pre-commit
pydantic>=2.0
4 changes: 2 additions & 2 deletions src/borg/archive.py
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ def csize_fmt(self):

PROGRESS_FMT = '{0.osize_fmt} O {0.csize_fmt} C {0.usize_fmt} D {0.nfiles} N '

def show_progress(self, item=None, final=False, stream=None, dt=None):
def show_progress(self, item=None, final=False, stream=None, dt=None, override_time=None):
now = time.monotonic()
if dt is None or now - self.last_progress > dt:
stream = stream or sys.stderr
Expand All @@ -155,7 +155,7 @@ def show_progress(self, item=None, final=False, stream=None, dt=None):
else:
data = {}
data.update({
'time': time.time(),
'time': override_time if override_time is not None else time.time(),
'type': 'archive_progress',
'finished': final,
})
Expand Down
8 changes: 5 additions & 3 deletions src/borg/helpers/progress.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
import logging
import json
import logging
import sys
import time
import typing
from shutil import get_terminal_size

from ..logger import create_logger

logger = create_logger()

from .parseformat import ellipsis_truncate
Expand Down Expand Up @@ -75,7 +77,7 @@ def __del__(self):
self.logger.removeHandler(self.handler)
self.handler.close()

def output_json(self, *, finished=False, **kwargs):
def output_json(self, *, finished=False, override_time: typing.Optional[float] = None, **kwargs):
assert self.json
if not self.emit:
return
Expand All @@ -84,7 +86,7 @@ def output_json(self, *, finished=False, **kwargs):
msgid=self.msgid,
type=self.JSON_TYPE,
finished=finished,
time=time.time(),
time=override_time if override_time is not None else time.time(),
))
print(json.dumps(kwargs), file=sys.stderr, flush=True)

Expand Down
196 changes: 196 additions & 0 deletions src/borg/public/cli_api/v1.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,196 @@
"""Pydantic models that can parse borg 1.x's JSON output.

The two top-level models are:

- `BorgLogLine`, which parses any line of borg's logging output,
- all `Borg*Result` classes, which parse the final JSON output of some borg commands.

The different types of log lines are defined in the other models.
"""

import json
import logging
import typing
from datetime import datetime
from pathlib import Path

import pydantic
import typing_extensions

_log = logging.getLogger(__name__)


class BaseBorgLogLine(pydantic.BaseModel):
    """Common base class for all parsed borg log-line models."""

    def get_level(self) -> int:
        """Return the `logging` level at which to forward this line.

        Subclasses that carry an explicit level (e.g. `LogMessage`) override
        this; everything else falls back to the default returned here,
        `logging.DEBUG`.
        """
        return logging.DEBUG


class ArchiveProgressLogLine(BaseBorgLogLine):
    """Per-item progress line emitted while an archive is being created.

    The terminal "finished" variant, which carries no size fields, is
    parsed by `FinishedArchiveProgress` instead.
    """

    original_size: int  # size before compression/dedup — presumably bytes; verify
    compressed_size: int
    deduplicated_size: int
    nfiles: int  # number of files processed so far
    path: Path  # item currently being processed
    time: float  # Unix epoch timestamp


class FinishedArchiveProgress(BaseBorgLogLine):
    """JSON object printed on stdout when an archive is finished."""

    time: float  # Unix epoch timestamp
    # Discriminator as emitted by borg; note that only this variant of the
    # archive_progress line declares/checks it.
    type: typing.Literal["archive_progress"]
    finished: bool  # presumably always True for this variant — TODO confirm


class ProgressMessage(BaseBorgLogLine):
    """Textual (non-percentage) progress line — presumably borg's
    ``progress_message`` type; verify against borg's JSON output."""

    operation: int  # numeric operation id assigned by borg
    # NOTE(review): required here (although nullable), while `ProgressPercent`
    # gives msgid a None default — confirm the asymmetry is intended.
    msgid: typing.Optional[str]
    finished: bool  # True on the operation's final message
    message: typing.Optional[str] = pydantic.Field(None)
    time: float  # Unix epoch timestamp


class ProgressPercent(BaseBorgLogLine):
    """Percentage progress line — presumably borg's ``progress_percent``
    type; verify against borg's JSON output."""

    operation: int  # numeric operation id assigned by borg
    msgid: typing.Optional[str] = pydantic.Field(None)
    finished: bool  # True on the operation's final message
    message: typing.Optional[str] = pydantic.Field(None)
    current: typing.Optional[float] = pydantic.Field(None)
    info: typing.Optional[typing.List[str]] = pydantic.Field(None)
    total: typing.Optional[float] = pydantic.Field(None)
    time: float  # Unix epoch timestamp

    @pydantic.model_validator(mode="after")
    def fields_depending_on_finished(self) -> typing_extensions.Self:
        """Enforce that a finished line carries no message/info/total payload.

        Raises `ValueError` (surfaced by pydantic as a validation error) when
        `finished` is True but payload fields are still set.
        """
        if self.finished:
            if self.message is not None:
                raise ValueError("message must be None if finished is True")
            # NOTE(review): this equality check is effectively shadowed — any
            # non-None `total` fails the "total must be None" check below
            # regardless, so this branch only changes which error message is
            # raised. Confirm whether the current==total rule was meant to
            # apply to unfinished lines instead.
            if self.current is not None and self.total is not None and self.current != self.total:
                raise ValueError("current must be equal to total if finished is True")
            if self.info is not None:
                raise ValueError("info must be None if finished is True")
            if self.total is not None:
                raise ValueError("total must be None if finished is True")
        return self


class FileStatus(BaseBorgLogLine):
    """Per-file status line (status letter plus the affected path)."""

    # Single-letter status code — presumably borg's file status letters
    # (e.g. "A"/"M"/"E"); verify against borg's documentation.
    status: str
    path: Path


class LogMessage(BaseBorgLogLine):
    """A regular log record from borg's `--log-json` output."""

    time: float  # Unix epoch timestamp
    levelname: typing.Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
    name: str  # logger name that emitted the record
    message: str
    msgid: typing.Optional[str] = pydantic.Field(None)

    def get_level(self) -> int:
        """Map `levelname` to the corresponding `logging` level value.

        Falls back to `logging.WARNING` (with a warning of its own) if the
        name cannot be resolved — unreachable in practice, since `levelname`
        is restricted to the standard names by its Literal annotation.
        """
        try:
            return getattr(logging, self.levelname)
        except AttributeError:
            # Bug fix: json.dumps(self) raised TypeError because pydantic
            # models are not JSON-serializable by the stdlib encoder; use
            # pydantic's own serializer instead.
            _log.warning(
                "could not find log level %s, giving the following message WARNING level: %s",
                self.levelname,
                self.model_dump_json(),
            )
            return logging.WARNING


# All concrete log-line models; `BorgLogLine` validates against this union.
# NOTE(review): several variants share field subsets (e.g. ProgressMessage is
# a subset of ProgressPercent), so which member matches depends on pydantic's
# union-matching strategy — confirm ambiguous lines resolve as intended.
_BorgLogLinePossibleTypes = typing.Union[
    ArchiveProgressLogLine, FinishedArchiveProgress, ProgressMessage, ProgressPercent, FileStatus, LogMessage
]


class BorgLogLine(pydantic.RootModel[_BorgLogLinePossibleTypes]):
    """A log line from Borg with the `--log-json` argument."""

    def get_level(self) -> int:
        """Delegate to the matched variant's `get_level`."""
        return self.root.get_level()


class _BorgArchive(pydantic.BaseModel):
    """Basic archive attributes."""

    name: str  # archive name
    id: str  # archive id — presumably a hex digest; verify
    start: datetime  # archive start time


class _BorgArchiveStatistics(pydantic.BaseModel):
    """Statistics of an archive."""

    original_size: int  # size before compression/dedup — presumably bytes; verify
    compressed_size: int
    deduplicated_size: int
    nfiles: int  # number of files in the archive


class _BorgLimitUsage(pydantic.BaseModel):
    """Usage of borg limits by an archive."""

    # Usage of the maximum archive size limit — presumably a fraction in
    # [0, 1]; verify against borg's output.
    max_archive_size: float


class _BorgChunkerParams(pydantic.BaseModel):
    """Chunker parameters tuple.

    Format: (algorithm, min_exp, max_exp, mask_bits, window_size)
    """

    algorithm: typing.Literal["buzhash", "fixed"]
    min_exp: int
    max_exp: int
    mask_bits: int
    window_size: int

    @pydantic.model_validator(mode="before")
    @classmethod
    def parse_list(cls, data: typing.Any) -> typing.Any:
        """Parse from list format [algorithm, min_exp, max_exp, mask_bits, window_size]."""
        # Borg serializes chunker_params as a JSON array; map positions to
        # field names so the model can validate it.
        # NOTE(review): lists of any other length pass through unchanged and
        # then fail dict validation — confirm older borg versions never emit
        # a shorter tuple here.
        if isinstance(data, list) and len(data) == 5:
            return {
                "algorithm": data[0],
                "min_exp": data[1],
                "max_exp": data[2],
                "mask_bits": data[3],
                "window_size": data[4],
            }
        return data


class _BorgDetailedArchive(_BorgArchive):
    """Archive attributes, as printed by `json info` or `json create`."""

    end: datetime  # archive end time
    duration: float  # duration in seconds — presumably end - start; verify
    stats: _BorgArchiveStatistics
    limits: _BorgLimitUsage
    command_line: typing.List[str]  # command line that created the archive
    chunker_params: typing.Optional[_BorgChunkerParams] = None

    @pydantic.field_validator("chunker_params", mode="before")
    @classmethod
    def empty_string_to_none(cls, v: typing.Any) -> typing.Any:
        """Convert empty string to None (for old archives without chunker_params)."""
        if v == "":
            return None
        return v


class BorgCreateResult(pydantic.BaseModel):
    """JSON object printed at the end of `borg create`."""

    # The newly created archive, including statistics and limit usage.
    archive: _BorgDetailedArchive


class BorgListResult(pydantic.BaseModel):
    """JSON object printed at the end of `borg list`."""

    # One entry per archive in the repository.
    archives: typing.List[_BorgArchive]
Loading
Loading