Merged
3 changes: 2 additions & 1 deletion appdaemon/__main__.py
@@ -335,7 +335,8 @@ def main(self): # noqa: C901
print(e)
sys.exit(1)

self.logging = Logging(model.model_dump(mode='python')['logs'], args.debug)
log_cfg = model.model_dump(mode='python', by_alias=True)['logs']
self.logging = Logging(log_cfg, args.debug)
self.logger = self.logging.get_logger()

if "time_zone" in config["appdaemon"]:
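The functional change here is by_alias=True: pydantic's model_dump() returns plain field names by default and only uses the declared aliases when asked, so code keyed on the alias spelling of the log settings would otherwise miss them. A minimal sketch of the difference, using a hypothetical model rather than AppDaemon's real AppDaemonConfig:

# Minimal sketch of the by_alias behaviour the change relies on; LogEntry is a
# hypothetical model, not AppDaemon's actual config class.
from pydantic import BaseModel, Field

class LogEntry(BaseModel):
    log_filename: str = Field(default="STDOUT", alias="filename")

entry = LogEntry()
print(entry.model_dump(mode="python"))                 # {'log_filename': 'STDOUT'}
print(entry.model_dump(mode="python", by_alias=True))  # {'filename': 'STDOUT'}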
4 changes: 2 additions & 2 deletions appdaemon/appdaemon.py
@@ -119,11 +119,11 @@ class AppDaemon(metaclass=Singleton):

def __init__(self, logging: "Logging", loop: BaseEventLoop, ad_config_model: AppDaemonConfig):
self.logging = logging
self.logging.register_ad(self)
self.logger = logging.get_logger()
self.loop = loop
self.config = ad_config_model
self.booted = "booting"
self.logger = logging.get_logger()
self.logging.register_ad(self) # needs to go last to reference the config object

self.global_vars = {}
self.main_thread_id = threading.current_thread().ident
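The reorder matters because register_ad() now walks attributes of the AppDaemon object as soon as it is called (the logging.py hunk below reads self.AD.module_debug), hence the comment that it "needs to go last to reference the config object". An illustrative sketch of the ordering hazard, with made-up classes:

# Made-up classes illustrating why registration must come after the attributes
# the registry reads are assigned; not AppDaemon's real code.
class Registry:
    def register(self, owner) -> None:
        # Dereferences the owner's config immediately on registration.
        for name, level in owner.module_debug.items():
            print(f"would set logger {name!r} to {level}")

class Owner:
    def __init__(self, registry: Registry, module_debug: dict[str, str]):
        self.module_debug = module_debug   # dependencies assigned first ...
        registry.register(self)            # ... registration last, mirroring the reorder

Owner(Registry(), {"appdaemon.plugins.hass": "DEBUG"})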
12 changes: 11 additions & 1 deletion appdaemon/logging.py
@@ -95,6 +95,12 @@ class AppNameFormatter(logging.Formatter):
def __init__(self, fmt=None, datefmt=None, style=None):
super().__init__(fmt, datefmt, style)

def usesTime(self) -> bool:
"""
Override to ensure asctime is always available, as LogSubscriptionHandler depends on it being available.
"""
return True

def format(self, record):
#
# Figure out the name of the app and add it to the LogRecord
@@ -296,7 +302,7 @@ def __init__(self, config: Optional[Dict] = None, log_level: str = "INFO"):
)
)
args["logger"] = logger
logger.setLevel(log_level)
logger.setLevel(args.get("level", "INFO"))
logger.propagate = False
if args["filename"] == "STDOUT":
handler = logging.StreamHandler(stream=sys.stdout)
@@ -377,6 +383,10 @@ def register_ad(self, ad: "AppDaemon"):
"""Adds a reference to the top-level ``AppDaemon`` object. This is necessary because the Logging object gets created first."""
self.AD = ad

# set module debug levels
for name, level in self.AD.module_debug.root.items():
logging.getLogger(name).setLevel(level)

# Log Subscriptions

for log in self.config:
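The usesTime() override works because the stock logging.Formatter.format() only sets record.asctime when usesTime() returns True, which normally happens only if the format string contains asctime. Forcing it to True guarantees the attribute exists for any handler that reads it afterwards. A minimal sketch with a stand-in for LogSubscriptionHandler:

# Minimal sketch: Formatter.format() populates record.asctime only when
# usesTime() is True, so the override guarantees the attribute for handlers
# that read it. AsctimeReader is a stand-in, not AppDaemon's LogSubscriptionHandler.
import logging

class AlwaysTimeFormatter(logging.Formatter):
    def usesTime(self) -> bool:
        return True  # stock implementation checks the format string for asctime

class AsctimeReader(logging.Handler):
    def emit(self, record: logging.LogRecord) -> None:
        self.format(record)  # runs the formatter, which sets record.asctime
        print("asctime on record:", getattr(record, "asctime", "<missing>"))

logger = logging.getLogger("demo")
logger.propagate = False
handler = AsctimeReader()
handler.setFormatter(AlwaysTimeFormatter("%(message)s"))  # no %(asctime)s here
logger.addHandler(handler)
logger.warning("hello")  # asctime is set anyway, thanks to the override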
16 changes: 15 additions & 1 deletion appdaemon/models/config/common.py
@@ -7,7 +7,21 @@
LogLevel = Literal["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]


CoercedPath = Annotated[Path, BeforeValidator(lambda v: Path(v).resolve())]
def coerce_path(v: Any) -> Path | Literal["STDOUT", "STDERR"]:
"""Coerce a string or Path to a resolved Path."""
match v:
case Path():
pass
case "STDOUT" | "STDERR":
return v
case str():
v = Path(v)
case _:
raise ValidationError(f"Invalid type for path: {v}")
return v.resolve() if not v.is_absolute() else v


CoercedPath = Annotated[Path | Literal["STDOUT", "STDERR"], BeforeValidator(coerce_path)]


def validate_timedelta(v: Any):
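A self-contained sketch of using the new CoercedPath pattern in an isolated pydantic model (LogFile is made up, and the sketch raises a plain ValueError, which pydantic converts into a validation error, rather than instantiating ValidationError directly):

# Self-contained sketch of the CoercedPath pattern from this diff: an Annotated
# type whose BeforeValidator coerces str -> resolved Path while letting the
# "STDOUT"/"STDERR" sentinels pass through untouched.
from pathlib import Path
from typing import Annotated, Any, Literal

from pydantic import BaseModel, BeforeValidator

def coerce_path(v: Any) -> Path | Literal["STDOUT", "STDERR"]:
    match v:
        case Path():
            pass
        case "STDOUT" | "STDERR":
            return v
        case str():
            v = Path(v)
        case _:
            raise ValueError(f"Invalid type for path: {v}")  # pydantic wraps this
    return v.resolve() if not v.is_absolute() else v

CoercedPath = Annotated[Path | Literal["STDOUT", "STDERR"], BeforeValidator(coerce_path)]

class LogFile(BaseModel):
    filename: CoercedPath = "STDOUT"

print(LogFile().filename)                         # "STDOUT" passes through
print(LogFile(filename="logs/app.log").filename)  # resolved to an absolute Path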
4 changes: 2 additions & 2 deletions appdaemon/models/config/log.py
@@ -9,12 +9,12 @@
"main_log": 'AppDaemon',
"error_log": 'Error',
"access_log": 'Access',
"diag_log": 'Diagnostic',
"diag_log": 'Diag',
}


class AppDaemonLogConfig(BaseModel):
filename: CoercedPath | None = None
filename: CoercedPath = "STDOUT"
name: str | None = None
level: LogLevel = 'INFO'
log_generations: int = 3
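Defaulting filename to the "STDOUT" sentinel (instead of None) works because Logging dispatches on that value, as the existing branch earlier in this diff shows (a StreamHandler on sys.stdout when args["filename"] == "STDOUT"). An illustrative sketch of that dispatch; make_handler is not AppDaemon's actual code:

# Illustrative dispatch on the filename sentinels; make_handler is made up.
import logging
import sys
from logging.handlers import RotatingFileHandler
from pathlib import Path

def make_handler(filename: Path | str) -> logging.Handler:
    if filename == "STDOUT":
        return logging.StreamHandler(stream=sys.stdout)
    if filename == "STDERR":
        return logging.StreamHandler(stream=sys.stderr)
    return RotatingFileHandler(filename, delay=True)  # delay=True: open on first write

print(make_handler("STDOUT"))             # StreamHandler to stdout
print(make_handler(Path("/tmp/ad.log")))  # rotating file handler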
2 changes: 2 additions & 0 deletions appdaemon/plugin_management.py
@@ -28,6 +28,7 @@ class PluginBase(abc.ABC):
name: str
config: PluginConfig
logger: Logger
diag: Logger
plugin_meta: Dict[str, Dict]
plugins: Dict[str, Dict]

@@ -58,6 +59,7 @@ def __init__(self, ad: "AppDaemon", name: str, config: PluginConfig):
self.name = name
self.config = config
self.logger = self.AD.logging.get_child(name)
self.diag = self.AD.logging.get_diag()
self.error = self.logger
self.connect_event = asyncio.Event()
self.ready_event = asyncio.Event()
26 changes: 26 additions & 0 deletions appdaemon/plugins/hass/hassplugin.py
@@ -93,6 +93,7 @@ def __init__(self, ad: "AppDaemon", name: str, config: HASSConfig):
# Internal state flags
self.stopping = False

self.service_logger = self.diag.getChild("services")
self.logger.info("HASS Plugin initialization complete")

def stop(self):
@@ -613,6 +614,7 @@ async def get_hass_services(self):
await self.check_register_service(s["domain"], s["services"], silent=True)
else:
self.logger.debug("Updated internal service registry")
self._dump_services("ha")

self.services = services
return services
@@ -621,6 +623,30 @@ async def get_hass_services(self):
self.logger.warning("Error getting services - retrying")
raise

def _compare_services(self, typ: Literal["ha", "ad"]) -> dict[str, set[str]]:
match typ:
case "ha":
# This gets the names of all the services as they come back from the get_hass_services method that gets
# called when the plugin starts and at the interval defined by services_sleep_time in the plugin config.
services = {
info["domain"]: set(info["services"].keys())
for info in self.services
}
case "ad":
# This gets the names of all the services as they're stored in the services subsystem
services = {
domain: set(services.keys())
for domain, services in self.AD.services.services[self.namespace].items()
}
case _:
services = {}
return services

def _dump_services(self, typ: Literal["ha", "ad"]) -> None:
services = self._compare_services(typ)
service_str = json.dumps(services, indent=4, sort_keys=True, default=str)
self.service_logger.debug(f"Services ({typ}):\n{service_str}")

def time_str(self, now: float | None = None) -> str:
return utils.time_str(self.start, now)

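_compare_services() builds a domain -> set-of-service-names view from either Home Assistant's registry ("ha") or AppDaemon's services subsystem ("ad"), and _dump_services() writes it to the new diag child logger; default=str is what lets json.dumps serialize the sets. The PR only dumps each view, so the diff_services() helper and the data below are illustrative:

# Illustrative use of the domain -> set-of-service-names views; the data and
# diff_services() are made up, only the default=str trick mirrors _dump_services().
import json

ha_view = {"light": {"turn_on", "turn_off", "toggle"}, "notify": {"notify"}}
ad_view = {"light": {"turn_on", "turn_off"}, "notify": {"notify"}}

# Sets are not JSON-serializable, so default=str renders them as strings.
print(json.dumps(ha_view, indent=4, sort_keys=True, default=str))

def diff_services(a: dict[str, set[str]], b: dict[str, set[str]]) -> dict[str, set[str]]:
    """Service names present in view `a` but missing from view `b`, per domain."""
    return {
        domain: names - b.get(domain, set())
        for domain, names in a.items()
        if names - b.get(domain, set())
    }

print(diff_services(ha_view, ad_view))  # {'light': {'toggle'}}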