
Commit 0ba83c0

Add prompt logging option
1 parent 98cebb5 commit 0ba83c0

File tree

1 file changed (+9, -1)

src/inference_core_nodes/prompt_expansion/prompt_expansion.py

Lines changed: 9 additions & 1 deletion
@@ -1,6 +1,7 @@
 from __future__ import annotations
 
 import random
+import logging
 from functools import cache
 from pathlib import Path
 
@@ -16,6 +17,8 @@
 
 CONFIGS_DIR = Path(__file__).parent.joinpath("configs")
 
+logger = logging.getLogger(__name__)
+
 fooocus_magic_split = [", extremely", ", intricate,"]
 
 disallowed_chars_table = str.maketrans("", "", "[]【】()()|::")
@@ -110,6 +113,7 @@ def INPUT_TYPES(s):
                 "model_name": (folder_paths.get_filename_list("prompt_expansion"),),
                 "text": ("STRING", {"multiline": True}),
                 "seed": ("INT", {"default": 0, "min": 0, "max": 0xFFFFFFFF}),
+                "log_prompt": ("BOOLEAN", {"default": False})
             },
         }
 
@@ -127,7 +131,7 @@ def INPUT_TYPES(s):
 
     @staticmethod
     @torch.no_grad()
-    def expand_prompt(model_name: str, text: str, seed: int, log_prompt: str):
+    def expand_prompt(model_name: str, text: str, seed: int, log_prompt: bool):
         expansion = load_expansion_runner(model_name)
 
         prompt = remove_empty_str([safe_str(text)], default="")[0]
@@ -142,6 +146,10 @@ def expand_prompt(model_name: str, text: str, seed: int, log_prompt: str):
 
         expansion_text = expansion(prompt, seed)
         expanded_prompt = join_prompts(prompt, expansion_text)
+
+        if log_prompt:
+            logger.info(f"Prompt: {prompt}")
+            logger.info(f"Expanded Prompt: {expanded_prompt}")
 
         return expanded_prompt, seed
 
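Note on usage (not part of the diff above): the new log_prompt flag writes through logging.getLogger(__name__) at INFO level, so the messages only appear if the host application has configured logging to emit INFO records. A minimal sketch of one way to surface them, assuming the default root-logger behavior of Python's logging module:

    import logging

    # Minimal sketch (assumption, not from this commit): send INFO records to
    # stderr so the Prompt / Expanded Prompt messages become visible.
    logging.basicConfig(level=logging.INFO)

    # With log_prompt set to True on the node, each expansion would then emit
    # two records from the prompt_expansion module's logger, roughly:
    #   INFO:<package>.prompt_expansion.prompt_expansion:Prompt: <input text>
    #   INFO:<package>.prompt_expansion.prompt_expansion:Expanded Prompt: <expanded text>
    # (the exact logger name depends on how the package is imported)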
