
Commit e7e0449

apply ruff fixes
1 parent 57c41dc commit e7e0449

File tree
6 files changed, +56 -47 lines changed

6 files changed

+56
-47
lines changed
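The commit is a batch of mechanical fixes produced by ruff: import re-ordering in the two notebooks, line wrapping and a trailing comma in the Python modules, and whitespace cleanup. A minimal sketch of reproducing this kind of change locally, assuming ruff is installed and that the repository's own ruff configuration (not shown in this diff) selects the relevant rules:

import subprocess

# Apply autofixable lint rules, e.g. import sorting (I001); rule selection comes from the repo's ruff config.
subprocess.run(["ruff", "check", "--fix", "."], check=True)
# Apply the formatter: line wrapping, trailing commas, whitespace cleanup.
subprocess.run(["ruff", "format", "."], check=True)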

docs/examples_notebooks/index_migration_to_v1.ipynb

Lines changed: 1 addition & 2 deletions
@@ -202,11 +202,10 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"from graphrag.index.flows.generate_text_embeddings import generate_text_embeddings\n",
-"\n",
 "from graphrag.cache.factory import CacheFactory\n",
 "from graphrag.callbacks.noop_workflow_callbacks import NoopWorkflowCallbacks\n",
 "from graphrag.config.embeddings import get_embedded_fields, get_embedding_settings\n",
+"from graphrag.index.flows.generate_text_embeddings import generate_text_embeddings\n",
 "\n",
 "# We only need to re-run the embeddings workflow, to ensure that embeddings for all required search fields are in place\n",
 "# We'll construct the context and run this function flow directly to avoid everything else\n",

examples_notebooks/community_contrib/yfiles-jupyter-graphs/graph-visualization.ipynb

Lines changed: 3 additions & 3 deletions
@@ -29,9 +29,6 @@
 "\n",
 "import pandas as pd\n",
 "import tiktoken\n",
-"from graphrag.query.llm.oai.chat_openai import ChatOpenAI\n",
-"from graphrag.query.llm.oai.embedding import OpenAIEmbedding\n",
-"from graphrag.query.llm.oai.typing import OpenaiApiType\n",
 "\n",
 "from graphrag.query.context_builder.entity_extraction import EntityVectorStoreKey\n",
 "from graphrag.query.indexer_adapters import (\n",
@@ -41,6 +38,9 @@
 " read_indexer_reports,\n",
 " read_indexer_text_units,\n",
 ")\n",
+"from graphrag.query.llm.oai.chat_openai import ChatOpenAI\n",
+"from graphrag.query.llm.oai.embedding import OpenAIEmbedding\n",
+"from graphrag.query.llm.oai.typing import OpenaiApiType\n",
 "from graphrag.query.structured_search.local_search.mixed_context import (\n",
 " LocalSearchMixedContext,\n",
 ")\n",

graphrag/api/index.py

Lines changed: 1 addition & 0 deletions
@@ -49,6 +49,7 @@ async def build_index(
     """
     # Register pipeline logger with the graphrag logger
     from graphrag.logger.standard_logging import init_loggers
+
     init_loggers(config=config.reporting, root_dir=None, enable_console=False)
 
     # Create a logging-based workflow callbacks for pipeline lifecycle events

graphrag/config/logging.py

Lines changed: 2 additions & 2 deletions
@@ -22,14 +22,14 @@ def enable_logging(log_filepath: str | Path, verbose: bool = False) -> None:
         Whether to log debug messages.
     """
     log_level = logging.DEBUG if verbose else logging.INFO
-
+
     # Use init_loggers with the specific log file and custom formatting
     init_loggers(
         log_level=log_level,
         log_file=log_filepath,
         log_format="%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s",
         date_format="%H:%M:%S",
-        enable_console=False
+        enable_console=False,
     )
 
 
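For reference, the format strings passed to init_loggers above produce log lines such as "14:32:07,123 graphrag.demo INFO starting workflow". A stdlib-only sketch of the same formatter, illustrative rather than how graphrag wires it internally:

import logging
import sys

# Reuse the exact format strings from the diff above with a plain StreamHandler.
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(
    logging.Formatter(
        fmt="%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s",
        datefmt="%H:%M:%S",
    )
)
demo_logger = logging.getLogger("graphrag.demo")
demo_logger.addHandler(handler)
demo_logger.setLevel(logging.INFO)
demo_logger.info("starting workflow")  # e.g. "14:32:07,123 graphrag.demo INFO starting workflow"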

graphrag/logger/standard_logging.py

Lines changed: 9 additions & 7 deletions
@@ -42,18 +42,20 @@
 
 def get_logger(name: str) -> logging.Logger:
     """Get a logger with the given name under the graphrag hierarchy.
-
+
     Parameters
     ----------
     name : str
         The name of the logger. Typically pass __name__ to get module-specific logger.
-
+
     Returns
     -------
     logging.Logger
         A logger configured for the graphrag package.
     """
-    return logging.getLogger(f"graphrag.{name}" if not name.startswith("graphrag") else name)
+    return logging.getLogger(
+        f"graphrag.{name}" if not name.startswith("graphrag") else name
+    )
 
 
 def init_loggers(
@@ -66,10 +68,10 @@ def init_loggers(
     date_format: str = "%Y-%m-%d %H:%M:%S",
 ) -> None:
     """Initialize logging handlers for graphrag based on configuration.
-
+
     This function merges the functionality of configure_logging and create_pipeline_logger
     to provide a unified way to set up logging for the graphrag package.
-
+
     Parameters
     ----------
     config : ReportingConfig | None, default=None
@@ -89,7 +91,7 @@
     """
     # Import BlobWorkflowLogger here to avoid circular imports
     from graphrag.logger.blob_workflow_logger import BlobWorkflowLogger
-
+
     # If log_file is provided directly, override config to use file-based logging
     if log_file:
         log_path = Path(log_file)
@@ -100,7 +102,7 @@
     elif config is None:
         # Default to file-based logging if no config provided (maintains backward compatibility)
         config = ReportingConfig(base_dir="logs", type=ReportingType.file)
-
+
     # Convert string log level to numeric value if needed
     if isinstance(log_level, str):
         log_level = getattr(logging, log_level.upper(), logging.INFO)
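The reformatted get_logger keeps its original behavior: a bare name is nested under the graphrag logger hierarchy, while a name that already starts with "graphrag" is used unchanged. A small usage sketch, assuming the graphrag package from this repository is importable:

from graphrag.logger.standard_logging import get_logger

# A bare module name is placed under the "graphrag" hierarchy...
print(get_logger("my_module").name)  # graphrag.my_module
# ...while a name already under it is returned as-is.
print(get_logger("graphrag.api.index").name)  # graphrag.api.index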

tests/unit/logger/test_standard_logging.py

Lines changed: 40 additions & 33 deletions
@@ -65,15 +65,17 @@ def test_init_loggers_console_enabled():
     """Test that init_loggers works with console enabled."""
     # Call init_loggers with console enabled (CLI mode)
     init_loggers(enable_console=True, log_level="INFO")
-
+
     logger = logging.getLogger("graphrag")
-
+
     # Should have both a console handler and a file handler (default config)
-    console_handlers = [h for h in logger.handlers if isinstance(h, logging.StreamHandler)]
+    console_handlers = [
+        h for h in logger.handlers if isinstance(h, logging.StreamHandler)
+    ]
     file_handlers = [h for h in logger.handlers if isinstance(h, logging.FileHandler)]
     assert len(console_handlers) > 0
     assert len(file_handlers) > 0  # Due to default file config
-
+
     # Clean up
     for handler in logger.handlers[:]:
         if isinstance(handler, logging.FileHandler):
@@ -86,25 +88,27 @@ def test_init_loggers_default_config():
     with tempfile.TemporaryDirectory() as temp_dir:
         # Call init_loggers with no config (should default to file logging)
         init_loggers(root_dir=temp_dir, log_level="INFO")
-
+
         logger = logging.getLogger("graphrag")
-
+
         # Should have a file handler due to default config
-        file_handlers = [h for h in logger.handlers if isinstance(h, logging.FileHandler)]
+        file_handlers = [
+            h for h in logger.handlers if isinstance(h, logging.FileHandler)
+        ]
         assert len(file_handlers) > 0
-
+
         # Test logging works
         test_message = "Test default config message"
         logger.info(test_message)
-
+
         # Check that the log file was created with default structure
         log_file = Path(temp_dir) / "logs" / "logs.txt"
         assert log_file.exists()
-
+
         with open(log_file) as f:
             content = f.read()
             assert test_message in content
-
+
         # Clean up
         for handler in logger.handlers[:]:
             if isinstance(handler, logging.FileHandler):
@@ -115,32 +119,31 @@ def test_init_loggers_default_config():
 def test_init_loggers_file_config():
     """Test that init_loggers works with file configuration."""
     with tempfile.TemporaryDirectory() as temp_dir:
-        config = ReportingConfig(
-            type=ReportingType.file,
-            base_dir="logs"
-        )
-
+        config = ReportingConfig(type=ReportingType.file, base_dir="logs")
+
         # Call init_loggers with file config
         init_loggers(config=config, root_dir=temp_dir, log_level="INFO")
-
+
         logger = logging.getLogger("graphrag")
-
+
         # Should have a file handler
-        file_handlers = [h for h in logger.handlers if isinstance(h, logging.FileHandler)]
+        file_handlers = [
+            h for h in logger.handlers if isinstance(h, logging.FileHandler)
+        ]
         assert len(file_handlers) > 0
-
+
         # Test logging works
         test_message = "Test init_loggers file message"
         logger.info(test_message)
-
+
         # Check that the log file was created
         log_file = Path(temp_dir) / "logs" / "logs.txt"
         assert log_file.exists()
-
+
         with open(log_file) as f:
             content = f.read()
             assert test_message in content
-
+
         # Clean up
         for handler in logger.handlers[:]:
             if isinstance(handler, logging.FileHandler):
@@ -151,32 +154,36 @@ def test_init_loggers_file_config():
 def test_init_loggers_console_config():
     """Test that init_loggers works with console configuration."""
     config = ReportingConfig(type=ReportingType.console)
-
+
     # Call init_loggers with console config but no enable_console
     init_loggers(config=config, log_level="INFO", enable_console=False)
-
+
     logger = logging.getLogger("graphrag")
-
+
     # Should have a console handler from the config
-    console_handlers = [h for h in logger.handlers if isinstance(h, logging.StreamHandler)]
+    console_handlers = [
+        h for h in logger.handlers if isinstance(h, logging.StreamHandler)
+    ]
     assert len(console_handlers) > 0
-
+
     # Clean up
     logger.handlers.clear()
 
 
 def test_init_loggers_both_console():
     """Test that init_loggers doesn't duplicate console handlers."""
     config = ReportingConfig(type=ReportingType.console)
-
+
     # Call init_loggers with both console config and enable_console=True
     init_loggers(config=config, log_level="INFO", enable_console=True)
-
+
     logger = logging.getLogger("graphrag")
-
+
     # Should have only one console handler (no duplicates)
-    console_handlers = [h for h in logger.handlers if isinstance(h, logging.StreamHandler)]
+    console_handlers = [
+        h for h in logger.handlers if isinstance(h, logging.StreamHandler)
+    ]
     assert len(console_handlers) == 1
-
+
     # Clean up
     logger.handlers.clear()
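Each test above tears down the handlers it registered; the bodies of the cleanup loops fall outside the diff hunks, so the following is an assumed, typical implementation rather than the repository's exact code:

import logging

logger = logging.getLogger("graphrag")
for handler in logger.handlers[:]:  # iterate over a copy while removing
    if isinstance(handler, logging.FileHandler):
        handler.close()  # assumption: release the log file before detaching the handler
        logger.removeHandler(handler)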
