Skip to content

Commit 50bd3b1

Browse files
authored
Merge pull request #59 from HeyLittleJohn/logger-fix
PATCH: more fixes to logging so it can be used as a common function outside of the curator package
2 parents 02208ab + 3957ad6 commit 50bd3b1

File tree

8 files changed

+37
-19
lines changed

8 files changed

+37
-19
lines changed

curator/data_pipeline/QuotePool.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import asyncio
2+
import logging
23
import queue
34
import traceback
45
from typing import (
@@ -24,7 +25,7 @@
2425
TaskID,
2526
)
2627

27-
from curator.proj_constants import log
28+
log = logging.getLogger(__name__)
2829

2930

3031
class QuoteScheduler(RoundRobin):

curator/data_pipeline/download.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
import logging
12
from datetime import datetime
23

34
from aiomultiprocess import Pool
@@ -22,9 +23,11 @@
2223
from db_tools.queries import lookup_multi_ticker_ids
2324
from db_tools.utils import OptionTicker
2425

25-
from curator.proj_constants import POLYGON_BASE_URL, log
26+
from curator.proj_constants import POLYGON_BASE_URL
2627
from curator.utils import pool_kwarg_config
2728

29+
log = logging.getLogger(__name__)
30+
2831
planned_exceptions = (
2932
InvalidArgs,
3033
ProjClientConnectionError,

curator/data_pipeline/orchestrator.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
import logging
12
from datetime import datetime
23

34
from data_pipeline.download import (
@@ -20,7 +21,7 @@
2021
from db_tools.utils import generate_o_ticker_lookup, pull_tickers_from_db
2122
from pandas import DataFrame
2223

23-
from curator.proj_constants import log
24+
log = logging.getLogger(__name__)
2425

2526

2627
async def import_all(tickers: list, start_date: datetime, end_date: datetime, months_hist: int):

curator/data_pipeline/path_runner.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
import logging
12
import os
23
from abc import ABC, abstractmethod
34
from datetime import datetime
@@ -14,7 +15,7 @@
1415
)
1516
from db_tools.utils import OptionTicker
1617

17-
from curator.proj_constants import BASE_DOWNLOAD_PATH, POSTGRES_BATCH_MAX, log
18+
from curator.proj_constants import BASE_DOWNLOAD_PATH, POSTGRES_BATCH_MAX, logger_setup
1819
from curator.utils import (
1920
clean_o_ticker,
2021
months_ago,
@@ -23,6 +24,8 @@
2324
timestamp_to_datetime,
2425
)
2526

27+
log = logging.getLogger(__name__)
28+
2629

2730
class PathRunner(ABC):
2831
"""Base class for runner that will traverse the directory structure and retrieve/clean raw data.

curator/data_pipeline/polygon_utils.py

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import asyncio
2+
import logging
23
import os
34
from abc import ABC, abstractmethod
45
from datetime import date, datetime
@@ -17,7 +18,7 @@
1718
from dateutil.relativedelta import relativedelta
1819
from db_tools.utils import OptionTicker
1920

20-
from curator.proj_constants import BASE_DOWNLOAD_PATH, POLYGON_API_KEY, POLYGON_BASE_URL, log
21+
from curator.proj_constants import BASE_DOWNLOAD_PATH, POLYGON_API_KEY, POLYGON_BASE_URL
2122
from curator.utils import (
2223
extract_underlying_from_o_ticker,
2324
first_weekday_of_month,
@@ -28,6 +29,8 @@
2829
write_api_data_to_file,
2930
)
3031

32+
log = logging.getLogger(__name__)
33+
3134

3235
class Timespans(Enum):
3336
second = "second"

curator/data_pipeline/uploader.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -103,5 +103,6 @@ async def upload_options_quotes(ticker: str):
103103

104104

105105
if __name__ == "__main__":
106+
log.info("more success!")
106107
failed_paths = asyncio.run(upload_options_quotes(ticker="QQQ"))
107108
print(f"failed paths: {failed_paths}")

curator/proj_constants.py

Lines changed: 15 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -9,20 +9,22 @@
99
from pathlib import Path
1010

1111
import pandas_market_calendars as mcal
12-
import sentry_sdk
1312
import uvloop
14-
from sentry_sdk import capture_exception
1513
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
1614
from sqlalchemy.orm import sessionmaker
1715

18-
SENTRY_URL = os.environ.get("SENTRY_URL")
19-
sentry_sdk.init(
20-
dsn=SENTRY_URL,
21-
traces_sample_rate=0.1,
22-
)
23-
2416
ENVIRONMENT = os.environ.get("ENVIRONMENT")
2517
DEBUG = False
18+
SENTRY_URL = os.environ.get("SENTRY_URL", None)
19+
20+
if SENTRY_URL:
21+
import sentry_sdk
22+
from sentry_sdk import capture_exception
23+
24+
sentry_sdk.init(
25+
dsn=SENTRY_URL,
26+
traces_sample_rate=0.1,
27+
)
2628

2729

2830
# NOTE: use this function if pass variables to env via docker .env file. Otherwise use .pgpass
@@ -64,11 +66,12 @@ def db_uri_maker() -> str:
6466

6567
POOL_DEFAULT_KWARGS = {
6668
"processes": CPUS,
67-
"exception_handler": capture_exception,
6869
"loop_initializer": uvloop.new_event_loop,
6970
"childconcurrency": int(MAX_CONCURRENT_REQUESTS / CPUS),
7071
"queuecount": CPUS,
7172
}
73+
if SENTRY_URL:
74+
POOL_DEFAULT_KWARGS["exception_handler"] = capture_exception
7275

7376
async_engine = create_async_engine(
7477
POSTGRES_DATABASE_URL,
@@ -92,7 +95,7 @@ def db_uri_maker() -> str:
9295
)
9396

9497

95-
def logger_setup(project_name: str, debug=False):
98+
def logger_setup(project_name: str, debug=False, name=__name__) -> Logger:
9699
root_logger: Logger = logging.getLogger()
97100
log: Logger = logging.getLogger(__name__)
98101
log_formatter = logging.Formatter(
@@ -144,6 +147,7 @@ def filter(self, record):
144147
+ datetime.now().strftime("%Y-%m-%d")
145148
+ ".log"
146149
)
150+
os.makedirs(os.path.dirname(log_path), exist_ok=True)
147151
file_handler = FileHandler(log_path)
148152
file_handler.setFormatter(log_formatter)
149153

@@ -152,7 +156,7 @@ def filter(self, record):
152156
return log
153157

154158

155-
log = logger_setup("curator", DEBUG)
159+
log = logger_setup("curator", debug=DEBUG)
156160

157161
# market calendar
158162
o_cal = mcal.get_calendar("CBOE_Equity_Options")

curator/utils.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import functools
22
import inspect
33
import json
4+
import logging
45
import os
56
from datetime import datetime
67
from json import JSONDecodeError
@@ -116,8 +117,9 @@ async def _session_work(session: AsyncSession, args, kwargs):
116117

117118
@functools.wraps(func)
118119
async def wrapper_events(*args, **kwargs):
120+
wrap_log = logging.getLogger(__name__)
119121
func_mod_and_name = f"{func.__module__}.{func.__name__}"
120-
log.info(f"Starting {func_mod_and_name}")
122+
wrap_log.info(f"Starting {func_mod_and_name}")
121123
session_passed = False
122124
for arg in list(args) + list(kwargs.values()):
123125
if issubclass(type(arg), AsyncSession):
@@ -138,10 +140,10 @@ async def wrapper_events(*args, **kwargs):
138140
finally:
139141
await session.close()
140142

141-
log.info(f"Finished {func_mod_and_name}")
143+
wrap_log.info(f"Finished {func_mod_and_name}")
142144
return func_return
143145
except Exception as e:
144-
log.exception(e)
146+
wrap_log.exception(e)
145147
raise
146148

147149
return wrapper_events

0 commit comments

Comments (0)