
Commit e90686e
Create loggers per module
Parent: ac34426

7 files changed: +58 -54 lines
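
The change is mechanical and consistent: each module that previously called the root-level logging functions (logging.info, logging.debug, and so on) now obtains a logger named after itself. A minimal sketch of the pattern, using only the standard library:

    import logging

    # One logger per module, named after the module's dotted import path
    # (e.g. "blotter.backfill"). Records propagate up the dotted hierarchy,
    # so handlers and levels can still be configured once at the package root.
    logger = logging.getLogger(__name__)

    def do_work() -> None:
        logger.info("starting work")  # record carries this module's name

The payoff is that every record is tagged with the module that emitted it, and verbosity can be tuned per module instead of globally.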

blotter/__main__.py (+6 -4)

@@ -16,6 +16,8 @@
 from blotter.server import Servicer
 from blotter.streaming import StreamingManager
 
+logger = logging.getLogger(__package__)
+
 parser = ArgumentParser(
     prog="blotter",
     description="Microservice to connect to Interactive Brokers and stream market data into Google BigQuery",
@@ -98,7 +100,7 @@ def main() -> None:
 
     ib = ib_insync.IB()
 
-    logging.info(f"Connecting to IB on {args.tws_host}:{args.tws_port}")
+    logger.info(f"Connecting to IB on {args.tws_host}:{args.tws_port}")
     ib.connect(
         host=args.tws_host,
         port=args.tws_port,
@@ -115,9 +117,9 @@ def handle_ib_thread_error(error: Exception) -> None:
        try:
            raise error
        except IBWarning:
-            logging.warning(f"Warning from IB: {error}")
+            logger.warning(f"Warning from IB: {error}")
        except ConnectionError:
-            logging.exception(f"Connection error from IB:")
+            logger.exception(f"Connection error from IB:")
 
     thread = IBThread(ib, error_handler=handle_ib_thread_error)
     port = args.port or random.randint(49152, 65535)
@@ -131,7 +133,7 @@ def handle_ib_thread_error(error: Exception) -> None:
     if args.resume:
         servicer.resume_streaming()
 
-    logging.info(f"Server listening on port {port}")
+    logger.info(f"Server listening on port {port}")
     thread.run_forever()
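
Note that the entry point uses __package__ rather than __name__: inside blotter/__main__.py, __name__ evaluates to "__main__", whereas __package__ is "blotter", which makes this the parent logger of every module logger created below. A hypothetical configuration sketch (the handler and level here are illustrative, not part of this commit):

    import logging

    # Configure the "blotter" package logger once; records emitted by
    # blotter.backfill, blotter.options, etc. propagate up to it.
    package_logger = logging.getLogger("blotter")
    package_logger.setLevel(logging.INFO)
    package_logger.addHandler(logging.StreamHandler())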

blotter/backfill.py (+6 -8)

@@ -1,5 +1,5 @@
-import logging
 from datetime import datetime
+from logging import getLogger
 from typing import Tuple
 
 import ib_insync
@@ -10,11 +10,9 @@
 from blotter.blotter_pb2 import ContractSpecifier
 from blotter.error_handling import ErrorHandlerConfiguration
 from blotter.ib_helpers import DataError, qualify_contract_specifier
-from blotter.upload import (
-    BarsTableColumn,
-    table_name_for_contract,
-    upload_dataframe,
-)
+from blotter.upload import BarsTableColumn, table_name_for_contract, upload_dataframe
+
+logger = getLogger(__name__)
 
 
 async def backfill_bars(
@@ -50,7 +48,7 @@ async def backfill_bars(
 
     earliest_date = barList[0].date
 
-    logging.info(f"Loaded {len(barList)} historical bars for {con}")
+    logger.info(f"Loaded {len(barList)} historical bars for {con}")
     df = ib_insync.util.df(barList)
 
     # See fields on BarData.
@@ -69,7 +67,7 @@ async def backfill_bars(
 
     df[BarsTableColumn.BAR_SOURCE.value] = barList.whatToShow
 
-    logging.debug(df)
+    logger.debug(df)
     job = upload_dataframe(table_name_for_contract(con), df, error_handler)
 
     return (earliest_date, job)
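
This module spells the call as from logging import getLogger, which is equivalent to the logging.getLogger form used in __main__.py. One practical benefit of the per-module naming, shown as a hypothetical usage example: the noisy logger.debug(df) call above can be enabled for this module alone:

    import logging

    # Illustrative only: raise verbosity for blotter.backfill by itself,
    # leaving the rest of the "blotter" hierarchy at its configured level.
    logging.getLogger("blotter.backfill").setLevel(logging.DEBUG)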

blotter/ib_helpers.py (-1)

@@ -1,6 +1,5 @@
 import asyncio
 import concurrent.futures
-import logging
 import math
 from dataclasses import dataclass
 from decimal import Decimal
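
Here the import is simply dropped with nothing added in its place: no logging calls in ib_helpers.py are touched elsewhere in this diff, so the module evidently has no log statements and needs no logger of its own.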

blotter/options.py (+7 -5)

@@ -1,5 +1,5 @@
-import logging
 from datetime import datetime
+from logging import getLogger
 from typing import Any, Dict, Iterable, List, Union, cast
 
 import ib_insync
@@ -11,6 +11,8 @@
 from blotter.ib_helpers import qualify_contract_specifier, sanitize_price
 from blotter.upload import TickersTableColumn, table_name_for_contract, upload_dataframe
 
+logger = getLogger(__name__)
+
 
 async def _look_up_options(
     ib_client: ib_insync.IB, underlying: ib_insync.Contract,
@@ -25,7 +27,7 @@ async def _look_up_options(
         underlyingConId=underlying.conId,
     )
 
-    logging.info(f"Loaded {len(option_chains)} option chains for {underlying}")
+    logger.info(f"Loaded {len(option_chains)} option chains for {underlying}")
 
     option_contracts = (
         ib_insync.Option(
@@ -46,7 +48,7 @@ async def _look_up_options(
 
     qualified_contracts = await ib_client.qualifyContractsAsync(*option_contracts)
 
-    logging.info(
+    logger.info(
         f"Qualified {len(qualified_contracts)} options contracts for {underlying}"
     )
 
@@ -63,7 +65,7 @@ async def _load_tickers_into_dataframe(
    """
 
    tickers = await ib_client.reqTickersAsync(*contracts, regulatorySnapshot=False)
-    logging.info(f"Fetched {len(tickers)} tickers")
+    logger.info(f"Fetched {len(tickers)} tickers")
 
    def _ticker_dict(t: Any,) -> Dict[str, Union[str, datetime, float, None]]:  # FIXME
        price_is_negative = t.close < 0
@@ -121,7 +123,7 @@ def _ticker_dict(t: Any,) -> Dict[str, Union[str, datetime, float, None]]:  # FIXME
        TickersTableColumn.TIMESTAMP.value
    ].dt.round("ms")
 
-    logging.debug(f"Tickers DataFrame: {df}")
+    logger.debug(f"Tickers DataFrame: {df}")
    return df

blotter/server.py (+14 -12)

@@ -1,6 +1,6 @@
 import asyncio
 import concurrent.futures
-import logging
+from logging import getLogger
 from datetime import datetime, timedelta, timezone
 from typing import Awaitable, Callable, Iterator, Optional, Tuple, TypeVar
 
@@ -14,6 +14,8 @@
 from blotter.streaming import StreamingID, StreamingManager
 from google.cloud import bigquery
 
+logger = getLogger(__name__)
+
 _T = TypeVar("_T")
 
 
@@ -64,7 +66,7 @@ def resume_streaming(self) -> None:
        """
 
        streaming_ids = list(self._streaming_manager.resume_streaming(self._ib_thread))
-        logging.info(f"Resumed streaming IDs {streaming_ids}")
+        logger.info(f"Resumed streaming IDs {streaming_ids}")
 
    def _run_in_ib_thread(
        self, fn: Callable[[ib_insync.IB], Awaitable[_T]]
@@ -87,7 +89,7 @@ def LoadHistoricalData(
        request: blotter_pb2.LoadHistoricalDataRequest,
        context: grpc.ServicerContext,
    ) -> Iterator[blotter_pb2.LoadHistoricalDataResponse]:
-        logging.info(f"LoadHistoricalData: {request}")
+        logger.info(f"LoadHistoricalData: {request}")
 
        td = request_helpers.duration_timedelta_atleast(request.duration)
        end_date = datetime.fromtimestamp(request.endTimestampUTC, tz=timezone.utc)
@@ -96,7 +98,7 @@ def LoadHistoricalData(
            duration = request_helpers.duration_str(request.duration)
            start_date = end_date - timedelta(seconds=1)
        else:
-            logging.debug(f"Splitting requested duration {td}")
+            logger.debug(f"Splitting requested duration {td}")
            duration = request_helpers.duration_str(
                blotter_pb2.Duration(count=10, unit=blotter_pb2.Duration.TimeUnit.DAYS)
            )
@@ -108,7 +110,7 @@ async def _backfill(
        ) -> Tuple[datetime, bigquery.LoadJob]:
            nonlocal end_date
 
-            logging.info(
+            logger.info(
                f"Backfilling {duration} from {end_date} of {request.contractSpecifier}"
            )
 
@@ -126,15 +128,15 @@ async def _backfill(
        while end_date > start_date:
            (end_date, job) = self._run_in_ib_thread(_backfill).result()
 
-            logging.info(f"BigQuery backfill job launched: {job.job_id}")
+            logger.info(f"BigQuery backfill job launched: {job.job_id}")
            yield blotter_pb2.LoadHistoricalDataResponse(backfillJobID=job.job_id)
 
    def StartRealTimeData(
        self,
        request: blotter_pb2.StartRealTimeDataRequest,
        context: grpc.ServicerContext,
    ) -> blotter_pb2.StartRealTimeDataResponse:
-        logging.info(f"StartRealTimeData: {request}")
+        logger.info(f"StartRealTimeData: {request}")
 
        async def _start_stream(ib_client: ib_insync.IB) -> StreamingID:
            return await self._streaming_manager.start_stream(
@@ -145,7 +147,7 @@ async def _start_stream(ib_client: ib_insync.IB) -> StreamingID:
            )
 
        streaming_id = self._run_in_ib_thread(_start_stream).result()
-        logging.debug(f"Real-time bars streaming ID: {streaming_id}")
+        logger.debug(f"Real-time bars streaming ID: {streaming_id}")
 
        return blotter_pb2.StartRealTimeDataResponse(requestID=streaming_id)
 
@@ -154,7 +156,7 @@ def CancelRealTimeData(
        request: blotter_pb2.CancelRealTimeDataRequest,
        context: grpc.ServicerContext,
    ) -> blotter_pb2.CancelRealTimeDataResponse:
-        logging.info(f"CancelRealTimeData: {request}")
+        logger.info(f"CancelRealTimeData: {request}")
 
        async def _cancel_stream(ib_client: ib_insync.IB) -> None:
            await self._streaming_manager.cancel_stream(
@@ -167,23 +169,23 @@ async def _cancel_stream(ib_client: ib_insync.IB) -> None:
    def HealthCheck(
        self, request: blotter_pb2.HealthCheckRequest, context: grpc.ServicerContext,
    ) -> blotter_pb2.HealthCheckResponse:
-        logging.info(f"HealthCheck: {request}")
+        logger.info(f"HealthCheck: {request}")
        return blotter_pb2.HealthCheckResponse()
 
    def SnapshotOptionChain(
        self,
        request: blotter_pb2.SnapshotOptionChainRequest,
        context: grpc.ServicerContext,
    ) -> blotter_pb2.SnapshotOptionChainResponse:
-        logging.info(f"SnapshotOptionChain: {request}")
+        logger.info(f"SnapshotOptionChain: {request}")
 
        async def _snapshot(ib_client: ib_insync.IB) -> bigquery.LoadJob:
            return await snapshot_options(
                ib_client, request.contractSpecifier, self._error_handler
            )
 
        job = self._run_in_ib_thread(_snapshot).result()
-        logging.info(f"BigQuery import job launched: {job.job_id}")
+        logger.info(f"BigQuery import job launched: {job.job_id}")
 
        return blotter_pb2.SnapshotOptionChainResponse(importJobID=job.job_id)
