Skip to content

Commit

Permalink
Lazily evaluate log message parameters
Browse files Browse the repository at this point in the history
Logging like this:

    logger.debug("Hello, %s" % name)

causes the message string to always be interpolated even if the
message is not going to be logged.

Logging like this:

    logger.debug("Hello, %s", name)

will mean the string is only ever interpolated if it will be logged.

In practice this will make little difference here if the log level
is INFO
or above, but it does mean an `if` statement can be removed from
StreamListener.

See:

https://docs.python.org/3/howto/logging.html#optimization
https://stackoverflow.com/questions/4148790/lazy-logger-message-string-evaluation
  • Loading branch information
petedmarsh committed Sep 2, 2023
1 parent 796ec51 commit e55abe7
Show file tree
Hide file tree
Showing 6 changed files with 86 additions and 37 deletions.
18 changes: 10 additions & 8 deletions examples/strategies/marketrecorder.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def __init__(self, *args, **kwargs):
self._queue = queue.Queue()

def add(self) -> None:
logger.info("Adding strategy %s with id %s" % (self.name, self.recorder_id))
logger.info("Adding strategy %s with id %s", self.name, self.recorder_id)
# check local dir
if not os.path.isdir(self.local_dir):
raise OSError("File dir %s does not exist" % self.local_dir)
Expand Down Expand Up @@ -94,8 +94,10 @@ def process_closed_market(self, market, data: dict) -> None:
# check that file actually exists
if not os.path.isfile(file_dir):
logger.error(
"File: %s does not exist in /%s/%s/"
% (self.local_dir, market_id, self.recorder_id)
"File: %s does not exist in /%s/%s/",
self.local_dir,
market_id,
self.recorder_id,
)
return

Expand All @@ -117,7 +119,7 @@ def _load_processor(self):
# check file still exists (potential race condition)
if not os.path.isfile(file_dir):
logger.warning(
"File: %s does not exist in %s" % (market.market_id, file_dir)
"File: %s does not exist in %s", market.market_id, file_dir
)
continue
# compress file
Expand Down Expand Up @@ -173,8 +175,7 @@ def _clean_up(self) -> None:
if seconds_since > self._market_expiration:
if self._remove_gz_file:
logger.info(
"Removing: %s, age: %ss"
% (gz_path, round(seconds_since, 2))
"Removing: %s, age: %ss", gz_path, round(seconds_since, 2)
)
os.remove(gz_path)
txt_path = os.path.join(directory, file.split(".gz")[0])
Expand All @@ -183,8 +184,9 @@ def _clean_up(self) -> None:
seconds_since = time.time() - file_stats.st_mtime
if seconds_since > self._market_expiration:
logger.info(
"Removing: %s, age: %ss"
% (txt_path, round(seconds_since, 2))
"Removing: %s, age: %ss",
txt_path,
round(seconds_since, 2),
)
os.remove(txt_path)

Expand Down
4 changes: 3 additions & 1 deletion flumine/execution/baseexecution.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,9 @@ def _order_logger(
self, order: BaseOrder, instruction_report, package_type: OrderPackageType
):
logger.info(
"Order %s: %s" % (package_type.value, instruction_report.status),
"Order %s: %s",
package_type.value,
instruction_report.status,
extra={
"bet_id": order.bet_id,
"order_id": order.id,
Expand Down
30 changes: 20 additions & 10 deletions flumine/streams/datastream.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,15 +56,19 @@ def _process(self, data: list, publish_time: int) -> bool:
# removes closed market from cache
del self._caches[market_id]
logger.info(
"[MarketStream: %s] %s removed, %s markets in cache"
% (self.unique_id, market_id, len(self._caches))
"[MarketStream: %s] %s removed, %s markets in cache",
self.unique_id,
market_id,
len(self._caches),
)
elif market_id not in self._caches:
# adds empty object to cache to track live market count
self._caches[market_id] = object()
logger.info(
"[MarketStream: %s] %s added, %s markets in cache"
% (self.unique_id, market_id, len(self._caches))
"[MarketStream: %s] %s added, %s markets in cache",
self.unique_id,
market_id,
len(self._caches),
)
self._updates_processed += 1

Expand All @@ -82,8 +86,10 @@ def _process(self, data: list, publish_time: int) -> bool:
# adds empty object to cache to track live market count
self._caches[market_id] = object()
logger.info(
"[OrderStream: %s] %s added, %s markets in cache"
% (self.unique_id, market_id, len(self._caches))
"[OrderStream: %s] %s added, %s markets in cache",
self.unique_id,
market_id,
len(self._caches),
)
self._updates_processed += 1

Expand All @@ -101,8 +107,10 @@ def _process(self, data: list, publish_time: int) -> bool:
# adds empty object to cache to track live market count
self._caches[market_id] = object()
logger.info(
"[RaceStream: %s] %s added, %s markets in cache"
% (self.unique_id, market_id, len(self._caches))
"[RaceStream: %s] %s added, %s markets in cache",
self.unique_id,
market_id,
len(self._caches),
)
self._updates_processed += 1

Expand All @@ -120,8 +128,10 @@ def _process(self, data: list, publish_time: int) -> bool:
# adds empty object to cache to track live market count
self._caches[market_id] = object()
logger.info(
"[CricketStream: %s] %s added, %s markets in cache"
% (self.unique_id, market_id, len(self._caches))
"[CricketStream: %s] %s added, %s markets in cache",
self.unique_id,
market_id,
len(self._caches),
)
self._updates_processed += 1

Expand Down
27 changes: 19 additions & 8 deletions flumine/streams/historicalstream.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,10 @@ def _process(self, data: list, publish_time: int) -> bool:
logger.warning(
"[%s: %s]: Missing marketDefinition on market %s resulting "
"in potential missing data in the MarketBook (make sure "
"EX_MARKET_DEF is requested)"
% (self, self.unique_id, market_id)
"EX_MARKET_DEF is requested)",
self,
self.unique_id,
market_id,
)
market_book_cache = MarketBookCache(
market_id,
Expand All @@ -55,8 +57,11 @@ def _process(self, data: list, publish_time: int) -> bool:
)
self._caches[market_id] = market_book_cache
logger.info(
"[%s: %s]: %s added, %s markets in cache"
% (self, self.unique_id, market_id, len(self._caches))
"[%s: %s]: %s added, %s markets in cache",
self,
self.unique_id,
market_id,
len(self._caches),
)

# listener_kwargs filtering
Expand Down Expand Up @@ -116,8 +121,11 @@ def _process(self, race_updates: list, publish_time: int) -> bool:
race_cache.start_time = create_time(publish_time, race_id)
self._caches[market_id] = race_cache
logger.info(
"[%s: %s]: %s added, %s markets in cache"
% (self, self.unique_id, market_id, len(self._caches))
"[%s: %s]: %s added, %s markets in cache",
self,
self.unique_id,
market_id,
len(self._caches),
)

# filter after start time
Expand Down Expand Up @@ -151,8 +159,11 @@ def _process(self, cricket_changes: list, publish_time: int) -> bool:
)
self._caches[market_id] = cricket_match_cache
logger.info(
"[%s: %s]: %s added, %s markets in cache"
% (self, self.unique_id, market_id, len(self._caches))
"[%s: %s]: %s added, %s markets in cache",
self,
self.unique_id,
market_id,
len(self._caches),
)

cricket_match_cache.update_cache(cricket_change, publish_time)
Expand Down
6 changes: 4 additions & 2 deletions flumine/streams/streams.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,8 +47,10 @@ def __call__(self, strategy: BaseStrategy) -> None:
country_code = get_file_md(market, "countryCode")
if market_types and market_type and market_type not in market_types:
logger.warning(
"Skipping market %s (%s) for strategy %s due to marketType filter"
% (market, market_type, strategy)
"Skipping market %s (%s) for strategy %s due to marketType filter",
market,
market_type,
strategy,
)
elif (
country_codes
Expand Down
38 changes: 30 additions & 8 deletions flumine/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -237,12 +237,18 @@ def call_strategy_error_handling(
return func(market, market_book)
except FlumineException as e:
logger.error(
"FlumineException %s in %s (%s)" % (e, func.__name__, market.market_id),
"FlumineException %s in %s (%s)",
e,
func.__name__,
market.market_id,
exc_info=True,
)
except Exception as e:
logger.critical(
"Unknown error %s in %s (%s)" % (e, func.__name__, market.market_id),
"Unknown error %s in %s (%s)",
e,
func.__name__,
market.market_id,
exc_info=True,
)
if config.raise_errors:
Expand All @@ -255,12 +261,18 @@ def call_middleware_error_handling(middleware, market) -> None:
middleware(market)
except FlumineException as e:
logger.error(
"FlumineException %s in %s (%s)" % (e, middleware, market.market_id),
"FlumineException %s in %s (%s)",
e,
middleware,
market.market_id,
exc_info=True,
)
except Exception as e:
logger.critical(
"Unknown error %s in %s (%s)" % (e, middleware, market.market_id),
"Unknown error %s in %s (%s)",
e,
middleware,
market.market_id,
exc_info=True,
)
if config.raise_errors:
Expand All @@ -272,12 +284,18 @@ def call_process_orders_error_handling(strategy, market, strategy_orders: list)
strategy.process_orders(market, strategy_orders)
except FlumineException as e:
logger.error(
"FlumineException %s in %s (%s)" % (e, strategy, market.market_id),
"FlumineException %s in %s (%s)",
e,
strategy,
market.market_id,
exc_info=True,
)
except Exception as e:
logger.critical(
"Unknown error %s in %s (%s)" % (e, strategy, market.market_id),
"Unknown error %s in %s (%s)",
e,
strategy,
market.market_id,
exc_info=True,
)
if config.raise_errors:
Expand All @@ -289,12 +307,16 @@ def call_process_raw_data(strategy, clk: str, publish_time: int, datum: dict) ->
strategy.process_raw_data(clk, publish_time, datum)
except FlumineException as e:
logger.error(
"FlumineException %s in %s" % (e, strategy),
"FlumineException %s in %s",
e,
strategy,
exc_info=True,
)
except Exception as e:
logger.critical(
"Unknown error %s in %s" % (e, strategy),
"Unknown error %s in %s",
e,
strategy,
exc_info=True,
)
if config.raise_errors:
Expand Down

0 comments on commit e55abe7

Please sign in to comment.