Skip to content

Commit

Permalink
feat: use CHAINID constant throughout repo (#870)
Browse files Browse the repository at this point in the history
* feat: CHAINID constant

* fix: circular import

* feat: adapt networks

* fix: circular import

* fix: circular import
Loading branch information
BobTheBuidler authored Dec 22, 2024
1 parent e4f672a commit 82ad965
Show file tree
Hide file tree
Showing 28 changed files with 143 additions and 136 deletions.
4 changes: 2 additions & 2 deletions y/_db/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,10 +171,10 @@ def set_metadata(self, from_block: int, done_thru: int) -> None:
"""
try:
with db_session:
return self._set_metadata(from_block, done_thru)
self._set_metadata(from_block, done_thru)
except TransactionIntegrityError as e:
logger.debug("%s got exc %s when setting cache metadata", self, e)
return self.set_metadata(from_block, done_thru)
self.set_metadata(from_block, done_thru)
except OptimisticCheckError as e:
# Don't need to update in this case
logger.debug("%s got exc %s when setting cache metadata", self, e)
Expand Down
5 changes: 3 additions & 2 deletions y/_db/decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
db_session,
)


_T = TypeVar("_T")
_P = ParamSpec("_P")

Expand Down Expand Up @@ -113,7 +114,7 @@ def retry_locked_wrap(*args: _P.args, **kwargs: _P.kwargs) -> _T:


_result_count_logger = logging.getLogger(f"{__name__}.result_count")

_CHAIN_INFO = "chain", chain.id

def log_result_count(
name: str, arg_names: Iterable[str] = []
Expand Down Expand Up @@ -145,7 +146,7 @@ def result_count_wrap(*args: _P.args, **kwargs: _P.kwargs) -> _T:
results = fn(*args, **kwargs)
if _result_count_logger.isEnabledFor(logging.DEBUG):
arg_values = " ".join(
f"{k} {v}" for k, v in [("chain", chain.id), *zip(arg_names, args)]
f"{k} {v}" for k, v in (_CHAIN_INFO, *zip(arg_names, args))
)
_result_count_logger.debug(
"loaded %s %s for %s", len(results), name, arg_values
Expand Down
6 changes: 3 additions & 3 deletions y/_db/utils/contract.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,13 @@
from typing import Dict, Optional

from a_sync import ProcessingQueue, PruningThreadPoolExecutor, a_sync
from brownie import chain
from cachetools import TTLCache, cached
from pony.orm import select
from y._db.decorators import db_session_retry_locked, log_result_count
from y._db.entities import Contract
from y._db.utils._ep import _get_get_token
from y._db.utils.utils import ensure_block
from y.constants import CHAINID
from y.datatypes import Address, Block


Expand Down Expand Up @@ -70,7 +70,7 @@ def _set_deploy_block(address: str, deploy_block: int) -> None:

ensure_block(deploy_block, sync=True)
get_token = _get_get_token()
get_token(address, sync=True).deploy_block = (chain.id, deploy_block)
get_token(address, sync=True).deploy_block = (CHAINID, deploy_block)
_logger_debug("deploy block cached for %s: %s", address, deploy_block)


Expand Down Expand Up @@ -109,6 +109,6 @@ def known_deploy_blocks() -> Dict[Address, Block]:
select(
(c.address, c.deploy_block.number)
for c in Contract
if c.chain.id == chain.id and c.deploy_block.number
if c.chain.id == CHAINID and c.deploy_block.number
)
)
15 changes: 7 additions & 8 deletions y/_db/utils/logs.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@
from a_sync import PruningThreadPoolExecutor, a_sync
from a_sync.executor import AsyncExecutor
from async_lru import alru_cache
from brownie import chain
from brownie.network.event import _EventItem
from eth_typing import HexStr
from evmspec.data import Address, HexBytes32, uint
Expand All @@ -25,6 +24,7 @@
from y._db.log import Log
from y._db.utils.bulk import insert as _bulk_insert
from y._db.utils._ep import _get_get_block
from y.constants import CHAINID

logger = logging.getLogger(__name__)

Expand All @@ -43,7 +43,6 @@

_BLOCK_COLS = "chain", "number"
_BLOCK_COLS_EXTENDED = "chain", "number", "classtype"
_CHAINID = chain.id

_topic_executor = PruningThreadPoolExecutor(10, "ypricemagic db executor [topic]")
_hash_executor = PruningThreadPoolExecutor(10, "ypricemagic db executor [hash]")
Expand Down Expand Up @@ -86,7 +85,7 @@ async def _prepare_log(log: Log) -> tuple:
for i, topic_dbid in itertools.zip_longest(range(4), topic_dbids)
}
params = {
"block_chain": _CHAINID,
"block_chain": CHAINID,
"block_number": log.blockNumber,
"transaction": transaction_dbid,
"log_index": log.logIndex,
Expand All @@ -111,12 +110,12 @@ async def bulk_insert(
# handle a conflict with eth-portfolio's extended db
if _check_using_extended_db():
blocks = tuple(
(_CHAINID, block, "BlockExtended")
(CHAINID, block, "BlockExtended")
for block in {log.blockNumber for log in logs}
)
blocks_fut = submit(_bulk_insert, Block, _BLOCK_COLS_EXTENDED, blocks)
else:
blocks = tuple((_CHAINID, block) for block in {log.blockNumber for log in logs})
blocks = tuple((CHAINID, block) for block in {log.blockNumber for log in logs})
blocks_fut = submit(_bulk_insert, Block, _BLOCK_COLS, blocks)
del blocks

Expand All @@ -137,7 +136,7 @@ async def bulk_insert(
)
del topics

await gather(block_fut, hashes_fut, topics_fut)
await gather(blocks_fut, hashes_fut, topics_fut)

await executor.run(
_bulk_insert,
Expand Down Expand Up @@ -178,7 +177,7 @@ def _get_hash_dbid(hexstr: HexStr) -> int:
def get_decoded(log: Log) -> Optional[_EventItem]:
# TODO: load these in bulk
if decoded := DbLog[
_CHAINID, log.block_number, log.transaction_hash, log.log_index
CHAINID, log.block_number, log.transaction_hash, log.log_index
].decoded:
return _EventItem(
decoded["name"], decoded["address"], decoded["event_data"], decoded["pos"]
Expand All @@ -188,7 +187,7 @@ def get_decoded(log: Log) -> Optional[_EventItem]:
@db_session
@retry_locked
def set_decoded(log: Log, decoded: _EventItem) -> None:
DbLog[_CHAINID, log.block_number, log.transaction_hash, log.log_index].decoded = (
DbLog[CHAINID, log.block_number, log.transaction_hash, log.log_index].decoded = (
decoded
)

Expand Down
10 changes: 5 additions & 5 deletions y/_db/utils/price.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
from typing import Dict, Optional

from a_sync import ProcessingQueue
from brownie import chain
from cachetools import TTLCache, cached
from pony.orm import select

Expand All @@ -13,6 +12,7 @@
from y._db.entities import Price, insert
from y._db.utils.token import ensure_token
from y._db.utils.utils import ensure_block
from y.constants import CHAINID
from y.datatypes import Address


Expand Down Expand Up @@ -47,7 +47,7 @@ def get_price(address: str, block: int) -> Optional[Decimal]:
if price := known_prices_at_block(block).pop(address, None):
_logger_debug("found %s block %s price %s in ydb", address, block, price)
return price
if (price := Price.get(token=(chain.id, address), block=(chain.id, block))) and (
if (price := Price.get(token=(CHAINID, address), block=(CHAINID, block))) and (
price := price.price
):
_logger_debug("found %s block %s price %s in ydb", address, block, price)
Expand Down Expand Up @@ -83,8 +83,8 @@ async def _set_price(address: str, block: int, price: Decimal) -> None:
try:
await insert(
type=Price,
block=(chain.id, block),
token=(chain.id, address),
block=(CHAINID, block),
token=(CHAINID, address),
price=Decimal(price),
sync=False,
)
Expand Down Expand Up @@ -126,6 +126,6 @@ def known_prices_at_block(number: int) -> Dict[Address, Decimal]:
select(
(p.token.address, p.price)
for p in Price
if p.block.chain.id == chain.id and p.block.number == number
if p.block.CHAINID == CHAINID and p.block.number == number
)
)
18 changes: 9 additions & 9 deletions y/_db/utils/token.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@

import a_sync
from a_sync import PruningThreadPoolExecutor
from brownie import chain
from cachetools import TTLCache, cached
from pony.orm import commit, db_session, select

Expand All @@ -18,6 +17,7 @@
from y._db.entities import Address, Token, insert
from y._db.exceptions import EEEError
from y._db.utils._ep import _get_get_token
from y.constants import CHAINID
from y.datatypes import AnyAddressType
from y.utils import _erc20

Expand Down Expand Up @@ -59,13 +59,13 @@ def get_token(address: str) -> Token:
if address == constants.EEE_ADDRESS:
raise EEEError(f"cannot create token entity for {constants.EEE_ADDRESS}")
while True:
if entity := Address.get(chain=chain.id, address=address):
if entity := Address.get(chain=CHAINID, address=address):
if isinstance(entity, Token):
return entity
entity.delete()
commit()
return insert(type=Token, chain=chain.id, address=address) or Token.get(
chain=chain.id, address=address
return insert(type=Token, chain=CHAINID, address=address) or Token.get(
chain=CHAINID, address=address
)


Expand Down Expand Up @@ -404,7 +404,7 @@ def known_tokens() -> Set[str]:
>>> tokens = known_tokens()
>>> print(tokens)
"""
return set(select(t.address for t in Token if t.chain.id == chain.id))
return set(select(t.address for t in Token if t.chain.id == CHAINID))


@cached(TTLCache(maxsize=1, ttl=60 * 60), lock=threading.Lock())
Expand All @@ -424,7 +424,7 @@ def known_buckets() -> Dict[str, str]:
"""
return dict(
select(
(t.address, t.bucket) for t in Token if t.chain.id == chain.id and t.bucket
(t.address, t.bucket) for t in Token if t.chain.id == CHAINID and t.bucket
)
)

Expand All @@ -448,7 +448,7 @@ def known_decimals() -> Dict[Address, int]:
select(
(t.address, t.decimals)
for t in Token
if t.chain.id == chain.id and t.decimals
if t.chain.id == CHAINID and t.decimals
)
)

Expand All @@ -470,7 +470,7 @@ def known_symbols() -> Dict[Address, str]:
"""
return dict(
select(
(t.address, t.symbol) for t in Token if t.chain.id == chain.id and t.symbol
(t.address, t.symbol) for t in Token if t.chain.id == CHAINID and t.symbol
)
)

Expand All @@ -491,5 +491,5 @@ def known_names() -> Dict[Address, str]:
>>> print(names)
"""
return dict(
select((t.address, t.name) for t in Token if t.chain.id == chain.id and t.name)
select((t.address, t.name) for t in Token if t.chain.id == CHAINID and t.name)
)
21 changes: 11 additions & 10 deletions y/_db/utils/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,6 @@

from a_sync import ProcessingQueue, PruningThreadPoolExecutor, a_sync
from pony.orm import commit, select
from brownie import chain

from y._db.decorators import (
a_sync_read_db_session,
Expand All @@ -15,6 +14,8 @@
log_result_count,
)
from y._db.entities import Block, BlockAtTimestamp, Chain, insert
from y.constants import CHAINID


logger = getLogger(__name__)
_logger_debug = logger.debug
Expand Down Expand Up @@ -43,7 +44,7 @@ def get_chain() -> Chain:
- :class:`Chain`
- :func:`insert`
"""
return Chain.get(id=chain.id) or insert(type=Chain, id=chain.id) or Chain[chain.id]
return Chain.get(id=CHAINID) or insert(type=Chain, id=CHAINID) or Chain[CHAINID]


@lru_cache
Expand Down Expand Up @@ -82,9 +83,9 @@ def get_block(number: int) -> Block:
- :class:`Block`
- :func:`insert`
"""
if block := Block.get(chain=chain.id, number=number):
if block := Block.get(chain=CHAINID, number=number):
return block
return insert(type=Block, chain=chain.id, number=number) or get_block(
return insert(type=Block, chain=CHAINID, number=number) or get_block(
number, sync=True
)

Expand Down Expand Up @@ -147,7 +148,7 @@ def get_block_timestamp(number: int) -> Optional[int]:
ts = parser.parse(ts)
unix = ts.timestamp()
_logger_debug(
"got Block[%s, %s].timestamp from cache: %s, %s", chain.id, number, unix, ts
f"got Block[{CHAINID}, %s].timestamp from cache: %s, %s", number, unix, ts
)
return unix

Expand All @@ -174,7 +175,7 @@ def get_block_at_timestamp(timestamp: datetime) -> Optional[int]:
if block := known_blocks_for_timestamps().pop(timestamp, None):
_logger_debug("found block %s for %s in ydb", block, timestamp)
return block
elif entity := BlockAtTimestamp.get(chainid=chain.id, timestamp=timestamp):
elif entity := BlockAtTimestamp.get(chainid=CHAINID, timestamp=timestamp):
block = entity.block
_logger_debug("found block %s for %s in ydb", block, timestamp)
return block
Expand Down Expand Up @@ -239,7 +240,7 @@ def _set_block_at_timestamp(timestamp: datetime, block: int) -> None:
See Also:
- :class:`BlockAtTimestamp`
"""
insert(BlockAtTimestamp, chainid=chain.id, timestamp=timestamp, block=block)
insert(BlockAtTimestamp, chainid=CHAINID, timestamp=timestamp, block=block)
_logger_debug("inserted block %s for %s", block, timestamp)


Expand All @@ -265,7 +266,7 @@ def known_blocks() -> Set[int]:
See Also:
- :class:`Block`
"""
return set(select(b.number for b in Block if b.chain.id == chain.id))
return set(select(b.number for b in Block if b.chain.id == CHAINID))


@lru_cache(maxsize=1)
Expand All @@ -284,7 +285,7 @@ def known_block_timestamps() -> Dict[int, datetime]:
- :class:`Block`
"""
query = select(
(b.number, b.timestamp) for b in Block if b.chain.id == chain.id and b.timestamp
(b.number, b.timestamp) for b in Block if b.chain.id == CHAINID and b.timestamp
)
page_size = 100_000
timestamps = {}
Expand All @@ -311,6 +312,6 @@ def known_blocks_for_timestamps() -> Dict[datetime, int]:
"""
return dict(
select(
(x.timestamp, x.block) for x in BlockAtTimestamp if x.chainid == chain.id
(x.timestamp, x.block) for x in BlockAtTimestamp if x.chainid == CHAINID
)
)
Loading

0 comments on commit 82ad965

Please sign in to comment.