Using pypechain and hyperdrivetypes events #1698

Merged · 18 commits · Oct 8, 2024
2 changes: 1 addition & 1 deletion examples/interactive_hyperdrive_forking_example.py
@@ -65,7 +65,7 @@
 # trades below
 open_long_event = hyperdrive_agent0.open_long(base=FixedPoint(111))
 close_long_event = hyperdrive_agent0.close_long(
-    maturity_time=open_long_event.maturity_time, bonds=open_long_event.bond_amount
+    maturity_time=open_long_event.args.maturity_time, bonds=open_long_event.args.bond_amount
 )


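The change above reflects the switch to pypechain/hyperdrivetypes event objects: trade calls now return a typed event whose decoded log fields live under `.args` instead of directly on the event. A minimal sketch of the pattern, reusing the `hyperdrive_agent0` object set up earlier in this example; anything not shown in the diff itself is illustrative:

from fixedpointmath import FixedPoint

# Open a long; the return value is a typed event object.
open_long_event = hyperdrive_agent0.open_long(base=FixedPoint(111))

# Decoded event fields are now read from `.args`
# (previously they were attributes of the event itself).
maturity_time = open_long_event.args.maturity_time
bond_amount = open_long_event.args.bond_amount

# Close the position using those fields, mirroring the diff above.
close_long_event = hyperdrive_agent0.close_long(
    maturity_time=maturity_time,
    bonds=bond_amount,
)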
4 changes: 2 additions & 2 deletions examples/interactive_local_hyperdrive_example.py
@@ -76,8 +76,8 @@

 # Close previous longs from events
 close_long_event_1 = agent0.close_long(
-    maturity_time=open_long_event_1.maturity_time,
-    bonds=open_long_event_1.bond_amount,
+    maturity_time=open_long_event_1.args.maturity_time,
+    bonds=open_long_event_1.args.bond_amount,
     pool=hyperdrive0,
 )

4 changes: 2 additions & 2 deletions examples/interactive_remote_hyperdrive_example.py
@@ -95,8 +95,8 @@
 # Here, base is unitless and is dependent on the underlying tokens the pool uses.
 open_long_event = agent0.open_long(base=FixedPoint(11111), pool=hyperdrive_pool)
 close_long_event = agent0.close_long(
-    maturity_time=open_long_event.maturity_time,
-    bonds=open_long_event.bond_amount,
+    maturity_time=open_long_event.args.maturity_time,
+    bonds=open_long_event.args.bond_amount,
     pool=hyperdrive_pool,
 )

4 changes: 2 additions & 2 deletions examples/short_examples.ipynb
@@ -42,7 +42,7 @@
 " chain,\n",
 " registry_address=REGISTRY_ADDRESS,\n",
 ")\n",
-"agent.get_positions(pool_filter=registered_pools, show_closed_positions=True)"
+"agent.get_positions(pool_filter=list(registered_pools), show_closed_positions=True)"
 ]
 },
 {
@@ -157,7 +157,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.10.11"
+"version": "3.10.14"
 }
 },
 "nbformat": 4,
434 changes: 220 additions & 214 deletions examples/tutorial.ipynb

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion pyproject.toml
@@ -33,7 +33,7 @@ dependencies = [
     "fixedpointmath>=0.2.1",
     "hexbytes>=1.2.1",
     "hyperdrivepy==0.17.1",
-    "hyperdrivetypes>=1.0.18.5",
+    "hyperdrivetypes>=1.0.19.6",
     "ipython>=8.26.0",
     "matplotlib>=3.9.2",
     "mplfinance>=0.12.10b0",
4 changes: 2 additions & 2 deletions src/agent0/chainsync/analysis/db_to_analysis.py
@@ -6,7 +6,7 @@
 from sqlalchemy.orm import Session

 from agent0.chainsync.db.hyperdrive import (
-    PositionSnapshot,
+    DBPositionSnapshot,
     get_current_positions,
     get_latest_block_number_from_positions_snapshot_table,
 )
@@ -112,4 +112,4 @@ def snapshot_positions_to_db(

     if len(all_pool_positions) > 0:
         # Add wallet_pnl to the database
-        df_to_db(pd.concat(all_pool_positions, axis=0), PositionSnapshot, db_session)
+        df_to_db(pd.concat(all_pool_positions, axis=0), DBPositionSnapshot, db_session)
2 changes: 1 addition & 1 deletion src/agent0/chainsync/db/base/__init__.py
@@ -11,4 +11,4 @@
     initialize_session,
     query_tables,
 )
-from .schema import AddrToUsername, Base
+from .schema import DBAddrToUsername, DBBase
14 changes: 7 additions & 7 deletions src/agent0/chainsync/db/base/interface.py
@@ -17,7 +17,7 @@

 from agent0.chainsync import PostgresConfig, build_postgres_config_from_env

-from .schema import AddrToUsername, Base
+from .schema import DBAddrToUsername, DBBase

 # classes for sqlalchemy that define table schemas have no methods.
 # pylint: disable=too-few-public-methods
@@ -155,7 +155,7 @@ def initialize_session(
     for _ in range(10):
         try:
             # create tables
-            Base.metadata.create_all(engine)
+            DBBase.metadata.create_all(engine)
             # commit the transaction
             session.commit()
             exception = None
@@ -224,7 +224,7 @@ def add_addr_to_username(
         raise ValueError("Fatal error: postgres returning multiple entries for primary key")

     # This merge adds the row if not exist (keyed by address), otherwise will overwrite with this entry
-    session.merge(AddrToUsername(address=address, username=username))
+    session.merge(DBAddrToUsername(address=address, username=username))

     try:
         session.commit()
@@ -248,13 +248,13 @@ def get_addr_to_username(session: Session, address: str | None = None) -> pd.Dat
     DataFrame
         A DataFrame that consists of the queried pool config data
     """
-    query = session.query(AddrToUsername)
+    query = session.query(DBAddrToUsername)
     if address is not None:
-        query = query.filter(AddrToUsername.address == address)
+        query = query.filter(DBAddrToUsername.address == address)
     return pd.read_sql(query.statement, con=session.connection())


-class TableWithBlockNumber(Base):
+class TableWithBlockNumber(DBBase):
     """An abstract table that has block_number"""

     __abstract__ = True
@@ -271,7 +271,7 @@ def block_number(self) -> Column:
         return Column(String)


-def get_latest_block_number_from_table(table_obj: Type[Base], session: Session) -> int:
+def get_latest_block_number_from_table(table_obj: Type[DBBase], session: Session) -> int:
     """Get the latest block number based on the specified table in the db.

     Arguments
4 changes: 2 additions & 2 deletions src/agent0/chainsync/db/base/schema.py
@@ -16,11 +16,11 @@
 # Currently using `Mapped[Union[str, None]]` for backwards compatibility


-class Base(MappedAsDataclass, DeclarativeBase):
+class DBBase(MappedAsDataclass, DeclarativeBase):
     """Base class to subclass from to define the schema"""


-class AddrToUsername(Base):
+class DBAddrToUsername(DBBase):
     """Maps an address to a username."""

     __tablename__ = "addr_to_username"
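The schema classes gain a DB prefix (`Base` becomes `DBBase`, `AddrToUsername` becomes `DBAddrToUsername`), keeping the SQLAlchemy models visually distinct from the pypechain/hyperdrivetypes event classes. A minimal sketch of the renamed classes in use, mirroring the `session.merge` and `session.query` calls in `interface.py` above; the helper function itself is illustrative and not part of this PR:

from sqlalchemy.orm import Session

from agent0.chainsync.db.base import DBAddrToUsername


def upsert_and_fetch_username(session: Session, address: str, username: str) -> str | None:
    """Illustrative only: add or update a row, then read it back."""
    # Merge adds the row if it does not exist (keyed by address),
    # otherwise overwrites it with this entry.
    session.merge(DBAddrToUsername(address=address, username=username))
    session.commit()

    # Query back the row using the renamed model class.
    row = session.query(DBAddrToUsername).filter_by(address=address).first()
    return row.username if row is not None else None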
14 changes: 7 additions & 7 deletions src/agent0/chainsync/db/base/schema_test.py
@@ -2,7 +2,7 @@

 import pytest

-from .schema import AddrToUsername
+from .schema import DBAddrToUsername


 class TestAddrToUsernameTable:
@@ -11,36 +11,36 @@ class TestAddrToUsernameTable:
     @pytest.mark.docker
     def test_create_addr_to_username(self, db_session):
         """Create and entry"""
-        user_map = AddrToUsername(address="1", username="a")
+        user_map = DBAddrToUsername(address="1", username="a")
         db_session.add(user_map)
         db_session.commit()

-        retrieved_map = db_session.query(AddrToUsername).filter_by(address="1").first()
+        retrieved_map = db_session.query(DBAddrToUsername).filter_by(address="1").first()
         assert retrieved_map is not None
         assert retrieved_map.username == "a"

     @pytest.mark.docker
     def test_update_addr_to_username(self, db_session):
         """Update an entry"""
-        user_map = AddrToUsername(address="1", username="a")
+        user_map = DBAddrToUsername(address="1", username="a")
         db_session.add(user_map)
         db_session.commit()

         user_map.username = "b"
         db_session.commit()

-        updated_map = db_session.query(AddrToUsername).filter_by(address="1").first()
+        updated_map = db_session.query(DBAddrToUsername).filter_by(address="1").first()
         assert updated_map.username == "b"

     @pytest.mark.docker
     def test_delete_addr_to_username(self, db_session):
         """Delete an entry"""
-        user_map = AddrToUsername(address="1", username="a")
+        user_map = DBAddrToUsername(address="1", username="a")
         db_session.add(user_map)
         db_session.commit()

         db_session.delete(user_map)
         db_session.commit()

-        deleted_map = db_session.query(AddrToUsername).filter_by(address="1").first()
+        deleted_map = db_session.query(DBAddrToUsername).filter_by(address="1").first()
         assert deleted_map is None
2 changes: 1 addition & 1 deletion src/agent0/chainsync/db/hyperdrive/__init__.py
@@ -26,4 +26,4 @@
     get_total_pnl_over_time,
     get_trade_events,
 )
-from .schema import CheckpointInfo, HyperdriveAddrToName, PoolConfig, PoolInfo, PositionSnapshot
+from .schema import DBCheckpointInfo, DBHyperdriveAddrToName, DBPoolConfig, DBPoolInfo, DBPositionSnapshot
80 changes: 56 additions & 24 deletions src/agent0/chainsync/db/hyperdrive/chain_to_db.py
@@ -13,13 +13,14 @@
 from agent0.ethpy.hyperdrive import HyperdriveReadInterface

 from .convert_data import convert_checkpoint_events, convert_pool_config, convert_pool_info, convert_trade_events
+from .event_getters import get_event_logs_for_db
 from .interface import (
     add_pool_config,
     add_pool_infos,
     get_latest_block_number_from_checkpoint_info_table,
     get_latest_block_number_from_trade_event,
 )
-from .schema import CheckpointInfo, TradeEvent
+from .schema import DBCheckpointInfo, DBTradeEvent


 def init_data_chain_to_db(
@@ -142,13 +143,20 @@ def checkpoint_events_to_db(
     # NOTE we get all numeric arguments in events as string to prevent precision loss

     from_block = get_latest_block_number_from_checkpoint_info_table(db_session, interface.hyperdrive_address) + 1
-    all_events.extend(interface.get_checkpoint_events(from_block=from_block, numeric_args_as_str=True))
+    all_events.extend(
+        get_event_logs_for_db(
+            interface,
+            interface.hyperdrive_contract.events.CreateCheckpoint,
+            trade_base_unit_conversion=False,
+            from_block=from_block,
+        )
+    )

     events_df = convert_checkpoint_events(all_events)

     # Add to db
     if len(events_df) > 0:
-        df_to_db(events_df, CheckpointInfo, db_session)
+        df_to_db(events_df, DBCheckpointInfo, db_session)


 def trade_events_to_db(
@@ -192,25 +200,31 @@ def trade_events_to_db(
     # Look for transfer single events in both directions if wallet_addr is set
     if wallet_addr is not None:
         all_events.extend(
-            interface.get_transfer_single_events(
+            get_event_logs_for_db(
+                interface,
+                interface.hyperdrive_contract.events.TransferSingle,
+                trade_base_unit_conversion=False,
                 from_block=from_block,
                 argument_filters={"to": wallet_addr},
-                numeric_args_as_str=True,
             )
         )
         all_events.extend(
-            interface.get_transfer_single_events(
+            get_event_logs_for_db(
+                interface,
+                interface.hyperdrive_contract.events.TransferSingle,
+                trade_base_unit_conversion=False,
                 from_block=from_block,
                 argument_filters={"from": wallet_addr},
-                numeric_args_as_str=True,
             )
         )
     # Otherwise, don't filter by wallet
     else:
         all_events.extend(
-            interface.get_transfer_single_events(
+            get_event_logs_for_db(
+                interface,
+                interface.hyperdrive_contract.events.TransferSingle,
+                trade_base_unit_conversion=False,
                 from_block=from_block,
-                numeric_args_as_str=True,
             )
         )

@@ -223,56 +237,74 @@ def trade_events_to_db(
         provider_arg_filter = None

     all_events.extend(
-        interface.get_initialize_events(
+        get_event_logs_for_db(
+            interface,
+            interface.hyperdrive_contract.events.Initialize,
+            trade_base_unit_conversion=True,
             from_block=from_block,
             argument_filters=provider_arg_filter,
-            numeric_args_as_str=True,
         )
     )
     all_events.extend(
-        interface.get_open_long_events(
+        get_event_logs_for_db(
+            interface,
+            interface.hyperdrive_contract.events.OpenLong,
+            trade_base_unit_conversion=True,
             from_block=from_block,
             argument_filters=trader_arg_filter,
-            numeric_args_as_str=True,
         )
     )
     all_events.extend(
-        interface.get_close_long_events(
+        get_event_logs_for_db(
+            interface,
+            interface.hyperdrive_contract.events.CloseLong,
+            trade_base_unit_conversion=True,
             from_block=from_block,
             argument_filters=trader_arg_filter,
-            numeric_args_as_str=True,
         )
     )
     all_events.extend(
-        interface.get_open_short_events(
+        get_event_logs_for_db(
+            interface,
+            interface.hyperdrive_contract.events.OpenShort,
+            trade_base_unit_conversion=True,
             from_block=from_block,
             argument_filters=trader_arg_filter,
-            numeric_args_as_str=True,
         )
     )
     all_events.extend(
-        interface.get_close_short_events(
+        get_event_logs_for_db(
+            interface,
+            interface.hyperdrive_contract.events.CloseShort,
+            trade_base_unit_conversion=True,
             from_block=from_block,
             argument_filters=trader_arg_filter,
-            numeric_args_as_str=True,
         )
     )
     all_events.extend(
-        interface.get_add_liquidity_events(
+        get_event_logs_for_db(
+            interface,
+            interface.hyperdrive_contract.events.AddLiquidity,
+            trade_base_unit_conversion=True,
             from_block=from_block,
             argument_filters=provider_arg_filter,
-            numeric_args_as_str=True,
         )
     )
     all_events.extend(
-        interface.get_remove_liquidity_events(
+        get_event_logs_for_db(
+            interface,
+            interface.hyperdrive_contract.events.RemoveLiquidity,
+            trade_base_unit_conversion=True,
             from_block=from_block,
             argument_filters=provider_arg_filter,
-            numeric_args_as_str=True,
         )
     )
     all_events.extend(
-        interface.get_redeem_withdrawal_shares_events(
+        get_event_logs_for_db(
+            interface,
+            interface.hyperdrive_contract.events.RedeemWithdrawalShares,
+            trade_base_unit_conversion=True,
             from_block=from_block,
             argument_filters=provider_arg_filter,
-            numeric_args_as_str=True,

# Add to db
if len(events_df) > 0:
df_to_db(events_df, TradeEvent, db_session)
df_to_db(events_df, DBTradeEvent, db_session)
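Throughout this file, the per-event getters on the read interface (get_initialize_events, get_open_long_events, get_transfer_single_events, and so on) are replaced by a single get_event_logs_for_db helper that takes the pypechain contract event class directly along with a trade_base_unit_conversion flag. A minimal sketch of that pattern, based only on the calls visible in this diff; the import path is inferred from the relative import above, and the helper's full signature and return type are not shown in this PR, so treat those details as assumptions:

from agent0.chainsync.db.hyperdrive.event_getters import get_event_logs_for_db
from agent0.ethpy.hyperdrive import HyperdriveReadInterface


def collect_events(interface: HyperdriveReadInterface, from_block: int, wallet_addr: str | None) -> list:
    """Illustrative helper mirroring the calls in trade_events_to_db above."""
    all_events = []

    # Trade-denominated events pass trade_base_unit_conversion=True, as in the diff.
    all_events.extend(
        get_event_logs_for_db(
            interface,
            interface.hyperdrive_contract.events.OpenLong,
            trade_base_unit_conversion=True,
            from_block=from_block,
            argument_filters=None,  # the PR builds trader/provider filters earlier in the function
        )
    )

    # Raw transfer (and checkpoint) events pass trade_base_unit_conversion=False.
    if wallet_addr is not None:
        all_events.extend(
            get_event_logs_for_db(
                interface,
                interface.hyperdrive_contract.events.TransferSingle,
                trade_base_unit_conversion=False,
                from_block=from_block,
                argument_filters={"to": wallet_addr},
            )
        )
    return all_events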