Adding event queries to db (#1464)
This PR is the first of a series of PRs to support multi-pool trading in
agent0.

In this PR, we move away from exposing the `agent.wallet` object (as
we're deprecating bookkeeping on the wallet itself in Python) in favor
of using the `agent.get_positions()` function. This function (1)
queries the chain for events and adds them to a `TradeEvent` db table,
and (2) queries the `TradeEvent` table to get the wallet's current
positions.
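
Roughly, the two steps look like the sketch below. Only `trade_events_to_db` and `get_positions_from_db` are names introduced by this PR; the signatures, arguments, and `agent` attributes are illustrative assumptions rather than the actual implementation.

```python
from agent0.chainsync.db.hyperdrive import get_positions_from_db, trade_events_to_db


def get_positions_sketch(agent):
    """Hypothetical sketch of agent.get_positions(); argument names are assumptions."""
    # (1) Pull any new events for this wallet from the chain and ingest them
    #     into the TradeEvent table.
    trade_events_to_db(
        agent.interface,
        wallet_addr=agent.address,
        db_session=agent.db_session,
    )
    # (2) Read the wallet's current positions back out of the TradeEvent table.
    return get_positions_from_db(
        agent.db_session,
        wallet_addr=agent.address,
    )
```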

The `TradeEvent` table handles all events on any Hyperdrive token
(i.e., long/short/LP). There's some overlap with the `WalletDelta`
table, the main difference being that the `TradeEvent` table is lazy:
it only gets updated when `agent.get_positions()` gets called, and only
with events from that `agent`. In addition, the table handles both
trade events (e.g., `OpenLong`) and single transfer events (e.g.,
wallet-to-wallet token transfers). We can likely deprecate the
`WalletDelta` table with a special call that gathers all trade events
from a Hyperdrive pool, filling the `TradeEvent` table with every
wallet that has made a trade on the pool.
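
For orientation, a `TradeEvent` row could look roughly like the sketch below. The real model is defined in the db schema added by this PR; the column names here are assumptions inferred from the description, not the actual definition.

```python
from sqlalchemy import Column, Integer, Numeric, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class TradeEventSketch(Base):
    """Illustrative stand-in for the TradeEvent table; all columns are assumed."""

    __tablename__ = "trade_event_sketch"

    id = Column(Integer, primary_key=True)
    hyperdrive_address = Column(String, index=True)  # pool the event came from
    wallet_address = Column(String, index=True)  # wallet the event applies to
    event_type = Column(String)  # e.g., OpenLong, CloseShort, AddLiquidity, TransferSingle
    token_id = Column(String)  # long/short/LP token identifier
    token_delta = Column(Numeric)  # signed change in the token balance
    base_delta = Column(Numeric)  # signed change in the base balance
    block_number = Column(Integer, index=True)
```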

There are a couple of places that can be optimized. Currently, we query
the chain for events on every `get_positions` call (from the latest
entry in the db to the latest block). Some bookkeeping is needed to,
e.g., avoid re-reading events from the logs if a user calls
`get_positions` again on the same block.
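
One simple form of that bookkeeping would be to remember, per wallet, the last block already ingested and skip the log query when nothing new can exist. A minimal sketch, with names that are purely illustrative:

```python
# Purely illustrative; none of these names come from the PR.
_last_ingested_block: dict[str, int] = {}


def maybe_ingest_events(wallet_addr: str, current_block: int, ingest_fn) -> None:
    """Skip the chain log query if this wallet was already ingested up to current_block."""
    start_block = _last_ingested_block.get(wallet_addr, -1)
    if start_block >= current_block:
        # get_positions was already called on this block; nothing new to fetch.
        return
    ingest_fn(wallet_addr, from_block=start_block + 1, to_block=current_block)
    _last_ingested_block[wallet_addr] = current_block
```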

As a temporary fix, we also remove `agent.wallet` from remote chains,
and `get_positions` gathers all events from the remote chain each time
it's called. This will get fixed once the database is exposed in the
underlying chain object, with the remote wallet also using the
`TradeEvent` table to gather wallet positions.

Final note: the failing test here is fixed in
#1462.

## Changes
- Removed the interactive wallet in favor of using `get_positions()`.
- Added a `TradeEvent` table to the database, with supporting ingestion
(`trade_events_to_db`) and query (`get_positions_from_db` and
`get_trade_events`) interface functions (see the usage sketch after
this list).
- Changed all interactive `agent.wallet` calls to
`agent.get_positions()`.
- Renamed `contract_address` to `hyperdrive_address` in `PoolConfig` db
table.
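
Usage sketch for the new db interface (only the imported names are from this PR; the wrapper function, its arguments, and the session handling are illustrative assumptions):

```python
from agent0.chainsync.db.hyperdrive import (
    get_positions_from_db,
    get_trade_events,
    trade_events_to_db,
)


def refresh_and_query(interface, wallet_addr, db_session):
    """Ingest new events for a wallet, then read positions and raw events back."""
    # Ingest any on-chain events for this wallet into the TradeEvent table ...
    trade_events_to_db(interface, wallet_addr=wallet_addr, db_session=db_session)
    # ... then query current positions and the raw event history from the db.
    positions_df = get_positions_from_db(db_session, wallet_addr=wallet_addr)
    events_df = get_trade_events(db_session, wallet_addr=wallet_addr)
    return positions_df, events_df
```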
slundqui authored May 10, 2024
1 parent 14475b2 commit 5ef0d8b
Showing 23 changed files with 880 additions and 344 deletions.
6 changes: 3 additions & 3 deletions examples/interactive_local_hyperdrive_example.py
@@ -58,7 +58,7 @@
open_long_event_2 = hyperdrive_agent0.open_long(FixedPoint(22222))

# View current wallet
print(hyperdrive_agent0.wallet)
print(hyperdrive_agent0.get_positions())

# NOTE these calls are chainwide calls, so all pools connected to this chain gets affected.
# Advance time, accepts timedelta or seconds
@@ -72,7 +72,7 @@
maturity_time=open_long_event_1.maturity_time, bonds=open_long_event_1.bond_amount
)

agent0_longs = list(hyperdrive_agent0.wallet.longs.values())
agent0_longs = list(hyperdrive_agent0.get_positions().longs.values())
close_long_event_2 = hyperdrive_agent0.close_long(
maturity_time=agent0_longs[0].maturity_time, bonds=agent0_longs[0].balance
)
@@ -85,7 +85,7 @@

# LP
add_lp_event = hyperdrive_agent2.add_liquidity(base=FixedPoint(44444))
remove_lp_event = hyperdrive_agent2.remove_liquidity(shares=hyperdrive_agent2.wallet.lp_tokens)
remove_lp_event = hyperdrive_agent2.remove_liquidity(shares=hyperdrive_agent2.get_positions().lp_tokens)

# The above trades doesn't result in withdraw shares, but the function below allows you
# to withdrawal shares from the pool.
1 change: 1 addition & 0 deletions pyproject.toml
@@ -126,6 +126,7 @@ exclude = [".venv", ".vscode", "docs"]

[tool.isort]
line_length = 120
multi_line_output=3

[tool.ruff]
line-length = 120
40 changes: 5 additions & 35 deletions src/agent0/chainsync/analysis/data_to_analysis.py
@@ -1,15 +1,11 @@
"""Functions to gather data from postgres, do analysis, and add back into postgres"""

import logging
from decimal import Decimal
from typing import Type

import numpy as np
import pandas as pd
from sqlalchemy import exc
from sqlalchemy.orm import Session

from agent0.chainsync.db.base import Base
from agent0.chainsync.db.hyperdrive import (
CurrentWallet,
PoolAnalysis,
@@ -21,6 +17,7 @@
get_transactions,
get_wallet_deltas,
)
from agent0.chainsync.df_to_db import df_to_db
from agent0.ethpy.hyperdrive import HyperdriveReadInterface

from .calc_base_buffer import calc_base_buffer
@@ -31,33 +28,6 @@

pd.set_option("display.max_columns", None)

MAX_BATCH_SIZE = 10000


def _df_to_db(insert_df: pd.DataFrame, schema_obj: Type[Base], session: Session):
"""Helper function to add a dataframe to a database"""
table_name = schema_obj.__tablename__

# dataframe to_sql needs data types from the schema object
dtype = {c.name: c.type for c in schema_obj.__table__.columns}
# Pandas doesn't play nice with types
insert_df.to_sql(
table_name,
con=session.connection(),
if_exists="append",
method="multi",
index=False,
dtype=dtype, # type: ignore
chunksize=MAX_BATCH_SIZE,
)
# commit the transaction
try:
session.commit()
except exc.DataError as err:
session.rollback()
logging.error("Error on adding %s: %s", table_name, err)
raise err


def calc_total_wallet_delta(wallet_deltas: pd.DataFrame) -> pd.DataFrame:
"""Calculates total wallet deltas from wallet_delta for every wallet type and position.
@@ -188,7 +158,7 @@ def data_to_analysis(
# If it doesn't exist, should be an empty dataframe
latest_wallet = get_current_wallet(db_session, end_block=start_block, coerce_float=False)
current_wallet_df = calc_current_wallet(wallet_deltas_df, latest_wallet)
_df_to_db(current_wallet_df, CurrentWallet, db_session)
df_to_db(current_wallet_df, CurrentWallet, db_session)

# calculate pnl through closeout pnl
# TODO this function might be slow due to contract call on chain
@@ -217,13 +187,13 @@
# TODO do scaling tests to see the limit of this
wallet_pnl["pnl"] = pnl_df
# Add wallet_pnl to the database
_df_to_db(wallet_pnl, WalletPNL, db_session)
df_to_db(wallet_pnl, WalletPNL, db_session)

# Build ticker from wallet delta
transactions = get_transactions(db_session, start_block, end_block, coerce_float=False)
ticker_df = calc_ticker(wallet_deltas_df, transactions, pool_info)
# TODO add ticker to database
_df_to_db(ticker_df, Ticker, db_session)
df_to_db(ticker_df, Ticker, db_session)

# We add pool analysis last since this table is what's being used to determine how far the data pipeline is.
# Calculate spot price
@@ -248,4 +218,4 @@

pool_analysis_df = pd.concat([pool_info["block_number"], spot_price, fixed_rate, base_buffer], axis=1)
pool_analysis_df.columns = ["block_number", "spot_price", "fixed_rate", "base_buffer"]
_df_to_db(pool_analysis_df, PoolAnalysis, db_session)
df_to_db(pool_analysis_df, PoolAnalysis, db_session)
5 changes: 4 additions & 1 deletion src/agent0/chainsync/db/hyperdrive/__init__.py
@@ -1,6 +1,6 @@
"""Hyperdrive database utilities."""

from .chain_to_db import data_chain_to_db, init_data_chain_to_db
from .chain_to_db import data_chain_to_db, init_data_chain_to_db, trade_events_to_db
from .convert_data import (
convert_checkpoint_info,
convert_hyperdrive_transactions_for_block,
@@ -13,6 +13,7 @@
add_pool_config,
add_pool_infos,
add_transactions,
add_transfer_events,
add_wallet_deltas,
get_all_traders,
get_checkpoint_info,
@@ -23,8 +24,10 @@
get_pool_analysis,
get_pool_config,
get_pool_info,
get_positions_from_db,
get_ticker,
get_total_wallet_pnl_over_time,
get_trade_events,
get_transactions,
get_wallet_deltas,
get_wallet_pnl,
