diff --git a/README.md b/README.md
index de85243..78cbb7d 100644
--- a/README.md
+++ b/README.md
@@ -34,27 +34,33 @@ Second, make sure to install the plugins:
$ ape plugins install . --upgrade
```

-Next, prior to installing the SDK package, you have to compile the project:
+Next, prior to installing the SDK package, you need to compile the project:

```sh
$ ape compile
```

-```note
+```{note}
The SDK package relies on a soft link in [`./sdk/py/apepay/manifest.json`](./sdk/py/apepay/manifest.json)
```

Lastly, install the SDK package via:

```sh
-$ poetry install
+$ pip install .
+```
+
+or, for an editable (development) install, do:
+
+```sh
+$ pip install -e .
```

Then you are ready to contribute!

### Setup (JS)

-In order to contribute to the JS SDK and React component library, or to build the demo app, you need to first follow the [Python Setup instructions](#setup-python) to compile the smart contract package.
+In order to contribute to the JS packages, you need to first follow the [Python Setup instructions](#setup-python) to compile the smart contract package.

Next, you need install the node packages for development:

@@ -108,7 +114,7 @@ To deploy a StreamManager (for testing purposes), run:
```sh
$ ape run deploy manager [TOKEN_ADDRESS [...]]

-# Or if `ape tokens` is installed with a valid tokenlist
+# Or if `ape tokens` is installed (with a valid tokenlist)
$ ape run deploy manager [TOKEN_SYMBOL [...]]
```

@@ -124,26 +130,30 @@ To deploy a Token (for testing use only), run:
$ ape run deploy token
```

-```note
+```{warning}
This test token has an unauthenticated mint, please do not use in production!
```

To run the demo ApePay cluster daemon, first run a node like `anvil`:

```sh
-$ anvil --derivation-path "m/44'/60'/0'/" --block-time 1 --prune-history
+$ anvil --block-time 1 --prune-history
```

-**NOTE**: the `--derivation-path` flag makes ape's test accounts match anvil's
-
-Then run the daemon:
+Then run the example Silverback app:

```sh
-$ silverback run scripts.daemon:app --network ::foundry --account TEST::0
+$ silverback run bots.example:app
```

After that, it's suggested to start `ape console` and create a stream to watch the daemon react.

+We also provide a simulation you can run instead using:
+
+```sh
+$ ape run demo
+```
+
### Publishing

Given the monorepo structure, it's a bit more challenging to distribute all the packages in this repo.

@@ -154,23 +164,19 @@ TBD

#### Python SDK

-To publish the Python package, there are 5 steps.
+To publish the Python package, there are 4 steps.

```sh
# 1. Install everything
-$ poetry install`
+$ pip install .[dev]
# 2. Compile the package manifest
-$ ape compile
-# 3. Copy the package manifest to the Python SDK folder
-$ cp .build/__local__.json sdk/py/apepay/manifest.json
-# 4. Build the Python SDK with Poetry
-$ poetry build
-# 5. Publish the package
-$ poetry publish
+$ ape compile -f
+# 3. Build the Python SDK
+$ python -m build
+# 4. Publish the package with twine
+$ twine upload dist/*
```
-**NOTE**: make sure to revision the package before publishing, or it will fail.
- #### JavaScript SDK and React component library To publish the JS SDK, do the following: diff --git a/ape-config.yaml b/ape-config.yaml index 9fa6389..875fb12 100644 --- a/ape-config.yaml +++ b/ape-config.yaml @@ -30,3 +30,13 @@ deployments: mainnet: *releases optimism: mainnet: *releases + +test: + gas: + exclude: &exclude-mocks + - contract_name: Test* + coverage: + exclude: *exclude-mocks + reports: + terminal: + verbose: true diff --git a/bots/example.py b/bots/example.py index eeb9512..dd33469 100644 --- a/bots/example.py +++ b/bots/example.py @@ -1,7 +1,5 @@ import os -from collections import defaultdict -from ape.types import AddressType from silverback import SilverbackApp from apepay import Stream, StreamManager @@ -12,24 +10,20 @@ # NOTE: You would probably want to index your db by network and deployment address, # if you were operating on multiple networks and/or deployments (for easy lookup) -db: defaultdict[AddressType, list[Stream]] = defaultdict(list) +db: dict[int, Stream] = dict() # TODO: Migrate to `app.state.db` when feature becomes available @app.on_startup() async def load_db(_): for stream in sm.active_streams(): - while len(db[stream.creator]) < stream.stream_id: - db[stream.creator].append(None) # Fill with empty values - assert stream.stream_id == len(db[stream.creator]) - db[stream.creator].append(stream) + db[stream.id] = stream @sm.on_stream_created(app) async def grant_product(stream): - assert stream.stream_id == len(db[stream.creator]) - db[stream.creator].append(stream) - print(f"provisioning product for {stream.creator}") + db[stream.id] = stream + print(f"provisioning products: {stream.products}") return stream.time_left @@ -37,12 +31,12 @@ async def grant_product(stream): async def update_product_funding(stream): # NOTE: properties of stream have changed, you may not need to handle this, but typically you # would want to update `stream.time_left` in db for use in user Stream life notifications - db[stream.creator][stream.stream_id] = stream + db[stream.id] = stream return stream.time_left @sm.on_stream_cancelled(app) async def revoke_product(stream): print(f"unprovisioning product for {stream.creator}") - db[stream.creator][stream.stream_id] = None + db[stream.id] = None return stream.time_left diff --git a/contracts/StreamFactory.vy b/contracts/StreamFactory.vy index 0a17ec1..82f4e9a 100644 --- a/contracts/StreamFactory.vy +++ b/contracts/StreamFactory.vy @@ -16,24 +16,35 @@ BLUEPRINT: public(immutable(address)) deployments: public(HashMap[address, address]) +event ManagerCreated: + owner: indexed(address) + manager: address + accepted_tokens: DynArray[address, 20] + validators: DynArray[address, 10] + + @deploy def __init__(blueprint: address): BLUEPRINT = blueprint @external -def create(validators: DynArray[address, 10], accepted_tokens: DynArray[address, 20]) -> address: - #assert self.deployments[msg.sender] == empty(address) # dev: only one deployment allowed - - deployment: address = create_from_blueprint( +def create( + accepted_tokens: DynArray[address, 20] = [], + validators: DynArray[address, 10] = [], + min_stream_time: uint256 = ONE_HOUR, +) -> address: + manager: address = create_from_blueprint( # dev: only one deployment allowed BLUEPRINT, msg.sender, # Only caller can create - ONE_HOUR, # Safety parameter (not configurable) + min_stream_time, # Safety parameter for new streams validators, accepted_tokens, # whatever caller wants to accept salt=convert(msg.sender, bytes32), # Ensures unique deployment per caller code_offset=3, ) - 
self.deployments[msg.sender] = deployment + self.deployments[msg.sender] = manager + + log ManagerCreated(msg.sender, manager, accepted_tokens, validators) - return deployment + return manager diff --git a/contracts/StreamManager.vy b/contracts/StreamManager.vy index 4bf5e30..64abc87 100644 --- a/contracts/StreamManager.vy +++ b/contracts/StreamManager.vy @@ -1,259 +1,539 @@ # pragma version 0.4.0 - """ @title StreamManager @author ApeWorX LTD. -@dev The Stream contract is owned by `owner`, who is the recipient of all - streams created by this contract. `owner` can specify any number of payment - tokens that they can accept as a stream. Anyone can create a stream - targeting the `owner`, as long as it is one of the tokens that `owner` - has specified as accepting. Streams can be cancelled after - `MIN_STREAM_LIFE` has elapsed, and can be backdated if needed. - - The purpose of the streams is usually as an optimistic payment method for - an off-chain good or service, so the security properties of this contract - reflect that. As such, in all extraordinary situations, the `owner` should - have the ability or right to create and enforce the terms that the payment - is for, this contract simply streamlines the creation and payment of those - defined goods or services. +@dev The Stream contract is owned by `controller`, who is the recipient of all streams created by + this contract. The `controller` can specify any number of payment tokens that they can be + accepted for streaming. Anyone can create a stream targeting the `controller`, as long as it is + one of the tokens that `controller` has specified as accepting. + + Streams can only be successfully created after passing through a set of payment term Validator + contracts. Validators can do any arbitrary logic on the parameters of an incoming stream, and + return a computed streaming rate that is aggregated across all the validators and becomes the + streaming rate that the Stream will vest funds to the `controller` at. + + Anyone (not just the stream `owner`) can add more paid time to a Stream. Streams can be cancel- + led by the `owner` at any point after `MIN_STREAM_LIFE` has elapsed, which is a parameter de- + signed to enforce the amount it takes to provision the product or service being facilitated by + this contract. + + The purpose of these Streams is usually as an optimistic payment method for an off-chain good + or service, so the security properties of this contract reflect that. As such, in all extra- + ordinary situations, the `controller` should have the ability or right to create and enforce + the terms that the payment is for, this contract simply streamlines the creation and payment of + those defined goods or services. """ - from ethereum.ercs import IERC20 from . 
import Validator +# List of Validator contracts to check for Stream creation, funding, migrations +# TODO: Use `Set` to ensure uniqueness, when available MAX_VALIDATORS: constant(uint8) = 10 validators: public(DynArray[Validator, MAX_VALIDATORS]) - -MAX_REASON_SIZE: constant(uint16) = 1024 MIN_STREAM_LIFE: public(immutable(uint256)) - +MAX_PRODUCTS: constant(uint8) = 20 struct Stream: + owner: address token: IERC20 - amount_per_second: uint256 - max_stream_life: uint256 funded_amount: uint256 - start_time: uint256 - last_pull: uint256 - reason: Bytes[MAX_REASON_SIZE] - -num_streams: public(HashMap[address, uint256]) -streams: public(HashMap[address, HashMap[uint256, Stream]]) + expires_at: uint256 + last_update: uint256 + last_claim: uint256 + products: DynArray[bytes32, MAX_PRODUCTS] -owner: public(address) token_is_accepted: public(HashMap[IERC20, bool]) +# Global index of Streams +num_streams: public(uint256) +streams: public(HashMap[uint256, Stream]) + +# Service Provider (has all Capabilities, also beneficiary of funding) +controller: public(address) +new_controller: public(address) +control_transfer_proposed: public(uint256) +CONTROLLER_ACCEPTANCE_DELAY: constant(uint256) = 7 * 24 * 60 * 60 # 1 week + +event NewControllerProposed: + old_controller: indexed(address) + new_controller: indexed(address) + + +event NewControllerAccepted: + old_controller: indexed(address) + new_controller: indexed(address) + + +# Delegated Abilities +flag Ability: + MODFIY_TOKENS + MODFIY_VALIDATORS + MODFIY_ACCESS + CANCEL_STREAMS + +capabilities: public(HashMap[address, Ability]) + event StreamCreated: + stream_id: indexed(uint256) + owner: indexed(address) token: indexed(IERC20) - creator: indexed(address) + funded_amount: uint256 + time_left: uint256 + products: DynArray[bytes32, MAX_PRODUCTS] + + +event StreamOwnershipUpdated: stream_id: indexed(uint256) - amount_per_second: uint256 - start_time: uint256 - reason: Bytes[MAX_REASON_SIZE] + old_owner: indexed(address) + new_owner: indexed(address) event StreamFunded: - creator: indexed(address) stream_id: indexed(uint256) - amount_added: uint256 + funder: indexed(address) + funded_amount: uint256 + time_left: uint256 -event StreamCancelled: - creator: indexed(address) +event StreamClaimed: stream_id: indexed(uint256) - amount_locked: uint256 - reason: Bytes[MAX_REASON_SIZE] + claimer: indexed(address) + is_expired: indexed(bool) + claim_amount: uint256 -event Claimed: - creator: indexed(address) +event StreamCancelled: stream_id: indexed(uint256) - stream_exhausted: indexed(bool) - claimed_amount: uint256 + canceller: indexed(address) + reason: indexed(bytes32) + refund_amount: uint256 @deploy def __init__( - owner: address, + controller: address, min_stream_life: uint256, # timedelta in seconds - validators: DynArray[Validator, MAX_VALIDATORS], accepted_tokens: DynArray[IERC20, 20], + validators: DynArray[Validator, MAX_VALIDATORS], ): - self.owner = owner + self.controller = controller MIN_STREAM_LIFE = min_stream_life - self.validators = validators for token: IERC20 in accepted_tokens: self.token_is_accepted[token] = True + self.validators = validators + @external -def set_validators(validators: DynArray[Validator, MAX_VALIDATORS]): - assert msg.sender == self.owner - self.validators = validators +def transfer_control(new_controller: address): + """ + @dev Begin the transfer of the `controller` role to `new_controller`. + @notice This action is very dangerous! Can only be performed by the `controller`. 
After the + transition is initiated, the `new_controller` must wait `CONTROLLER_ACCEPTANCE_DELAY` + before they are able to accept via `accept_control()`. + @param new_controller The address of the proposed new `controller` for this contract. + """ + # NOTE: can revoke transfer at any time calling this method with `self.controller` + assert msg.sender == self.controller # dev: not controller + + self.new_controller = new_controller + self.control_transfer_proposed = block.timestamp + + log NewControllerProposed(msg.sender, new_controller) @external -def add_token(token: IERC20): - assert msg.sender == self.owner - self.token_is_accepted[token] = True +def accept_control(): + """ + @dev Accept `controller` role and responsibilities. + @notice This action is very dangerous! Can only be accepted by `new_controller` after waiting + for a period of `CONTROLLER_ACCEPTANCE_DELAY`. Once accepted, all Streams claimed in the + future will be routed to the new `controller` instead of the previous one. They also will + have full and unconditional control over access capabilities for any other address. Note + that `controller` can prevent this action at any time up to `CONTROLLER_ACCEPTANCE_DELAY` + by executing `transfer_control()` with themselves (or a different address) as the proposed. + """ + assert msg.sender == self.new_controller # dev: not proposed controller + assert block.timestamp - self.control_transfer_proposed >= CONTROLLER_ACCEPTANCE_DELAY + + self.controller = msg.sender + self.new_controller = empty(address) + + log NewControllerAccepted(self.controller, msg.sender) @external -def remove_token(token: IERC20): - assert msg.sender == self.owner - self.token_is_accepted[token] = False +def set_capabilities(account: address, capabilities: Ability): + """ + @dev Set the `capabilities` of `account`. + @notice This action is very dangerous! Can only be executed by the `controller`, or by another + account that has the `MODFIY_ACCESS` capability. Please note that any capabilities granted + go into effect immediately. Any capability can be reverted at any time, but please note + if granting the `MODFIY_ACCESS` capability to any account, they will have the capability to + themselves grant that (and any other role) to any other account, presenting a potential + DDoS risk to the `controller`, and of course other dangerous capabilities that could lead + to security risks of other natures (such as malcious tokens, validators which cause DDoS). + @param account The address of the account to grant or revoke `capabilities` to. + @param capabilities The new set of abilities that `account` should have access to. + """ + if Ability.MODFIY_ACCESS not in self.capabilities[msg.sender]: + assert msg.sender == self.controller # dev: insufficient capability + + self.capabilities[account] = capabilities @external -def create_stream( - token: IERC20, - amount_per_second: uint256, - reason: Bytes[MAX_REASON_SIZE] = b"", - start_time: uint256 = block.timestamp, -) -> uint256: - assert self.token_is_accepted[token] # dev: token not accepted - assert start_time <= block.timestamp # dev: start time in future +def set_validators(validators: DynArray[Validator, MAX_VALIDATORS]): + """ + @dev Assign the set of validators that should be executed on Stream creation. + @notice This can only be called by the controller or someone with the MODFIY_VALIDATORS + capability. Applications should ensure that the array contains all unique entries, lest + unpredictable or actively harmful conditions may happen. 
+ @param validators The array of validators to assign for this contract. + """ + if Ability.MODFIY_VALIDATORS not in self.capabilities[msg.sender]: + assert msg.sender == self.controller # dev: insufficient capability + + # TODO: Replace this logic with Set when available in Vyper + last_validator_uint: uint256 = convert(empty(address), uint256) + for validator: Validator in validators: + # NOTE: Sort in ascending order to ensure uniqueness of set + assert convert(validator.address, uint256) > last_validator_uint # dev: duplicate validator detected + last_validator_uint = convert(validator.address, uint256) + + self.validators = validators - funded_amount: uint256 = staticcall token.allowance(msg.sender, self) - if funded_amount == max_value(uint256): - funded_amount = staticcall token.balanceOf(msg.sender) - max_stream_life: uint256 = max_value(uint256) +@external +def set_token_accepted(token: IERC20, is_accepted: bool): + """ + @dev Set whether `token` is accepted by this contract. + @notice This can only be called by the controller or someone with the MODFIY_TOKENS capability. + *Please* make sure to be careful with the decimals of the token you add, since those with + very small values can cause problems when doing math with streaming rates. + @param token An ERC20-compatible token to accept or reject. + @param is_accepted A boolean value that controls whether `token` is accepted or rejected. + """ + if Ability.MODFIY_TOKENS not in self.capabilities[msg.sender]: + assert msg.sender == self.controller # dev: insufficient capability + + self.token_is_accepted[token] = is_accepted + + +def _compute_stream_life( + funder: address, + token: IERC20, + amount: uint256, + products: DynArray[bytes32, MAX_PRODUCTS], +) -> uint256: + stream_life: uint256 = 0 for validator: Validator in self.validators: - # NOTE: Validator either raises or returns a max stream life to use - max_stream_life = min( - max_stream_life, - extcall validator.validate(msg.sender, token, amount_per_second, reason), + # NOTE: Validator either raises or returns a stream life for the products based on funding + stream_life = max( + stream_life, + extcall validator.validate(msg.sender, token, amount, products), ) - assert max_stream_life >= funded_amount // amount_per_second # dev: max stream life too small + return stream_life - prefunded_stream_life: uint256 = max(MIN_STREAM_LIFE, block.timestamp - start_time) - assert max_stream_life >= prefunded_stream_life # dev: prefunded stream life too large - assert funded_amount >= prefunded_stream_life * amount_per_second # dev: not enough funds + +@external +def compute_stream_life( + funder: address, + token: IERC20, + amount: uint256, + products: DynArray[bytes32, MAX_PRODUCTS], +) -> uint256: + """ + @dev Compute the stream life for the given stream parameters using this manager's Validators + @notice Computed value is only a suggestion, but can be useful to set the value of + `min_stream_life` in `create_stream()` or `fund_stream()`. + @param token An ERC20-compatible token that this contract allows to create streams for. + @param amount The amount of `token` that should be pre-funded for this stream. + @param products An array of the product codes this stream should pay for. The product codes are + treated as application-specific parameters and have no special treatment by this contract. + Typically, validators are employted to do the specific processing necessary to compute the + stream rate for the newly created stream. 
+    @return stream_life The amount of time that the given Stream should exist for, computed
+        using the `amount` that `funder` has provided to pay for `products`, as well as any
+        additional logical conditions specified by the connected set of validators.
+    """
+    return self._compute_stream_life(funder, token, amount, products)
+
+
+@external
+def create_stream(
+    token: IERC20,
+    amount: uint256,
+    products: DynArray[bytes32, MAX_PRODUCTS],
+    min_stream_life: uint256 = MIN_STREAM_LIFE,
+) -> uint256:
+    """
+    @dev Create a streaming payment to `controller` using the given `token`, pre-funded with
+        a given `amount` of `token`, in exchange for the provisioning of `products` to the caller,
+        for at least `min_stream_life` length of time.
+    @notice This function starts the lifecycle of the Stream data structure. It cannot be revoked
+        once called until at least `MIN_STREAM_LIFE` has passed. The stream's parameters flow
+        through a series of `validators` to check for their validity and compute the cost of
+        provisioning the `products` in the given `token`. The `token` must be accepted by this
+        contract.
+    @param token An ERC20-compatible token that this contract allows to create streams for.
+    @param amount The amount of `token` that should be pre-funded for this stream.
+    @param products An array of the product codes this stream should pay for. The product codes are
+        treated as application-specific parameters and have no special treatment by this contract.
+        Typically, validators are employed to do the specific processing necessary to compute the
+        stream rate for the newly created stream.
+    @param min_stream_life A safety parameter designed to ensure that the computed stream rate does
+        not exceed the value of `amount / min_stream_life` tokens per second. Defaults to
+        `MIN_STREAM_LIFE` and is validated not to be below that amount, which is the minimum length
+        *any* new stream can be created for (based on the time it takes to provision the `products`).
+    @return stream_id The globally unique identifier for the newly created stream.
+ """ + assert min_stream_life >= MIN_STREAM_LIFE # dev: stream life not long enough + assert self.token_is_accepted[token] # dev: token not accepted assert extcall token.transferFrom( # dev: transfer fail - msg.sender, self, funded_amount, default_return_value=True + msg.sender, self, amount, default_return_value=True ) - stream_id: uint256 = self.num_streams[msg.sender] - self.streams[msg.sender][stream_id] = Stream({ + # Check all validators for any unacceptable or incorrect stream parameters, compute stream life + stream_life: uint256 = self._compute_stream_life(msg.sender, token, amount, products) + + # Ensure stream life parameters are acceptable to caller + assert stream_life >= min_stream_life # dev: stream too expensive + + # Create stream data structure and start streaming + stream_id: uint256 = self.num_streams + self.streams[stream_id] = Stream({ + owner: msg.sender, token: token, - amount_per_second: amount_per_second, - max_stream_life: max_stream_life, - funded_amount: funded_amount, - start_time: start_time, - last_pull: start_time, - reason: reason, + funded_amount: amount, + expires_at: block.timestamp + stream_life, + last_update: block.timestamp, + last_claim: block.timestamp, + products: products, }) - self.num_streams[msg.sender] = stream_id + 1 + self.num_streams = stream_id + 1 - log StreamCreated(token, msg.sender, stream_id, amount_per_second, start_time, reason) + log StreamCreated(stream_id, msg.sender, token, amount, stream_life, products) return stream_id +@external +def set_stream_owner(stream_id: uint256, new_owner: address): + """ + @dev Update the `owner` of the Stream identified by `stream_id` to `new_owner`. + @notice This action is dangerous! The `new_owner` of the Stream has the unique ability of being + able to cancel the Stream at any time, leading to potential service interruptions. This + action takes effect immediately and can only be performed by the current `owner`. + @param stream_id The identifier of the Stream to transition ownership from. + @param new_owner The address of the new `owner` of the Stream that should be assigned. + """ + assert msg.sender == self.streams[stream_id].owner + self.streams[stream_id].owner = new_owner + + log StreamOwnershipUpdated(stream_id, msg.sender, new_owner) + + @view -def _amount_unlocked(creator: address, stream_id: uint256) -> uint256: - return min( - ( - (block.timestamp - self.streams[creator][stream_id].last_pull) - * self.streams[creator][stream_id].amount_per_second - ), - self.streams[creator][stream_id].funded_amount, - ) +def _amount_claimable(stream_id: uint256) -> uint256: + expires_at: uint256 = self.streams[stream_id].expires_at + if block.timestamp >= expires_at: + return self.streams[stream_id].funded_amount # All funds vested + # NOTE: Would lead to >100% vested in return stmt if not explictly limited here + last_claim: uint256 = self.streams[stream_id].last_claim + assert last_claim < expires_at, UNREACHABLE # dev: cannot claim in the future + # NOTE: div/0 or Underflow if `last_claim >= expires_at` + + return ( + # % of funds that have vested so far, since after last claim + self.streams[stream_id].funded_amount + * (block.timestamp - last_claim) + // (expires_at - last_claim) + ) @view @external -def amount_unlocked(creator: address, stream_id: uint256) -> uint256: - return self._amount_unlocked(creator, stream_id) +def amount_claimable(stream_id: uint256) -> uint256: + """ + @dev Obtain the amount of `token` that can be claimed from `stream_id`. + @notice This is a utility function. 
+ @param stream_id The identifier of the Stream to check for the amount of `token` that can be + claimed. + @return amount The total amount of `token` that can be claimed at this moment in time. + """ + return self._amount_claimable(stream_id) @view -def _time_left(creator: address, stream_id: uint256) -> uint256: - unlocked: uint256 = self._amount_unlocked(creator, stream_id) - return ( - (self.streams[creator][stream_id].funded_amount - unlocked) - // self.streams[creator][stream_id].amount_per_second - ) +def _time_left(stream_id: uint256) -> uint256: + if self.streams[stream_id].funded_amount == 0: + return 0 + + expires_at: uint256 = self.streams[stream_id].expires_at + if expires_at < block.timestamp: + return 0 # No time left + + return expires_at - block.timestamp @view @external -def time_left(creator: address, stream_id: uint256) -> uint256: - return self._time_left(creator, stream_id) +def time_left(stream_id: uint256) -> uint256: + """ + @dev Obtain the amount of time that is left based on the streaming rate of Stream `stream_id`. + @notice This is a utility function. + @param stream_id The identifier of the Stream to check for the amount of time left. + @return amount The total amount of time left in Stream `stream_id`. + """ + return self._time_left(stream_id) -@external -def add_funds(creator: address, stream_id: uint256, amount: uint256) -> uint256: - token: IERC20 = self.streams[creator][stream_id].token - assert extcall token.transferFrom(msg.sender, self, amount, default_return_value=True) - self.streams[creator][stream_id].funded_amount += amount - - time_left: uint256 = self._time_left(creator, stream_id) - assert ( - (time_left + block.timestamp - self.streams[creator][stream_id].start_time) - <= self.streams[creator][stream_id].max_stream_life - ) +def _claim_stream(stream_id: uint256) -> uint256: + # NOTE: Anyone can claim a stream (for the Controller) + funded_amount: uint256 = self.streams[stream_id].funded_amount + claim_amount: uint256 = self._amount_claimable(stream_id) + self.streams[stream_id].funded_amount = funded_amount - claim_amount + self.streams[stream_id].last_claim = block.timestamp - log StreamFunded(creator, stream_id, amount) - return time_left + token: IERC20 = self.streams[stream_id].token + assert extcall token.transfer(self.controller, claim_amount, default_return_value=True) + log StreamClaimed(stream_id, msg.sender, funded_amount == claim_amount, claim_amount) -@view -def _stream_is_cancelable(creator: address, stream_id: uint256) -> bool: - # Creator needs to wait `MIN_STREAM_LIFE` to cancel a stream - return self.streams[creator][stream_id].start_time + MIN_STREAM_LIFE <= block.timestamp + return claim_amount -@view @external -def stream_is_cancelable(creator: address, stream_id: uint256) -> bool: - return self._stream_is_cancelable(creator, stream_id) +def claim_stream(stream_id: uint256) -> uint256: + """ + @dev Claim all vested tokens from Stream `stream_id` and transfer to `controller`. + @notice This function is unauthenticated and can be called by anyone. This can allow any number + of use cases such as allowing automated revenue collection and optimization, or to give the + gift of gas money to `controller`! + @param stream_id The identifier of the Stream to claim vested tokens for. + @return claim_amount The amount of tokens that have been transferred to `controller`. 
+    """
+    return self._claim_stream(stream_id)
@external
-def cancel_stream(
-    stream_id: uint256,
-    reason: Bytes[MAX_REASON_SIZE] = b"",
-    creator: address = msg.sender,
-) -> uint256:
-    if msg.sender == creator:
-        assert self._stream_is_cancelable(creator, stream_id)
-    else:
-        # Owner can cancel at any time
-        assert msg.sender == self.owner
+def fund_stream(stream_id: uint256, amount: uint256, min_stream_life: uint256 = 0) -> uint256:
+    """
+    @dev Add `amount` tokens worth of funding to Stream `stream_id`, to extend its `time_left`.
+    @notice This function is unauthenticated and can be called by anyone. This can allow any
+        number of use cases such as allowing service self-payment, handling partial refunds or
+        settling disputes, or simply gifting users more time!
+    @param stream_id The identifier of the Stream to add `amount` of tokens for.
+    @param amount The total amount of tokens to add for Stream `stream_id`.
+    @return time_left The new amount of time left in Stream `stream_id`.
+    """
+    # NOTE: Anyone can fund a stream
+    token: IERC20 = self.streams[stream_id].token
+    assert self.token_is_accepted[token]  # dev: token not accepted
+    assert extcall token.transferFrom(
+        msg.sender, self, amount, default_return_value=True
+    )
+    assert block.timestamp < self.streams[stream_id].expires_at
-    funded_amount: uint256 = self.streams[creator][stream_id].funded_amount
-    amount_locked: uint256 = funded_amount - self._amount_unlocked(creator, stream_id)
-    assert amount_locked > 0  # NOTE: reverts if stream doesn't exist, or already cancelled
-    self.streams[creator][stream_id].funded_amount = funded_amount - amount_locked
+    # NOTE: Stream claims must be up-to-date to ensure that math is correct in `_amount_claimable()`
+    #       This is because the stream rate may change in `_compute_stream_life()`
+    self._claim_stream(stream_id)
+    # NOTE: After claim, apply all remaining funds to updated stream life (alongside amount)
+    funded_amount: uint256 = amount + self.streams[stream_id].funded_amount
-    token: IERC20 = self.streams[creator][stream_id].token
-    assert extcall token.transfer(creator, amount_locked, default_return_value=True)
+    # Check all validators for any unacceptable or incorrect stream parameters, compute stream life
+    products: DynArray[bytes32, MAX_PRODUCTS] = self.streams[stream_id].products
+    stream_life: uint256 = self._compute_stream_life(msg.sender, token, funded_amount, products)
-    log StreamCancelled(creator, stream_id, amount_locked, reason)
+    # Ensure computed stream life is acceptable to caller (rate may be different)
+    # NOTE: Use this to ensure stream update deadline too
+    assert stream_life >= min_stream_life
-    return funded_amount - amount_locked
+    # Modify stream using new stream life (which may use a different streaming rate)
+    self.streams[stream_id].expires_at = block.timestamp + stream_life
+    self.streams[stream_id].funded_amount = funded_amount
+    # NOTE: Use original argument `amount` and not aggregate with leftover `funded_amount`
+    log StreamFunded(stream_id, msg.sender, amount, stream_life)
-@external
-def claim(creator: address, stream_id: uint256) -> uint256:
-    funded_amount: uint256 = self.streams[creator][stream_id].funded_amount
-    claim_amount: uint256 = self._amount_unlocked(creator, stream_id)
-    self.streams[creator][stream_id].funded_amount = funded_amount - claim_amount
-    self.streams[creator][stream_id].last_pull = block.timestamp
+    return stream_life
-    token: IERC20 = self.streams[creator][stream_id].token
-    assert extcall token.transfer(self.owner, claim_amount, default_return_value=True)
-    log Claimed(creator, stream_id, funded_amount == claim_amount, claim_amount)
+# TODO: `modify_stream` change `products` and sets `last_update` (authed by `owner` prevents `cancel()` DDoS)
-    return claim_amount
+
+@view
+def _stream_is_cancelable(stream_id: uint256) -> bool:
+    # Stream owner needs to wait `MIN_STREAM_LIFE` to cancel a stream
+    return (
+        block.timestamp < self.streams[stream_id].expires_at  # is not expired yet
+        and self.streams[stream_id].funded_amount > 0  # has not already been cancelled
+        # Last update to stream parameters had a chance to be facilitated
+        and (block.timestamp - self.streams[stream_id].last_update) >= MIN_STREAM_LIFE
+    )
+
+
+@view
+@external
+def stream_is_cancelable(stream_id: uint256) -> bool:
+    """
+    @dev Check if Stream `stream_id` is able to be cancelled, after `MIN_STREAM_LIFE` has elapsed.
+    @notice This is a utility function.
+    @param stream_id The identifier of the Stream to check for the ability to cancel.
+    @return is_cancelable Whether Stream `stream_id` is allowed to be cancelled.
+    """
+    return self._stream_is_cancelable(stream_id)
+
+
+@external
+def cancel_stream(stream_id: uint256, reason: bytes32 = empty(bytes32)) -> uint256:
+    """
+    @dev Suspend the streaming of tokens to `controller` for any given `reason`, sending a
+        `refund_amount` back to the `owner` of Stream `stream_id`.
+    @notice This function can either be called by the `owner` of the stream after waiting
+        `MIN_STREAM_LIFE`, or immediately by the `controller` or any other account that has the
+        `CANCEL_STREAMS` ability. Note that calling this function also performs a final claim on
+        the Stream to the `controller`. No claims or cancellations should be able to be performed
+        after this has been called once.
+    @param stream_id The identifier of the Stream to cancel for `reason`.
+    @param reason A code explaining why the stream was cancelled. Primarily intended for telling
+        the `owner` (or any other impacted party) for what reason the stream was closed, but can
+        also be used for the `owner` to communicate to the `controller`. Defaults to an empty
+        sequence of 32 bytes meaning "no reason".
+    @return refund_amount The amount of `token` that was refunded to the `owner` of the Stream.
+ """ + stream_owner: address = self.streams[stream_id].owner + if msg.sender == stream_owner: + # Creator needs to wait `MIN_STREAM_LIFE` since last update to cancel a stream + assert self._stream_is_cancelable(stream_id) # dev: stream not cancellable yet + + elif Ability.CANCEL_STREAMS not in self.capabilities[msg.sender]: + # Controller (or those with capability to cancel) can cancel at any time + assert msg.sender == self.controller # dev: insufficient capability + + # Compute refund amount and subtract it from stream balance + funded_amount: uint256 = self.streams[stream_id].funded_amount + # NOTE: reverts if stream doesn't exist, or has already been cancelled, or is expires + refund_amount: uint256 = funded_amount - self._amount_claimable(stream_id) + assert refund_amount > 0 # dev: stream already cancelled or completed + self.streams[stream_id].funded_amount = funded_amount - refund_amount + + # Stream is now considered expired, set expiry to right now + self.streams[stream_id].expires_at = block.timestamp + + # Refund Stream owner (not canceller) + token: IERC20 = self.streams[stream_id].token + assert extcall token.transfer(stream_owner, refund_amount, default_return_value=True) + + log StreamCancelled(stream_id, msg.sender, reason, refund_amount) + + return refund_amount diff --git a/contracts/Validator.json b/contracts/Validator.json index c43a5df..9740ad9 100644 --- a/contracts/Validator.json +++ b/contracts/Validator.json @@ -4,11 +4,11 @@ "name": "validate", "stateMutability": "nonpayable", "inputs": [ - { "name": "creator", "type": "address" }, + { "name": "funder", "type": "address" }, { "name": "token", "type": "address" }, - { "name": "amount_per_second", "type": "uint256" }, - { "name": "reason", "type": "bytes" } + { "name": "amount", "type": "uint256" }, + { "name": "products", "type": "bytes32[]" } ], - "outputs": [{ "name": "max_stream_life", "type": "uint256" }] + "outputs": [{ "name": "stream_life", "type": "uint256" }] } ] diff --git a/contracts/Validator.vyi b/contracts/Validator.vyi index d28216f..b0db8e4 100644 --- a/contracts/Validator.vyi +++ b/contracts/Validator.vyi @@ -1,13 +1,60 @@ -from ethereum.ercs import IERC20 +""" +@title StreamManager Validation Interface Specification +@dev + Validators for the ApePay StreamManager are an important part of how your system can be + customized for the requirements of your product, organization, and operation of payments. + There are several use cases for a Validator, and of course any use case where you need to + "hook" into the pre-payment validation point can be covered. + + Here are some use cases for a Validator: -MAX_REASON_SIZE: constant(uint16) = 1024 + 1. Check if a stream funder is authorized to do so, based on a pre-vetted list + (see [Allowlist.vy](./validators/Allowlist.vy)) + 2. Check if a stream funder is a known bad actor, against an internal or external + list (see [Denylist.vy](./validators/Denylist.vy)) + 3. Check if the amount of time being funded is too large (e.g. operating a beta) + 4. Compute the `stream_life` for a given set of products (not all validators need to) + 5. Update and track any internal state for other purposes (airdrops, NFT mints, etc.) + 6. Emit more logs (for other Silverback services to hook onto) + +@notice + Implement this interface with any additional checks that should be performed on stream + creation, funding or migration events in order to block certain behaviors or modify the stream + time to a specific limit, as in the case of a pre-release or sunsetting product. 
+""" +from ethereum.ercs import IERC20 @external def validate( - creator: address, + funder: address, token: IERC20, - amount_per_second: uint256, - reason: Bytes[MAX_REASON_SIZE], + amount: uint256, + products: DynArray[bytes32, 20], ) -> uint256: + """ + @notice + Validate that the proposed Stream action initiated by `creator` and funded by `amount` of + `token` for the associated `products` is properly formed, allowed, and meets any other + application-specific behaviors required to fund the stream. Should either return 0 or + the subtotal of the cost of the subset of `products` that the validator works with. + @dev + Method can make modifications and external calls based on stream properties. + This method should raise if and only if the input matches the domain it is checking, and + violates any rules from inside that domain, otherwise will cause a Denial-of-Service issue. + Validator should not reject input that doesn't match it's domain of checks it is performing + or it will reject potentially valid streams. + Validator should return only the subtotal for all products within it's domain. + @param creator The owner of the Stream that has the action being performed against. + @param token The ERC20 token that is being used to fund the stream + @param amount The amount of `token` that is funding the stream. Can be used to ensure product + time limits are within acceptable bounds by computing a rate from the subtotal price for + the subset of matching products this validator handles. + @param products The array of product codes or reasons for the triggering of the action. Note + that the encoding of this value is entirely application-specific, and multiple supported + encodings could be potentially be supported by other connected Validators. + @return stream_life The amount of time that the given Stream should be exist for, computed + using the `amount` that `creator` has provided to pay for `products, as well as any + additional logical conditions specified by this validator. + """ ... diff --git a/contracts/test/TestToken.vy b/contracts/test/TestToken.vy index 1838119..1f6efe3 100644 --- a/contracts/test/TestToken.vy +++ b/contracts/test/TestToken.vy @@ -16,7 +16,7 @@ def __init__(): @external def transfer(receiver: address, amount: uint256) -> bool: - self.balanceOf[msg.sender] -= amount + self.balanceOf[msg.sender] -= amount # dev: not enough balance self.balanceOf[receiver] += amount # NOTE: No event return True @@ -31,8 +31,8 @@ def approve(spender: address, amount: uint256) -> bool: @external def transferFrom(sender: address, receiver: address, amount: uint256) -> bool: - self.allowance[sender][msg.sender] -= amount - self.balanceOf[sender] -= amount + self.allowance[sender][msg.sender] -= amount # dev: not enough allowance + self.balanceOf[sender] -= amount # dev: not enough balance self.balanceOf[receiver] += amount # NOTE: No event return True diff --git a/contracts/test/TestValidator.vy b/contracts/test/TestValidator.vy index 67cd569..7996236 100644 --- a/contracts/test/TestValidator.vy +++ b/contracts/test/TestValidator.vy @@ -2,16 +2,22 @@ from ethereum.ercs import IERC20 from .. 
import Validator +implements: Validator -MAX_REASON_SIZE: constant(uint16) = 1024 +MAX_PRODUCTS: constant(uint8) = 20 -implements: Validator @external def validate( creator: address, token: IERC20, - amount_per_second: uint256, - reason: Bytes[MAX_REASON_SIZE], + amount: uint256, + products: DynArray[bytes32, MAX_PRODUCTS], ) -> uint256: - return max_value(uint256) + rate: uint256 = 0 + + for product: bytes32 in products: + # NOTE: Tokens per second + rate += convert(product, uint256) + + return amount // rate diff --git a/contracts/validators/Allowlist.vy b/contracts/validators/Allowlist.vy new file mode 100644 index 0000000..f1c93de --- /dev/null +++ b/contracts/validators/Allowlist.vy @@ -0,0 +1,52 @@ +from ethereum.ercs import IERC20 + +from .. import Validator +implements: Validator + +MAX_PRODUCTS: constant(uint8) = 20 + +owner: public(address) +is_allowed: public(HashMap[address, bool]) + +event Allowed: + user: indexed(address) + +event Denied: + user: indexed(address) + + +@deploy +def __init__(allowed: DynArray[address, 100]): + self.owner = msg.sender + for user: address in allowed: + self.is_allowed[user] = True + log Allowed(user) + + +@external +def allow(allowed: DynArray[address, 100]): + assert msg.sender == self.owner + + for user: address in allowed: + self.is_allowed[user] = True + log Allowed(user) + + +@external +def deny(denied: DynArray[address, 100]): + assert msg.sender == self.owner + + for user: address in denied: + self.is_allowed[user] = False + log Denied(user) + + +@external +def validate( + funder: address, + token: IERC20, + amount: uint256, + products: DynArray[bytes32, MAX_PRODUCTS], +) -> uint256: + assert self.is_allowed[funder] + return 0 # This validator does not compute any product costs diff --git a/contracts/validators/Denylist.vy b/contracts/validators/Denylist.vy new file mode 100644 index 0000000..a3509e0 --- /dev/null +++ b/contracts/validators/Denylist.vy @@ -0,0 +1,60 @@ +from ethereum.ercs import IERC20 + +from .. import StreamManager +from .. 
import Validator +implements: Validator + +MAX_PRODUCTS: constant(uint8) = 20 + +owner: public(address) +is_denied: public(HashMap[address, bool]) + +event Allowed: + user: indexed(address) + +event Denied: + user: indexed(address) + + +@deploy +def __init__(denied: DynArray[address, 100]): + self.owner = msg.sender + for user: address in denied: + self.is_denied[user] = True + log Allowed(user) + + +@external +def allow(allowed: DynArray[address, 100]): + assert msg.sender == self.owner + + for user: address in allowed: + self.is_denied[user] = False + log Allowed(user) + + +@external +def deny(denied: DynArray[address, 100]): + assert msg.sender == self.owner + + for user: address in denied: + self.is_denied[user] = True + log Denied(user) + +# TODO: InstantiationException: contracts/StreamManager.vy is not instantiable in calldata +# @external +# def cancel_stream(manager: StreamManager, creator: address, stream_id: uint256): +# # NOTE: Batch-able via Multicall3 +# assert self.is_denied[creator] +# extcall manager.cancel_stream(creator, stream_id) + + +@external +def validate( + funder: address, + token: IERC20, + amount: uint256, + products: DynArray[bytes32, MAX_PRODUCTS], +) -> uint256: + assert not self.is_denied[funder] + return 0 # This validator does not compute any product costs diff --git a/scripts/demo.py b/scripts/demo.py index e058d3c..c1c2878 100644 --- a/scripts/demo.py +++ b/scripts/demo.py @@ -3,10 +3,10 @@ """ import random -from datetime import timedelta import click from ape.cli import ConnectedProviderCommand, ape_cli_context +from eth_pydantic_types import HashBytes32 from apepay import StreamManager @@ -33,9 +33,10 @@ def cli( # Initialize experiment deployer = cli_ctx.account_manager.test_accounts[-1] token = cli_ctx.local_project.TestToken.deploy(sender=deployer) + validator = cli_ctx.local_project.TestValidator.deploy(sender=deployer) sm = StreamManager( cli_ctx.local_project.StreamManager.deploy( - deployer, min_stream_life, [], [token], sender=deployer + deployer, min_stream_life, [token], [validator], sender=deployer ) ) @@ -53,10 +54,9 @@ def cli( for account in accounts: token.DEBUG_mint(account, 10_000 * 10**decimals, sender=account) - # 26 tokens per day - starting_life = timedelta(minutes=5).total_seconds() - starting_tokens = 26 * 10**decimals - funding_amount = 2 * 10**decimals + starting_tokens = 3 * 10**decimals # ~41.63 seconds + products = [HashBytes32(b"\x00" * 24 + b"\x01" + b"\x00" * 7)] # ~259.41 tokens/hour + funding_amount = 1 * 10**decimals # ~13.88 seconds streams = {a.address: [] for a in accounts} while cli_ctx.chain_manager.blocks.head.number < num_blocks: @@ -64,9 +64,9 @@ def cli( # Do a little garbage collection for stream in streams[payer.address]: - click.echo(f"{payer}:{stream.stream_id} - {stream.time_left}") + click.echo(f"Stream '{stream.id}' - {stream.time_left}") if not stream.is_active: - click.echo(f"Stream '{payer}:{stream.stream_id}' is expired, removing...") + click.echo(f"Stream '{stream.id}' is expired, removing...") streams[payer.address].remove(stream) if len(streams[payer.address]) > 0: @@ -74,14 +74,14 @@ def cli( if token.balanceOf(payer) >= 10 ** (decimals + 1) and random.random() < fund_stream: click.echo( - f"Stream '{payer}:{stream.stream_id}' is being funded " + f"Stream '{stream.id}' is being funded " f"w/ {funding_amount / 10**decimals:.2f} tokens..." 
) token.approve(sm.address, funding_amount, sender=payer) stream.add_funds(funding_amount, sender=payer) elif random.random() < cancel_stream: - click.echo(f"Stream '{payer}:{stream.stream_id}' is being cancelled...") + click.echo(f"Stream '{stream.id}' is being cancelled...") stream.cancel(sender=payer) streams[payer.address].remove(stream) @@ -91,6 +91,6 @@ def cli( elif len(streams[payer.address]) < max_streams and random.random() < create_stream: click.echo(f"'{payer}' is creating a new stream...") token.approve(sm.address, starting_tokens, sender=payer) - stream = sm.create(token, int(starting_tokens / starting_life), sender=payer) + stream = sm.create(token, starting_tokens, products, sender=payer) streams[payer.address].append(stream) - click.echo(f"Stream '{payer}:{stream.stream_id}' was created successfully.") + click.echo(f"Stream '{stream.id}' was created successfully.") diff --git a/scripts/deploy.py b/scripts/deploy.py index 24ab274..e6e4e85 100644 --- a/scripts/deploy.py +++ b/scripts/deploy.py @@ -127,8 +127,8 @@ def manager( project.StreamManager, owner or account, min_stream_life, - list(validators), token_addresses, + list(validators), publish=publish, ) @@ -138,3 +138,10 @@ def manager( @account_option() def token(cli_ctx, account): account.deploy(project.TestToken) + + +@cli.command(cls=ConnectedProviderCommand, short_help="Deploy a Mock validator") +@ape_cli_context() +@account_option() +def validator(cli_ctx, account): + account.deploy(project.TestValidator) diff --git a/scripts/manage.py b/scripts/manage.py index 466e974..bddc9b5 100644 --- a/scripts/manage.py +++ b/scripts/manage.py @@ -1,6 +1,5 @@ import click from ape.cli import ConnectedProviderCommand, account_option, network_option -from ape.types import AddressType from ape_ethereum import multicall from apepay import StreamManager @@ -15,32 +14,25 @@ def cli(): @cli.command(cls=ConnectedProviderCommand) @network_option() -@click.option("--start-block", type=int) -@click.argument("address", type=AddressType) -def unclaimed(network, start_block, address): +@click.argument("manager", type=StreamManager) +def unclaimed(manager): """List all unclaimed streams""" - sm = StreamManager(address=address) - for stream in sm.unclaimed_streams(start_block=start_block): - click.echo( - f"{stream.creator}/{stream.stream_id}: " - f"{stream.amount_unlocked / 10 ** stream.token.decimals()} " - f"{stream.token.symbol()}" - ) + for stream in manager.unclaimed_streams(): + stream_balance = stream.amount_claimable / 10 ** stream.token.decimals() + click.echo(f"{stream.id}: {stream_balance} {stream.token.symbol()}") @cli.command(cls=ConnectedProviderCommand) @network_option() @account_option() -@click.option("--start-block", type=int) @click.option("--batch-size", type=int, default=256) @click.option("--multicall/--no-multicall", "use_multicall", default=True) -@click.argument("address", type=AddressType) -def claim(account, start_block, batch_size, use_multicall, address): +@click.argument("manager", type=StreamManager) +def claim(account, batch_size, use_multicall, manager): """Claim unclaimed streams using multicall (anyone can claim)""" - sm = StreamManager(address=address) - unclaimed_streams = sm.unclaimed_streams(start_block=start_block) + unclaimed_streams = manager.unclaimed_streams() if not use_multicall: for _ in range(batch_size): @@ -55,6 +47,7 @@ def claim(account, start_block, batch_size, use_multicall, address): click.echo(f"INFO: {len(list(unclaimed_streams))} more claims needed...") return + # else: use multicall 
more_streams = True while more_streams: @@ -67,7 +60,7 @@ def claim(account, start_block, batch_size, use_multicall, address): more_streams = False break - tx.add(sm.contract.claim, stream.creator, stream.stream_id) + tx.add(manager.contract.claim_stream, stream.id) try: tx(sender=account) diff --git a/sdk/py/apepay/exceptions.py b/sdk/py/apepay/exceptions.py index be7f0fe..b91c4ff 100644 --- a/sdk/py/apepay/exceptions.py +++ b/sdk/py/apepay/exceptions.py @@ -1,8 +1,6 @@ from datetime import timedelta -from typing import TYPE_CHECKING -if TYPE_CHECKING: - from . import Validator +from ape.types import AddressType class ApePayException(Exception): @@ -16,9 +14,16 @@ def __init__(self): ) -class MissingCreationReceipt(ApePayException, NotImplementedError): +class ManagerDoesNotExist(ApePayException, ValueError): def __init__(self): - super().__init__("Missing creation transaction for stream. Functionality unavailabie.") + super().__init__( + "Contract does not exist on this chain, please check the address you are using." + ) + + +class TokenNotAccepted(ApePayException, ValueError): + def __init__(self, token_details: str): + super().__init__(f"Token '{token_details}' not accepted.") class FundsNotClaimable(ApePayException): @@ -26,21 +31,20 @@ def __init__(self): super().__init__("Stream has no funds left to claim.") -class TokenNotAccepted(ApePayException, ValueError): - def __init__(self, token_details: str): - super().__init__(f"Token '{token_details}' not accepted.") +class NotEnoughAllowance(ApePayException, ValueError): + def __init__(self, manager: AddressType): + super().__init__(f"Not enough allownace, please approve {manager}") class StreamLifeInsufficient(ApePayException, ValueError): def __init__(self, stream_life: timedelta, min_stream_life: timedelta): super().__init__( f"Stream life is {stream_life}, which is not sufficient to create stream. " - f"Excepted at least {min_stream_life} of life for the stream to be created. " - f"Please wait or back-date stream by {min_stream_life - stream_life} amount " - "of time to succeed, or approve more token allowance for the stream to use." + f"Expected at least {min_stream_life} of life for the stream to be created. " + "Please increase stream funding amount in order to successfully proceed." 
) -class ValidatorFailed(ApePayException, ValueError): - def __init__(self, validator: "Validator"): - super().__init__(f"Validator failed: {validator.contract}") +class NoValidProducts(ApePayException, ValueError): + def __init__(self): + super().__init__("No valid products in stream creation") diff --git a/sdk/py/apepay/factory.py b/sdk/py/apepay/factory.py index 05abe47..33451c9 100644 --- a/sdk/py/apepay/factory.py +++ b/sdk/py/apepay/factory.py @@ -2,9 +2,10 @@ from ape.contracts import ContractInstance from ape.types import AddressType, BaseInterfaceModel +from ape.utils import ZERO_ADDRESS from pydantic import field_validator -from .exceptions import NoFactoryAvailable +from .exceptions import ManagerDoesNotExist, NoFactoryAvailable from .manager import StreamManager from .package import MANIFEST @@ -36,8 +37,9 @@ def contract(self) -> ContractInstance: return MANIFEST.StreamFactory.at(self.address) def get_deployment(self, deployer: Any) -> StreamManager: - # TODO: Add product selection to the factory using `product=` kwarg (defaults to empty) - return StreamManager(self.contract.deployments(deployer)) + if (sm_address := self.contract.deployments(deployer)) == ZERO_ADDRESS: + raise ManagerDoesNotExist() + return StreamManager(sm_address) class Releases: diff --git a/sdk/py/apepay/manager.py b/sdk/py/apepay/manager.py index 9e08038..a87d855 100644 --- a/sdk/py/apepay/manager.py +++ b/sdk/py/apepay/manager.py @@ -1,26 +1,27 @@ -import json +import inspect from collections.abc import Iterator -from datetime import datetime, timedelta +from datetime import timedelta +from difflib import Differ from functools import partial, wraps from typing import TYPE_CHECKING, Any, Callable, Union, cast from ape.api import ReceiptAPI -from ape.contracts.base import ( - ContractCallHandler, - ContractEvent, - ContractInstance, - ContractTransactionHandler, -) +from ape.contracts.base import ContractEvent, ContractInstance, ContractTransactionHandler from ape.exceptions import ContractLogicError, DecodingError +from ape.logging import logger from ape.types import AddressType, HexBytes from ape.utils import BaseInterfaceModel, cached_property from ape_ethereum import multicall from pydantic import field_validator -from .exceptions import StreamLifeInsufficient, TokenNotAccepted, ValidatorFailed +from .exceptions import ( + NotEnoughAllowance, + NoValidProducts, + StreamLifeInsufficient, + TokenNotAccepted, +) from .package import MANIFEST from .streams import Stream -from .utils import time_unit_to_timedelta from .validators import Validator if TYPE_CHECKING: @@ -51,8 +52,12 @@ def __repr__(self) -> str: return f"" @property - def owner(self) -> AddressType: - return self.contract.owner() + def controller(self) -> AddressType: + return self.contract.controller() + + @property + def set_controller(self) -> ContractTransactionHandler: + return self.contract.set_controller @property def validators(self) -> list[Validator]: @@ -87,14 +92,22 @@ def _parse_validator(self) -> Callable[[_ValidatorItem], Validator]: def set_validators(self) -> ContractTransactionHandler: @wraps(self.contract.set_validators) - def order_validators(*validators: _ValidatorItem, **txn_kwargs) -> ReceiptAPI: + def set_validators(*validators: _ValidatorItem, **txn_kwargs) -> ReceiptAPI: + if len(validators) == 1 and isinstance(validators[0], (tuple, list)): + raise ValueError( + "This function accepts one or more validators to set, not a single sequence." 
+ ) # NOTE: Always keep sets sorted, ensure no duplicates - return self.contract.set_validators( - sorted(v.address for v in set(map(self._parse_validator, validators))), - **txn_kwargs, + new_validators = sorted(v.address for v in set(map(self._parse_validator, validators))) + logger.info( + f"Setting validators for StreamManager('{self.address}')\n" + + "\n".join( + Differ().compare(tuple(v.address for v in self.validators), new_validators) + ) ) + return self.contract.set_validators(new_validators, **txn_kwargs) - return cast(ContractTransactionHandler, order_validators) + return cast(ContractTransactionHandler, set_validators) def add_validators(self, *new_validators: _ValidatorItem, **txn_kwargs) -> ReceiptAPI: return self.set_validators( @@ -102,117 +115,119 @@ def add_validators(self, *new_validators: _ValidatorItem, **txn_kwargs) -> Recei **txn_kwargs, ) + def replace_validator( + self, + old_validator: _ValidatorItem, + new_validator: _ValidatorItem, + **txn_kwargs, + ) -> ReceiptAPI: + return self.set_validators( + *( + (set(self.validators) - set([self._parse_validator(old_validator)])) + | set([self._parse_validator(new_validator)]) + ), + **txn_kwargs, + ) + def remove_validators(self, *old_validators: _ValidatorItem, **txn_kwargs) -> ReceiptAPI: return self.set_validators( *(set(self.validators) - set(map(self._parse_validator, old_validators))), **txn_kwargs, ) - @property - def add_token(self) -> ContractTransactionHandler: - return self.contract.add_token + def add_token(self, token: AddressType, **txn_kwargs) -> ReceiptAPI: + return self.contract.set_token_accepted(token, True, **txn_kwargs) - @property - def remove_token(self) -> ContractTransactionHandler: - return self.contract.remove_token + def remove_token(self, token: AddressType, **txn_kwargs) -> ReceiptAPI: + return self.contract.set_token_accepted(token, False, **txn_kwargs) - @property - def is_accepted(self) -> ContractCallHandler: - return self.contract.token_is_accepted + def is_accepted(self, token: AddressType) -> bool: + return self.contract.token_is_accepted(token) @cached_property def MIN_STREAM_LIFE(self) -> timedelta: # NOTE: Immutable in contract return timedelta(seconds=self.contract.MIN_STREAM_LIFE()) + def compute_stream_life( + self, + funder: AddressType, + token: Any, + amount: str | int, + products: list[HexBytes], + ) -> timedelta: + return timedelta( + # NOTE: Need to use call because it's technically `nonpayable` + seconds=self.contract.compute_stream_life.call( + funder, + token, + amount, + products, + ) + ) + def create( self, token: ContractInstance, - amount_per_second: str | int, - reason: HexBytes | bytes | str | dict | None = None, - start_time: datetime | int | None = None, + amount: str | int, + products: list[HexBytes], + min_stream_life: timedelta | int | None = None, **txn_kwargs, ) -> "Stream": - if not self.is_accepted(token): + if not self.is_accepted(token.address): # for mypy raise TokenNotAccepted(str(token)) - if isinstance(amount_per_second, str) and "/" in amount_per_second: - value, time = amount_per_second.split("/") - amount_per_second = int( - self.conversion_manager.convert(value.strip(), int) - / time_unit_to_timedelta(time).total_seconds() + if sender := txn_kwargs.get("sender"): + # NOTE: `sender` must always be present, but fallback on ape's exception + if min(token.balanceOf(sender), token.allowance(sender, self.address)) < amount: + raise NotEnoughAllowance(self.address) + + if min_stream_life is not None: + if isinstance(min_stream_life, int): + # NOTE: 
Convert for later + min_stream_life = timedelta(seconds=min_stream_life) + + elif ( + computed_stream_life := self.compute_stream_life(sender, token, amount, products) + ) < timedelta(seconds=0): + # NOTE: Special trapdoor if no validators picked up the product codes + raise NoValidProducts() + + elif computed_stream_life < self.MIN_STREAM_LIFE: + raise StreamLifeInsufficient( + stream_life=computed_stream_life, + min_stream_life=self.MIN_STREAM_LIFE, ) - if amount_per_second == 0: - raise ValueError("`amount_per_second` must be greater than 0.") + else: + # NOTE: Use this as a safety invariant for StreamManager logic + min_stream_life = computed_stream_life - args: list[Any] = [token, amount_per_second] - - if reason is not None: - if isinstance(reason, dict): - reason = json.dumps(reason, separators=(",", ":")) - - if isinstance(reason, str): - reason = reason.encode("utf-8") - - args.append(reason) - - if start_time is not None: - if len(args) == 2: - args.append(b"") # Add empty reason string - - if isinstance(start_time, datetime): - args.append(int(start_time.timestamp())) - - elif isinstance(start_time, int) and start_time < 0: - args.append(self.chain_manager.pending_timestamp + start_time) - - else: - args.append(start_time) - - if sender := hasattr(token, "allowance") and txn_kwargs.get("sender"): - allowance = token.allowance(sender, self.contract) - - if allowance == 2**256 - 1: # NOTE: Sentinel value meaning "all balance" - allowance = token.balanceOf(sender) - - stream_life = allowance // amount_per_second - - if stream_life < self.MIN_STREAM_LIFE.total_seconds(): - raise StreamLifeInsufficient( - stream_life=timedelta(seconds=stream_life), - min_stream_life=self.MIN_STREAM_LIFE, - ) - - validator_args = [sender, *args[:2]] - # Arg 3 (reason) is optional - if len(args) == 3: - validator_args.append(args[2]) - else: - validator_args.append(b"") - # Skip arg 4 (start_time) - - for v in self.validators: - if not v(*validator_args): - raise ValidatorFailed(v) - - tx = self.contract.create_stream(*args, **txn_kwargs) - - event = tx.events.filter(self.contract.StreamCreated)[-1] - return Stream.from_event( - manager=self, - event=event, - is_creation_event=True, + tx = self.contract.create_stream( + token, + amount, + products, + int(min_stream_life.total_seconds()), + **txn_kwargs, ) + # NOTE: Does not require tracing (unlike `.return_value`) + log = tx.events.filter(self.contract.StreamCreated)[-1] + return Stream(manager=self, id=log.stream_id) + def _parse_stream_decorator(self, app: "SilverbackApp", container: ContractEvent): def decorator(f): @app.on_(container) @wraps(f) - def inner(log): - return f(Stream(manager=self, creator=log.creator, stream_id=log.stream_id)) + async def inner(log, **dependencies): + result = f(Stream(manager=self, id=log.stream_id), **dependencies) + + if inspect.isawaitable(result): + return await result + + return result return inner @@ -255,7 +270,7 @@ def on_stream_claimed(self, app: "SilverbackApp"): def do_something(stream): ... 
# Use `stream` to update your infrastructure """ - return self._parse_stream_decorator(app, self.contract.Claimed) + return self._parse_stream_decorator(app, self.contract.StreamClaimed) def on_stream_cancelled(self, app: "SilverbackApp"): """ @@ -270,30 +285,16 @@ def do_something(stream): """ return self._parse_stream_decorator(app, self.contract.StreamCancelled) - def streams_by_creator(self, creator: AddressType) -> Iterator["Stream"]: - for stream_id in range(self.contract.num_streams(creator)): - yield Stream(manager=self, creator=creator, stream_id=stream_id) - - def all_streams(self, start_block: int | None = None) -> Iterator["Stream"]: - if start_block is None and self.contract.creation_metadata: - start_block = self.contract.creation_metadata.block - - for stream_created_event in self.contract.StreamCreated.range( - start_block or 0, - self.chain_manager.blocks.head.number, - ): - yield Stream.from_event( - manager=self, - event=stream_created_event, - is_creation_event=True, - ) + def all_streams(self) -> Iterator[Stream]: + for stream_id in range(self.contract.num_streams()): + yield Stream(manager=self, id=stream_id) - def active_streams(self, start_block: int | None = None) -> Iterator["Stream"]: - for stream in self.all_streams(start_block=start_block): + def active_streams(self) -> Iterator[Stream]: + for stream in self.all_streams(): if stream.is_active: yield stream - def unclaimed_streams(self, start_block: int | None = None) -> Iterator["Stream"]: - for stream in self.all_streams(start_block=start_block): - if not stream.is_active and stream.amount_unlocked > 0: + def unclaimed_streams(self) -> Iterator[Stream]: + for stream in self.all_streams(): + if stream.amount_claimable > 0: yield stream diff --git a/sdk/py/apepay/streams.py b/sdk/py/apepay/streams.py index 9e8459e..a13d15b 100644 --- a/sdk/py/apepay/streams.py +++ b/sdk/py/apepay/streams.py @@ -1,16 +1,13 @@ -import json -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from decimal import Decimal from functools import partial -from typing import TYPE_CHECKING, Any, cast +from typing import TYPE_CHECKING, cast -from ape.api import ReceiptAPI from ape.contracts.base import ContractInstance, ContractTransactionHandler -from ape.types import AddressType, ContractLog, HexBytes +from ape.types import AddressType, HexBytes from ape.utils import BaseInterfaceModel, cached_property -from pydantic import field_validator -from .exceptions import FundsNotClaimable, MissingCreationReceipt +from .exceptions import FundsNotClaimable if TYPE_CHECKING: from .manager import StreamManager @@ -20,140 +17,84 @@ class Stream(BaseInterfaceModel): manager: "StreamManager" - creator: AddressType - stream_id: int - creation_receipt: ReceiptAPI | None = None - transaction_hash: HexBytes | None = None - - @field_validator("transaction_hash", mode="before") - def normalize_transaction_hash(cls, value: Any) -> HexBytes | None: - if value: - return HexBytes(cls.conversion_manager.convert(value, bytes)) - - return value - - @field_validator("creator", mode="before") - def validate_addresses(cls, value): - return ( - value if isinstance(value, str) else cls.conversion_manager.convert(value, AddressType) - ) - - @classmethod - def from_event( - cls, - manager: "StreamManager", - event: ContractLog, - is_creation_event: bool = False, - ) -> "Stream": - return cls( - manager=manager, - creator=event.creator, - stream_id=event.stream_id, - transaction_hash=event.transaction_hash if is_creation_event else 
None, - ) - - def to_event(self) -> ContractLog: - return self.receipt.events.filter(self.manager.contract.StreamCreated)[0] + id: int @property def contract(self) -> ContractInstance: return self.manager.contract - @property - def receipt(self) -> ReceiptAPI: - if self.creation_receipt: - return self.creation_receipt - - if self.transaction_hash: - receipt = self.chain_manager.get_receipt(self.transaction_hash.hex()) - self.creation_receipt = receipt - return receipt - - raise MissingCreationReceipt() - def __repr__(self) -> str: - return ( - f"" - ) + return f"" @property def info(self): - return self.contract.streams(self.creator, self.stream_id) + return self.contract.streams(self.id) @cached_property def token(self) -> ContractInstance: + # NOTE: This cannot be updated try: from ape_tokens.managers import ERC20 # type: ignore[import-not-found] + except ImportError: - ERC20 = None + ERC20 = None # type: ignore[assignment] return self.chain_manager.contracts.instance_at(self.info.token, contract_type=ERC20) - @cached_property - def amount_per_second(self) -> int: - return self.info.amount_per_second - @property def funding_rate(self) -> Decimal: """ - Funding rate, in tokens per second, of Stream in correct decimal form. + Funding rate, in tokens per second, of Stream in human-readable decimal form. """ - return Decimal(self.amount_per_second) / Decimal(10 ** self.token.decimals()) + info = self.info # NOTE: Avoid calling contract twice by caching + + return ( + Decimal(info.funded_amount) + / Decimal(info.expires_at - info.last_claim) + / Decimal(10 ** self.token.decimals()) + ) - def estimate_funding(self, period: timedelta) -> int: + def estimate_funding(self, period: timedelta) -> Decimal: """ - Useful for estimating how many tokens you need to add to extend for a specific time period. + Useful for displaying how many tokens you need to add to extend for a specific time period. 
""" - return int(period.total_seconds() * self.amount_per_second) + return int(period.total_seconds()) * self.funding_rate @cached_property - def start_time(self) -> datetime: - return datetime.fromtimestamp(self.info.start_time) - - @cached_property - def reason(self) -> HexBytes | str | dict: - try: - reason_str = self.info.reason.decode("utf-8") - - except Exception: - return self.info.reason + def products(self) -> list[HexBytes]: + # NOTE: This cannot be updated + return self.info.products - try: - return json.loads(reason_str) - - except (Exception, json.JSONDecodeError): - return reason_str + @property + def owner(self) -> AddressType: + return self.info.owner @property - def last_pull(self) -> datetime: - return datetime.fromtimestamp(self.info.last_pull) + def expires_at(self) -> datetime: + return datetime.fromtimestamp(self.info.expires_at, timezone.utc) @property - def amount_unlocked(self) -> int: - return self.contract.amount_unlocked(self.creator, self.stream_id) + def last_update(self) -> datetime: + return datetime.fromtimestamp(self.info.last_update, timezone.utc) @property - def amount_locked(self) -> int: - return self.info.funded_amount - self.amount_unlocked + def last_claim(self) -> datetime: + return datetime.fromtimestamp(self.info.last_claim, timezone.utc) @property - def time_left(self) -> timedelta: - seconds = self.contract.time_left(self.creator, self.stream_id) - return timedelta(seconds=min(MAX_DURATION_SECONDS, seconds)) + def amount_claimable(self) -> int: + return self.contract.amount_claimable(self.id) @property - def total_time(self) -> timedelta: - info = self.info # NOTE: Avoid calling contract twice - # NOTE: Measure time-duration of unclaimed amount remaining (locked and unlocked) - max_life = int(info.funded_amount / info.amount_per_second) + def amount_refundable(self) -> int: + # NOTE: Max `.amount_claimable` can be is `.funded_amount` + return self.info.funded_amount - self.amount_claimable - return ( - # NOTE: `last_pull == start_time` if never pulled - datetime.fromtimestamp(info.last_pull) - - datetime.fromtimestamp(info.start_time) - + timedelta(seconds=min(MAX_DURATION_SECONDS, max_life)) - ) + @property + def time_left(self) -> timedelta: + seconds = self.contract.time_left(self.id) + assert seconds < MAX_DURATION_SECONDS, "Invaraint wrong" + return timedelta(seconds=seconds) @property def is_active(self) -> bool: @@ -163,26 +104,26 @@ def is_active(self) -> bool: def add_funds(self) -> ContractTransactionHandler: return cast( ContractTransactionHandler, - partial(self.contract.add_funds, self.creator, self.stream_id), + partial(self.contract.fund_stream, self.id), ) @property def is_cancelable(self) -> bool: - return self.contract.stream_is_cancelable(self.creator, self.stream_id) + return self.contract.stream_is_cancelable(self.id) @property def cancel(self) -> ContractTransactionHandler: return cast( ContractTransactionHandler, - partial(self.contract.cancel_stream, self.stream_id), + partial(self.contract.cancel_stream, self.id), ) @property def claim(self) -> ContractTransactionHandler: - if not self.amount_unlocked > 0: + if not self.amount_claimable > 0: raise FundsNotClaimable() return cast( ContractTransactionHandler, - partial(self.contract.claim, self.creator, self.stream_id), + partial(self.contract.claim_stream, self.id), ) diff --git a/sdk/py/apepay/validators.py b/sdk/py/apepay/validators.py index f2921de..c885340 100644 --- a/sdk/py/apepay/validators.py +++ b/sdk/py/apepay/validators.py @@ -1,7 +1,6 @@ from typing import 
TYPE_CHECKING, Any from ape.contracts.base import ContractInstance -from ape.exceptions import ContractLogicError from ape.types import AddressType from ape.utils import BaseInterfaceModel from eth_utils import to_int @@ -26,6 +25,9 @@ def __init__(self, address: str | AddressType, /, *args, **kwargs): kwargs["address"] = address super().__init__(*args, **kwargs) + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.address})" + @field_validator("address", mode="before") def normalize_address(cls, value: Any) -> AddressType: if isinstance(value, Validator): @@ -58,14 +60,12 @@ def __eq__(self, other: Any) -> bool: # Try __eq__ from the other side. return NotImplemented - def __call__(self, *args, **kwargs) -> bool: - try: - # NOTE: Imitate that the call is coming from the specified StreamManager. - # Also note that a validator can be connected to >1 StreamManagers. - self.contract._mutable_methods_["validate"].call( - *args, sender=self.manager.address, **kwargs - ) - return True - - except ContractLogicError: - return False + def __call__(self, *args, **kwargs) -> int: + return self.contract._mutable_methods_["validate"].call( + *args, # NOTE: These must be properly formed downstream before calling + # NOTE: Imitate that the call is coming from the connected StreamManager, because a + # validator can be connected to >1 StreamManagers so context may be important. + sender=self.manager.address, + gas_price="0 gwei", # NOTE: Avoid gas balance issues + **kwargs, # NOTE: Do last so it can override above (if necessary) + ) # Sum of product cost(s) for this particular validator diff --git a/tests/conftest.py b/tests/conftest.py index 701568e..11aa59a 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,16 +1,24 @@ +from datetime import timedelta +from decimal import Decimal + import pytest +from ape.api import AccountAPI +from eth_pydantic_types import HashBytes32 +from eth_utils import to_bytes, to_int from apepay import StreamManager +ONE_HOUR = timedelta(hours=1) + @pytest.fixture(scope="session") -def owner(accounts): - return accounts[0] +def controller(accounts): + return accounts[9] @pytest.fixture(scope="session") def payer(accounts): - return accounts[1] + return accounts[0] @pytest.fixture(scope="session") @@ -21,46 +29,38 @@ def create_token(deployer): return create_token -@pytest.fixture(scope="session", params=["0 tokens", "1 token", "2 tokens"]) -def tokens(create_token, payer, request): - return [create_token(payer) for _ in range(int(request.param.split(" ")[0]) + 1)] - - @pytest.fixture(scope="session") -def token(tokens): - if len(tokens) == 0: - pytest.skip("No valid tokens") - - return tokens[0] +def token(create_token, payer): + return create_token(payer) @pytest.fixture(scope="session") -def starting_balance(token, payer): - # NOTE: All tokens start with the same balance - return token.balanceOf(payer) - - -@pytest.fixture(scope="session") -def create_validator(owner, project): +def create_validator(project, controller): def create_validator(): - return owner.deploy(project.TestValidator) + return controller.deploy(project.TestValidator) return create_validator -@pytest.fixture(scope="session", params=["0 validators", "1 validator", "2 validators"]) -def validators(create_validator, request): - return [create_validator() for _ in range(int(request.param.split(" ")[0]) + 1)] +@pytest.fixture(scope="session") +def validator(create_validator): + return create_validator() @pytest.fixture(scope="session") def MIN_STREAM_LIFE(): - return 60 * 60 # 1 hour 
in seconds
+    return ONE_HOUR
 
 
 @pytest.fixture(scope="session")
-def stream_manager_contract(owner, project, MIN_STREAM_LIFE, validators, tokens):
-    return owner.deploy(project.StreamManager, owner, MIN_STREAM_LIFE, validators, tokens)
+def stream_manager_contract(project, controller, token, validator, MIN_STREAM_LIFE):
+    return project.StreamManager.deploy(
+        controller,
+        int(MIN_STREAM_LIFE.total_seconds()),
+        [token],
+        [validator],
+        sender=controller,
+    )
 
 
 @pytest.fixture(scope="session")
@@ -68,6 +68,53 @@ def stream_manager(stream_manager_contract):
     return StreamManager(stream_manager_contract)
 
 
+@pytest.fixture(scope="session", params=["1 product", "2 products", "3 products"])
+def products(request):
+    return [
+        # NOTE: 0x[25 empty bytes]01 ~= 0.00028... tokens/second ~= 1.01... tokens/hr
+        # also, `sum(1, 2, 3, ..., n) = n * (n + 1) / 2`
+        HashBytes32(b"\x00" * 25 + to_bytes(product_code) + b"\x00" * 6)
+        for product_code in range(1, int(request.param.split(" ")[0]) + 1)
+    ]
+
+
+@pytest.fixture(scope="session", params=["1 hour", "2 hours", "12 hours"])
+def stream_life(request):
+    return int(request.param.split(" ")[0]) * ONE_HOUR
+
+
+@pytest.fixture(scope="session")
+def funding_rate(token, products):
+    return Decimal(sum(map(to_int, products))) / Decimal(10 ** token.decimals())
+
+
+@pytest.fixture(scope="session")
+def create_stream(chain, stream_manager, token, payer, products, stream_life, funding_rate):
+    def create_stream(
+        amount: int | None = None,
+        sender: AccountAPI | None = None,
+        allowance: int = (2**256 - 1),
+        **txn_args,
+    ):
+        if amount is None:
+            amount = int(
+                Decimal(stream_life.total_seconds())
+                * funding_rate
+                # NOTE: To undo the adjustment factor from above
+                * Decimal(10 ** token.decimals())
+            )
+        assert amount <= token.balanceOf(sender or payer)
+
+        if token.allowance(sender or payer, stream_manager.address) != allowance:
+            token.approve(stream_manager.address, allowance, sender=(sender or payer))
+
+        return stream_manager.create(token, amount, products, sender=(sender or payer), **txn_args)
+
+    return create_stream
+
+
 @pytest.fixture(scope="session")
-def stream(stream_manager, token, payer):
-    return stream_manager.create(token, 1000, sender=payer)
+def stream(chain, create_stream):
+    # TODO: Remove when https://github.com/ApeWorX/ape/pull/2277 merges
+    with chain.isolate():
+        yield create_stream()
diff --git a/tests/test_manager.py b/tests/test_manager.py
new file mode 100644
index 0000000..0ec7c82
--- /dev/null
+++ b/tests/test_manager.py
@@ -0,0 +1,19 @@
+from datetime import timedelta
+
+
+def test_init(stream_manager, controller, validator, token):
+    assert stream_manager.MIN_STREAM_LIFE == timedelta(hours=1)
+    assert stream_manager.controller == controller
+    assert stream_manager.validators == [validator]
+    assert stream_manager.is_accepted(token)
+
+
+def test_add_rm_tokens(stream_manager, controller, create_token):
+    new_token = create_token(controller)
+    assert not stream_manager.is_accepted(new_token)
+
+    stream_manager.add_token(new_token, sender=controller)
+    assert stream_manager.is_accepted(new_token)
+
+    stream_manager.remove_token(new_token, sender=controller)
+    assert not stream_manager.is_accepted(new_token)
diff --git a/tests/test_stream.py b/tests/test_stream.py
index a6865c1..4fa50e6 100644
--- a/tests/test_stream.py
+++ b/tests/test_stream.py
@@ -1,4 +1,5 @@
-from datetime import datetime, timedelta
+from datetime import timedelta
+from decimal import Decimal
 
 import ape
 import pytest
@@ -6,128 +7,154 @@
 from apepay import
exceptions as apepay_exc -def test_init(stream_manager, owner, validators, tokens): - assert stream_manager.MIN_STREAM_LIFE == timedelta(hours=1) - assert stream_manager.owner == owner - assert stream_manager.validators == validators +def test_create_stream(chain, payer, token, funding_rate, stream_life, create_stream, products): + with pytest.raises(apepay_exc.NotEnoughAllowance): + create_stream(allowance=0) - for token in tokens: - assert stream_manager.is_accepted(token) + with pytest.raises(apepay_exc.NotEnoughAllowance): + create_stream(amount=token.balanceOf(payer) + 1) + stream = create_stream() + total_funded = stream.info.funded_amount -def test_set_validators(stream_manager, owner, create_validator): - new_validator = create_validator() - assert new_validator not in stream_manager.validators - - stream_manager.add_validators(new_validator, sender=owner) - assert new_validator in stream_manager.validators - - stream_manager.remove_validators(new_validator, sender=owner) - assert new_validator not in stream_manager.validators - - -def test_add_rm_tokens(stream_manager, owner, tokens, create_token): - new_token = create_token(owner) - assert new_token not in tokens - assert not stream_manager.is_accepted(new_token) - - stream_manager.add_token(new_token, sender=owner) - assert stream_manager.is_accepted(new_token) + assert stream.id == 0 # Sanity check that isolation is working + assert stream.token == token + assert stream.owner == payer + assert stream.funding_rate == funding_rate + assert stream.products == products + + assert stream.is_active is True + assert stream.amount_claimable == 0 + assert stream.amount_refundable == total_funded + assert stream.time_left == stream_life + assert int(stream.last_update.timestamp()) == chain.blocks.head.timestamp + + # Mine to the end of the stream + chain.mine(deltatime=int(stream.time_left.total_seconds())) + + assert stream.is_active is False + assert stream.amount_claimable == total_funded + assert stream.amount_refundable == 0 + assert stream.time_left == timedelta(seconds=0) + + # Double check that if you call this at some point after the end, nothing changes + chain.mine(deltatime=60 * 60) + + assert stream.is_active is False + assert stream.amount_claimable == total_funded + assert stream.amount_refundable == 0 + assert stream.time_left == timedelta(seconds=0) + + +def test_fund_stream(chain, token, payer, stream, stream_life, funding_rate, accounts, controller): + old_expiry = stream.info.expires_at + ONE_HOUR = timedelta(hours=1) + amount = int( + Decimal(ONE_HOUR.total_seconds()) + * funding_rate + # NOTE: To undo the adjustment factor + * Decimal(10 ** token.decimals()) + ) + + assert stream.time_left == stream_life + + stream.add_funds(amount, sender=payer) + + # Move the time ahead to the old expiration + chain.mine(timestamp=old_expiry) + assert stream.is_active + assert stream.time_left == ONE_HOUR + + # Anyone can pay + somebody = accounts[3] + assert somebody != controller and somebody != payer + token.transfer(somebody, amount, sender=payer) # NOTE: payer made token + token.approve(stream.manager.address, amount, sender=somebody) + stream.add_funds(amount, sender=somebody) + + # Move the time ahead another hour, should still be time left + chain.mine(timestamp=old_expiry + int(ONE_HOUR.total_seconds())) + assert stream.is_active + assert stream.time_left == ONE_HOUR + + # Now move it to expiry + chain.mine(deltatime=int(ONE_HOUR.total_seconds())) + assert not stream.is_active + assert stream.time_left == 
timedelta(seconds=0) - stream_manager.remove_token(new_token, sender=owner) - assert not stream_manager.is_accepted(new_token) + # After expiring, no one can pay + with ape.reverts(): + stream.add_funds(amount, sender=payer) -@pytest.fixture(scope="session") -def create_stream(stream_manager, payer, MIN_STREAM_LIFE): - def create_stream( - token=None, amount_per_second=None, sender=None, allowance=(2**256 - 1), **extra_args - ): - if amount_per_second is None: - # NOTE: Maximum amount we can afford to send (using 1 hr pre-allocation) - amount_per_second = token.balanceOf(sender or payer) // MIN_STREAM_LIFE +def test_cancel_stream(chain, token, payer, controller, MIN_STREAM_LIFE, stream_life, stream): + # Ensure that we are at 1 second before Stream is cancellable + cancel_time = stream.last_update + MIN_STREAM_LIFE - timedelta(seconds=1) + chain.pending_timestamp = int(cancel_time.timestamp()) - if token.allowance(sender or payer, stream_manager.contract) != allowance: - token.approve(stream_manager.contract, allowance, sender=sender or payer) + # NOTE: `controller` starting balance is 0 + starting_balance = token.balanceOf(payer) + claimable = int( + stream.estimate_funding(cancel_time - stream.last_update) * (10 ** token.decimals()) + ) + refundable = stream.info.funded_amount - claimable + assert token.balanceOf(stream.contract) == refundable + claimable - return stream_manager.create( - token, - amount_per_second, - **extra_args, - sender=sender or payer, + with chain.isolate(): + tx = stream.cancel(b"Because I felt like it", sender=controller) + assert ( + tx.block.timestamp - stream.info.last_update == int(MIN_STREAM_LIFE.total_seconds()) - 1 ) - return create_stream - - -@pytest.mark.parametrize( - "extra_args", - [ - dict(), - dict(reason="Just trying out a reason"), - dict( - reason={ - "ecosystem_id": 13, - "custom_block_time": 10, - "bot_counts": {"1": 4, "10": 1, "42": 16}, - } - ), - dict(start_time=-1000), - ], -) -def test_create_stream(chain, payer, token, create_stream, MIN_STREAM_LIFE, extra_args): - with pytest.raises(apepay_exc.StreamLifeInsufficient): - create_stream(token, allowance=0, **extra_args) - - amount_per_second = token.balanceOf(payer) // MIN_STREAM_LIFE - - with pytest.raises(apepay_exc.StreamLifeInsufficient): - # NOTE: Performs approval - create_stream(token, amount_per_second=amount_per_second + 1, **extra_args) - - stream = create_stream(token, **extra_args) - start_time = chain.blocks.head.timestamp - - assert stream.token == token - assert stream.stream_id == 0 - assert stream.creator == payer - assert stream.amount_per_second == amount_per_second - assert stream.reason == extra_args.get("reason", "") + # Only claimable amount left in stream + assert stream.amount_refundable == 0 + assert stream.amount_claimable == token.balanceOf(stream.contract.address) == claimable + assert token.balanceOf(payer) == starting_balance + refundable + assert token.balanceOf(controller) == 0 # No claim happened + assert not stream.is_active - expected = datetime.fromtimestamp(start_time + extra_args.get("start_time", 0)) - assert stream.start_time - expected <= timedelta(seconds=1), "Unexpected start time" + with ape.reverts(): + # Payer has to wait `MIN_STREAM_LIFE` + stream.cancel(sender=payer) + # Now, teleport to the cancellable threshold + cancel_time = stream.last_update + MIN_STREAM_LIFE + chain.pending_timestamp = int(cancel_time.timestamp()) -@pytest.fixture -def stream(create_stream, token, payer, MIN_STREAM_LIFE): - # NOTE: Use 2 hour stream life - 
amount_per_second = token.balanceOf(payer) // (2 * MIN_STREAM_LIFE)
-    return create_stream(token, amount_per_second=amount_per_second)
+    if stream_life == MIN_STREAM_LIFE:
+        chain.mine()  # Make sure we are at this block specifically
+        assert stream.amount_refundable == 0
+        assert stream.amount_claimable == token.balanceOf(stream.contract.address)
+        assert token.balanceOf(controller) == 0  # No claim happened
+        assert not stream.is_active
 
+        with ape.reverts():
+            stream.cancel(sender=controller)
 
-def test_cancel_stream(chain, token, payer, starting_balance, owner, MIN_STREAM_LIFE, stream):
-    with chain.isolate():
-        # Owner can cancel at any time
-        stream.cancel(b"Because I felt like it", payer, sender=owner)
-        assert token.balanceOf(stream.contract) == stream.amount_unlocked
-        assert token.balanceOf(payer) == starting_balance - stream.amount_unlocked
-        assert not stream.is_active
+        with ape.reverts():
+            stream.cancel(sender=payer)
 
-    with ape.reverts():
-        # Payer has to wait `MIN_STREAM_LIFE`
-        stream.cancel(sender=payer)
+        return  # NOTE: Skip rest of test because the stream can no longer be cancelled once expired
 
-    chain.pending_timestamp += MIN_STREAM_LIFE
+    claimable = int(
+        stream.estimate_funding(cancel_time - stream.last_update) * (10 ** token.decimals())
+    )
+    refundable = stream.info.funded_amount - claimable
+    assert token.balanceOf(stream.contract) == refundable + claimable
 
     with chain.isolate():
         # Owner can still cancel at any time
-        stream.cancel(b"Because I felt like it", payer, sender=owner)
-        assert token.balanceOf(stream.contract) == stream.amount_unlocked
-        assert token.balanceOf(payer) + stream.amount_unlocked == starting_balance
+        stream.cancel(b"Because I felt like it", sender=controller)
+        assert stream.amount_refundable == 0
+        assert stream.amount_claimable == token.balanceOf(stream.contract.address) == claimable
+        assert token.balanceOf(payer) == starting_balance + refundable
+        assert token.balanceOf(controller) == 0  # No claim happened
         assert not stream.is_active
 
         # Payer can cancel after `MIN_STREAM_LIFE`
         stream.cancel(sender=payer)
-        assert token.balanceOf(stream.contract) == stream.amount_unlocked
-        assert token.balanceOf(payer) + stream.amount_unlocked == starting_balance
+        assert stream.amount_refundable == 0
+        assert stream.amount_claimable == token.balanceOf(stream.contract.address) == claimable
+        assert token.balanceOf(payer) == starting_balance + refundable
+        assert token.balanceOf(controller) == 0  # No claim happened
         assert not stream.is_active
diff --git a/tests/test_validators.py b/tests/test_validators.py
new file mode 100644
index 0000000..5e2d57f
--- /dev/null
+++ b/tests/test_validators.py
@@ -0,0 +1,27 @@
+import pytest
+
+
+@pytest.mark.parametrize("num_validators", [1, 2, 3])
+def test_set_validators(stream_manager, controller, create_validator, num_validators):
+    new_validators = [create_validator() for _ in range(num_validators)]
+    assert all(v not in stream_manager.validators for v in new_validators)
+    assert len(stream_manager.validators) == 1
+
+    stream_manager.add_validators(*new_validators, sender=controller)
+    assert all(v in stream_manager.validators for v in new_validators)
+    assert len(stream_manager.validators) == num_validators + 1
+
+    stream_manager.remove_validators(*new_validators, sender=controller)
+    assert all(v not in stream_manager.validators for v in new_validators)
+    assert len(stream_manager.validators) == 1
+
+    stream_manager.set_validators(*new_validators, sender=controller)
+    assert all(v in stream_manager.validators for v in
new_validators)
+    assert len(stream_manager.validators) == num_validators
+
+    obsolete = new_validators[0]
+    replacement = create_validator()
+    stream_manager.replace_validator(obsolete, replacement, sender=controller)
+    assert obsolete not in stream_manager.validators
+    assert replacement in stream_manager.validators
+    assert len(stream_manager.validators) == len(new_validators)
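
For orientation, here is a rough sketch of how the reworked Python SDK above might be driven end-to-end. It is not part of the diff: the manager address, token address, account alias, and product code are placeholder values, and the flow simply mirrors what the `tests/conftest.py` fixtures do.

```python
# Rough usage sketch only (not part of this diff): addresses, the account
# alias, and the product code below are placeholders for illustration.
from ape import Contract, accounts
from ape.types import HexBytes

from apepay import StreamManager

sm = StreamManager("0x0000000000000000000000000000000000000000")  # placeholder address
payer = accounts.load("my-account")  # placeholder account alias
token = Contract("0x0000000000000000000000000000000000000001")  # placeholder ERC-20

# One 32-byte product code, shaped the same way the test fixtures build theirs
products = [HexBytes(b"\x00" * 25 + b"\x01" + b"\x00" * 6)]
amount = token.balanceOf(payer) // 10

# `create` checks that the token is accepted, that the payer has enough
# balance/allowance, and that the validator-computed stream life meets
# MIN_STREAM_LIFE before submitting the transaction
token.approve(sm.address, amount, sender=payer)
stream = sm.create(token, amount, products, sender=payer)

print(stream.owner, stream.time_left, stream.amount_claimable)
```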