Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pdr_backend/analytics/check_network.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ def do_query_network(subgraph_url: str, lookback_hours: int):
start_ut = cur_ut - lookback_hours * 60 * 60
query = """
{
predictContracts{
predictContracts(where: {paused: false}){
id
token{
name
Expand Down
40 changes: 38 additions & 2 deletions pdr_backend/cli/cli_arguments.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,9 @@
pdr predictoor PPSS_FILE NETWORK
pdr dashboard PPSS_FILE NETWORK
pdr trader APPROACH PPSS_FILE NETWORK
pdr claim_payouts PPSS_FILE
pdr claim_payouts PPSS_FILE [--include_paused]
pdr claim_ROSE PPSS_FILE
pdr pause_predictions ADDRESSES PPSS_FILE NETWORK
"""

HELP_HELP = """
Expand Down Expand Up @@ -276,6 +277,17 @@ def add_argument_NATIVE_TOKEN(self):
)


@enforce_types
class INCLUDE_PAUSED_Mixin:
    """Mixin that contributes the optional --include_paused flag (off by default)."""

    def add_argument_INCLUDE_PAUSED(self):
        # store_true: flag presence flips the value to True, absence keeps False
        flag_kwargs = {
            "action": "store_true",
            "default": False,
            "help": "Include paused contracts when querying for payouts",
        }
        self.add_argument("--include_paused", **flag_kwargs)


# ========================================================================
# argparser base classes
class CustomArgParser(NestedArgParser):
Expand Down Expand Up @@ -554,7 +566,15 @@ def print_args(arguments: Namespace, nested_args: dict):
SimArgParser = _ArgParser_PPSS
PredictoorArgParser = _ArgParser_PPSS_NETWORK
TraderArgParser = _ArgParser_APPROACH_PPSS_NETWORK
ClaimOceanArgParser = _ArgParser_PPSS


@enforce_types
class ClaimOceanArgParser(CustomArgParser, PPSS_Mixin, INCLUDE_PAUSED_Mixin):
    """Argparser for claim_payouts: PPSS file plus the --include_paused flag."""

    def __init__(self, description: str, command_name: str):
        super().__init__(description=description)
        wanted_args = ["PPSS", "INCLUDE_PAUSED"]
        self.add_arguments_bulk(command_name, wanted_args)


ClaimRoseArgParser = _ArgParser_PPSS

# power tools
Expand Down Expand Up @@ -641,6 +661,19 @@ def __init__(self, description: str, command_name: str):
)


class PausePredictionsArgParser(
    CustomArgParser, ACCOUNTS_Mixin, PPSS_Mixin, NETWORK_Mixin
):
    """Argparser for pause_predictions: ACCOUNTS list, PPSS file, and NETWORK."""

    # pylint: disable=unused-argument
    def __init__(self, description: str, command_name: str):
        super().__init__(description=description)
        wanted_args = ["ACCOUNTS", "PPSS", "NETWORK"]
        self.add_arguments_bulk(command_name, wanted_args)


# below, list each entry in defined_parsers in same order as HELP_LONG
defined_parsers = {
# main tools
Expand Down Expand Up @@ -690,6 +723,9 @@ def __init__(self, description: str, command_name: str):
"do_dashboard": PredictoorDashboardArgParser(
"Visualize Predictoor data", "dashboard"
),
"do_pause_predictions": PausePredictionsArgParser(
"Pause predictions for multiple contracts", "pause_predictions"
),
}


Expand Down
12 changes: 11 additions & 1 deletion pdr_backend/cli/cli_module.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
from pdr_backend.ppss.ppss import PPSS
from pdr_backend.pred_submitter.deploy import deploy_pred_submitter_mgr_contract
from pdr_backend.predictoor.predictoor_agent import PredictoorAgent
from pdr_backend.publisher.pause_predictions import pause_predictions
from pdr_backend.publisher.publish_assets import publish_assets
from pdr_backend.sim.multisim_engine import MultisimEngine
from pdr_backend.sim.sim_dash import sim_dash
Expand Down Expand Up @@ -129,7 +130,8 @@ def do_claim_payouts(args, nested_args=None):
network="sapphire-mainnet",
nested_override_args=nested_args,
)
do_ocean_payout(ppss)
include_paused = args.include_paused
do_ocean_payout(ppss, include_paused=include_paused)


@enforce_types
Expand Down Expand Up @@ -308,3 +310,11 @@ def do_arima_plots(args, nested_args=None):
# pylint: disable=unused-argument
def do_dashboard(args, nested_args=None):
predictoor_dash(args.PPSS, args.debug_mode)


@enforce_types
# pylint: disable=unused-argument
def do_pause_predictions(args, nested_args=None):
    """CLI entry point: pause predictions on each feed contract in args.ACCOUNTS."""
    web3_pp = args.PPSS.web3_pp
    pause_predictions(web3_pp, args.ACCOUNTS)
31 changes: 30 additions & 1 deletion pdr_backend/cli/test/test_cli_module.py
Original file line number Diff line number Diff line change
Expand Up @@ -110,14 +110,30 @@ class _NATIVE_TOKEN:
NATIVE_TOKEN = True


class _INCLUDE_PAUSED:
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I suggest renaming this to '_INCLUDE_PAUSED_FALSE'

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Why

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Easier to read, but not a mandatory change

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

But why would we add "false" to the end? I don't get it

include_paused = False


class _INCLUDE_PAUSED_TRUE:
include_paused = True


class _Base:
def __init__(self, *args, **kwargs):
pass


class MockArgParser_PPSS(_Base):
    """Mock argparser: parse_args() yields PPSS args with include_paused=False."""

    def parse_args(self):
        # Compose the namespace from mixins; _INCLUDE_PAUSED supplies the default
        # include_paused=False, matching the real parser's store_true default.
        class MockArgs(Namespace, _PPSS, _PPSS_OBJ, _INCLUDE_PAUSED):
            pass

        return MockArgs()


class MockArgParser_PPSS_WITH_PAUSED(_Base):
    """Mock argparser: parse_args() yields PPSS args with include_paused=True."""

    def parse_args(self):
        # _INCLUDE_PAUSED_TRUE simulates the user passing --include_paused
        class MockArgs(Namespace, _PPSS, _PPSS_OBJ, _INCLUDE_PAUSED_TRUE):
            pass

        return MockArgs()
Expand Down Expand Up @@ -265,6 +281,19 @@ def test_do_claim_payouts(monkeypatch):

do_claim_payouts(MockArgParser_PPSS().parse_args())
mock_f.assert_called()
# Verify it was called with include_paused=False by default
assert mock_f.call_args[1]["include_paused"] is False


@enforce_types
def test_do_claim_payouts_with_include_paused(monkeypatch):
    """do_claim_payouts must forward include_paused=True from the parsed args."""
    mock_payout = Mock()
    monkeypatch.setattr(f"{_CLI_PATH}.do_ocean_payout", mock_payout)

    args = MockArgParser_PPSS_WITH_PAUSED().parse_args()
    do_claim_payouts(args)

    mock_payout.assert_called()
    # include_paused must arrive as a keyword argument set to True
    assert mock_payout.call_args.kwargs["include_paused"] is True


@enforce_types
Expand Down
17 changes: 17 additions & 0 deletions pdr_backend/contract/feed_contract.py
Original file line number Diff line number Diff line change
Expand Up @@ -386,6 +386,23 @@ def erc721_addr(self) -> str:
"""What's the ERC721 address from which this ERC20 feed was created?"""
return self.contract_instance.functions.getERC721Address().call()

def pause_predictions(self, wait_for_receipt=True):
    """Pause predictions for this feed. Can only be called by the owner.

    @arguments
      wait_for_receipt -- if True, block until the tx receipt is available

    @return
      tx receipt (wait_for_receipt=True), tx hash (False), or None on error
    """
    call_params = self.web3_pp.tx_call_params()
    try:
        pause_fn = self.contract_instance.functions.pausePredictions()
        tx = pause_fn.transact(call_params)
        logger.info("Pause predictions: txhash=%s", tx.hex())

        if wait_for_receipt:
            return self.config.w3.eth.wait_for_transaction_receipt(tx)
        return tx
    except Exception as e:
        # best-effort: log and signal failure with None rather than raising
        logger.error(e)
        return None


# =========================================================================
# utilities for testing
Expand Down
14 changes: 10 additions & 4 deletions pdr_backend/payout/payout.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,9 @@ def request_payout_batches(


@enforce_types
def find_slots_and_payout_with_mgr(pred_submitter_mgr, ppss, query_old_slots=False):
def find_slots_and_payout_with_mgr(
pred_submitter_mgr, ppss, query_old_slots=False, include_paused=False
):
# we only need to query in one direction, since both predict on the same slots
# query_old_slots is by default false to improve bot speed,
# running the command line argument will set it to true
Expand All @@ -65,7 +67,9 @@ def find_slots_and_payout_with_mgr(pred_submitter_mgr, ppss, query_old_slots=Fal
logger.info("Starting payout")
wait_until_subgraph_syncs(web3_config, subgraph_url)
logger.info("Finding pending payouts")
pending_slots = query_pending_payouts(subgraph_url, up_addr, query_old_slots)
pending_slots = query_pending_payouts(
subgraph_url, up_addr, query_old_slots, include_paused
)
payout_batch_size = ppss.predictoor_ss.payout_batch_size
shared_slots = find_shared_slots(pending_slots, payout_batch_size)
unique_slots = count_unique_slots(shared_slots)
Expand All @@ -88,7 +92,9 @@ def find_slots_and_payout_with_mgr(pred_submitter_mgr, ppss, query_old_slots=Fal


@enforce_types
def do_ocean_payout(ppss: PPSS, check_network: bool = True):
def do_ocean_payout(
ppss: PPSS, check_network: bool = True, include_paused: bool = False
):
web3_config = ppss.web3_pp.web3_config
if check_network:
assert ppss.web3_pp.network == "sapphire-mainnet"
Expand All @@ -97,7 +103,7 @@ def do_ocean_payout(ppss: PPSS, check_network: bool = True):
pred_submitter_mgr_addr = ppss.predictoor_ss.pred_submitter_mgr
pred_submitter_mgr = PredSubmitterMgr(ppss.web3_pp, pred_submitter_mgr_addr)

find_slots_and_payout_with_mgr(pred_submitter_mgr, ppss, True)
find_slots_and_payout_with_mgr(pred_submitter_mgr, ppss, True, include_paused)


@enforce_types
Expand Down
48 changes: 48 additions & 0 deletions pdr_backend/publisher/pause_predictions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
import logging
from typing import List

from enforce_typing import enforce_types

from pdr_backend.contract.feed_contract import FeedContract
from pdr_backend.ppss.web3_pp import Web3PP

logger = logging.getLogger("pause_predictions")


@enforce_types
def pause_predictions(web3_pp: Web3PP, contract_addresses: List[str]):
    """
    Pause predictions for a list of feed contracts.

    @arguments
      web3_pp: Web3PP instance with network configuration
      contract_addresses: List of contract addresses to pause

    @return
      successful -- addresses whose pause tx completed (receipt returned)
      failed -- addresses that raised, or where pause returned None

    NOTE: returning the outcome lists is backward-compatible (the CLI caller
    ignores the return value) and lets programmatic callers inspect results.
    """
    logger.info("Pausing predictions on network = %s", web3_pp.network)
    logger.info("Number of contracts to pause: %d", len(contract_addresses))

    successful: List[str] = []
    failed: List[str] = []

    for address in contract_addresses:
        logger.info("Pausing predictions for contract: %s", address)
        try:
            feed_contract = FeedContract(web3_pp, address)
            tx = feed_contract.pause_predictions(wait_for_receipt=True)

            if tx is not None:
                logger.info("Successfully paused predictions for %s", address)
                successful.append(address)
            else:
                # FeedContract.pause_predictions() returns None on tx error
                logger.error("Failed to pause predictions for %s", address)
                failed.append(address)
        except Exception as e:
            # best-effort: one bad address must not block the remaining ones
            logger.error("Error pausing predictions for %s: %s", address, e)
            failed.append(address)

    logger.info("Done pausing predictions.")
    logger.info("Successfully paused: %d", len(successful))
    logger.info("Failed to pause: %d", len(failed))

    if failed:
        logger.warning("Failed contracts: %s", failed)

    return successful, failed
2 changes: 1 addition & 1 deletion pdr_backend/publisher/publish_assets.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@

logger = logging.getLogger("publisher")
_CUT = Eth(0.2)
_RATE = Eth(3 / (1 + float(_CUT) + 0.001)) # token price
_RATE = Eth(1 / (1 + float(_CUT) + 0.001)) # token price
_S_PER_SUBSCRIPTION = 60 * 60 * 24


Expand Down
2 changes: 1 addition & 1 deletion pdr_backend/publisher/test/test_publish_assets.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ def _test_barge(network, monkeypatch):
feed=ArgFeed("binance", "close", "ETH/USDT", "5m"),
trueval_submitter_addr="0xe2DD09d719Da89e5a3D0F2549c7E24566e947260",
feeCollector_addr="0xe2DD09d719Da89e5a3D0F2549c7E24566e947260",
rate=Eth(3 / (1 + 0.2 + 0.001)),
rate=Eth(1 / (1 + 0.2 + 0.001)),
cut=Eth(0.2),
web3_pp=web3_pp,
)
Expand Down
1 change: 1 addition & 0 deletions pdr_backend/subgraph/legacy/subgraph_slot.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ def get_predict_slots_query(
slot_lte: {initial_slot}
slot_gte: {last_slot}
predictContract_in: {asset_ids_str}
predictContract_: {{paused: false}}
}}
) {{
id
Expand Down
2 changes: 1 addition & 1 deletion pdr_backend/subgraph/subgraph_consume_so_far.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ def get_consume_so_far_per_contract(
while True:
query = """
{
predictSubscriptions(where: {timestamp_gt:%s, user_:{id: "%s"}}, first: %s, skip: %s){
predictSubscriptions(where: {timestamp_gt:%s, user_:{id: "%s"}, predictContract_: {paused: false}}, first: %s, skip: %s){
id
timestamp
user {
Expand Down
2 changes: 1 addition & 1 deletion pdr_backend/subgraph/subgraph_dfbuyer.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ def get_consume_so_far(
while True: # pylint: disable=too-many-nested-blocks
query = """
{
predictContracts(skip:%s, first:%s){
predictContracts(where: {paused: false}, skip:%s, first:%s){
id
token{
orders(where: {createdTimestamp_gt:%s, consumer_in:["%s"]}){
Expand Down
2 changes: 1 addition & 1 deletion pdr_backend/subgraph/subgraph_feed_contracts.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ def query_feed_contracts(
while True:
query = """
{
predictContracts(skip:%s, first:%s){
predictContracts(where: {paused: false}, skip:%s, first:%s){
id
token {
id
Expand Down
3 changes: 2 additions & 1 deletion pdr_backend/subgraph/subgraph_payout.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,8 @@ def get_payout_query(
{
timestamp_gte: %s,
timestamp_lte: %s,
prediction_contains: "%s"
prediction_contains: "%s",
prediction_: {slot_: {predictContract_: {paused: false}}}
}
"""
% (start_ts, end_ts, asset_id)
Expand Down
Loading