From 8de28fdc515c3ca32725f7c6fdf006b1663c509f Mon Sep 17 00:00:00 2001 From: Jeannette Date: Fri, 28 Jun 2024 14:48:02 -0500 Subject: [PATCH] Added lifecycle, address and keys libraries --- .gitmodules => .gitmodulesOLD | 0 Dockerfile | 7 +- Pipfile | 20 ++ chief_keeper/chief_keeper.py | 413 +++++++++++++------------ chief_keeper/database/db_mainnet.json | 2 +- chief_keeper/utils/__init__.py | 5 + chief_keeper/utils/address_utils.py | 41 +++ chief_keeper/utils/keeper_lifecycle.py | 138 +++++++++ chief_keeper/utils/register_keys.py | 85 +++++ requirements.txt | 18 +- start-up.sh | 8 + 11 files changed, 518 insertions(+), 219 deletions(-) rename .gitmodules => .gitmodulesOLD (100%) create mode 100644 Pipfile create mode 100644 chief_keeper/utils/__init__.py create mode 100644 chief_keeper/utils/address_utils.py create mode 100644 chief_keeper/utils/keeper_lifecycle.py create mode 100644 chief_keeper/utils/register_keys.py create mode 100755 start-up.sh diff --git a/.gitmodules b/.gitmodulesOLD similarity index 100% rename from .gitmodules rename to .gitmodulesOLD diff --git a/Dockerfile b/Dockerfile index f15a333..91610fb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,10 @@ # Use an official Python runtime as a parent image -FROM python:3.9-buster +FROM python:3.11-bullseye # Add user and group for running the application RUN groupadd -r keeper && useradd -d /home/keeper -m --no-log-init -r -g keeper keeper && \ apt-get update -y && \ - apt-get install -y jshon jq pkg-config openssl libssl-dev autoconf libtool libsecp256k1-dev && \ + apt-get install -y git jq pkg-config openssl libssl-dev autoconf libtool && \ apt-get clean && rm -rf /var/lib/apt/lists/* # Set the working directory in the container to /opt/keeper/chief-keeper @@ -14,8 +14,7 @@ WORKDIR /opt/keeper/chief-keeper COPY . . 
# Install submodules -RUN git config --global --add safe.directory /opt/keeper/chief-keeper && \ - git submodule update --init --recursive +RUN git config --global --add safe.directory /opt/keeper/chief-keeper # Install any needed packages specified in requirements.txt # First copy only the requirements.txt to leverage Docker cache diff --git a/Pipfile b/Pipfile new file mode 100644 index 0000000..c4d50b9 --- /dev/null +++ b/Pipfile @@ -0,0 +1,20 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +[packages] +jsonnet = "*" +requests = "*" +web3 = "*" +eth-abi = "==2.1.1" +eth-utils = "<2.0.0,>=1.9.5" +eth-testrpc = "==1.3.0" +rlp = "==1.2.0" +tinydb = "*" +pytz = "==2017.3" + +[dev-packages] + +[requires] +python_version = "3.9" diff --git a/chief_keeper/chief_keeper.py b/chief_keeper/chief_keeper.py index b6c9553..dfa7d70 100644 --- a/chief_keeper/chief_keeper.py +++ b/chief_keeper/chief_keeper.py @@ -31,12 +31,14 @@ from chief_keeper.database import SimpleDatabase from chief_keeper.spell import DSSSpell -from pymaker import Address, web3_via_http -from pymaker.util import is_contract_at -from pymaker.gas import GeometricGasPrice -from pymaker.keys import register_keys -from pymaker.lifecycle import Lifecycle -from pymaker.deployment import DssDeployment +from .utils.keeper_lifecycle import Lifecycle +from. 
utils.register_keys import register_keys + +# from pymaker import Address, web3_via_http +# from pymaker.util import is_contract_at +# from pymaker.gas import GeometricGasPrice +# from pymaker.keys import register_keys +# from pymaker.deployment import DssDeployment HEALTHCHECK_FILE_PATH = "/tmp/health.log" BACKOFF_MAX_TIME = 120 @@ -112,18 +114,18 @@ def __init__(self, args: list, **kwargs): # Set the Ethereum address and register keys # self.web3.eth.defaultAccount = self.arguments.eth_from # register_keys(self.web3, self.arguments.eth_key) - self.our_address = Address(self.arguments.eth_from) - - if self.arguments.dss_deployment_file: - self.dss = DssDeployment.from_json( - web3=self.web3, - conf=open(self.arguments.dss_deployment_file, "r").read(), - ) - else: - self.dss = DssDeployment.from_network( - web3=self.web3, network=self.arguments.network - ) - self.logger.info(f"DS-Chief: {self.dss.ds_chief.address}") + # self.our_address = Address(self.arguments.eth_from) + + # if self.arguments.dss_deployment_file: + # self.dss = DssDeployment.from_json( + # web3=self.web3, + # conf=open(self.arguments.dss_deployment_file, "r").read(), + # ) + # else: + # self.dss = DssDeployment.from_network( + # web3=self.web3, network=self.arguments.network + # ) + # self.logger.info(f"DS-Chief: {self.dss.ds_chief.address}") self.deployment_block = self.arguments.chief_deployment_block self.max_errors = self.arguments.max_errors @@ -165,7 +167,7 @@ def _connect_to_node(self, rpc_url, rpc_timeout, node_type): self.logger.error(f"Error connecting to Ethereum node: {e}") return False else: - if _web3.isConnected(): + if _web3.is_connected(): self.web3 = _web3 self.node_type = node_type return self._configure_web3() @@ -188,197 +190,198 @@ def main(self): """Initialize the lifecycle and enter into the Keeper Lifecycle controller. Each function supplied by the lifecycle will accept a callback function that will be executed. 
+ The lifecycle.on_startup() function will execute initilization steps. The lifecycle.on_block() function will enter into an infinite loop, but will gracefully shutdown if it recieves a SIGINT/SIGTERM signal. """ - + self.logger.info("Main") with Lifecycle(self.web3) as lifecycle: self.lifecycle = lifecycle - lifecycle.on_startup(self.check_deployment) - lifecycle.on_block(self.process_block) - - def check_deployment(self): - self.logger.info("") - self.logger.info("Please confirm the deployment details") - self.logger.info( - f"Keeper Balance: {self.web3.eth.getBalance(self.our_address.address) / (10**18)} ETH" - ) - self.logger.info(f"DS-Chief: {self.dss.ds_chief.address}") - self.logger.info(f"DS-Pause: {self.dss.pause.address}") - self.logger.info("") - self.initial_query() - - def initial_query(self): - """Updates a locally stored database with the DS-Chief state since its last update. - If a local database is not found, create one and query the DS-Chief state since its deployment. - """ - self.logger.info("") - self.logger.info( - "Querying DS-Chief state since last update ( !! Could take up to 15 minutes !! 
)" - ) - - self.database = SimpleDatabase( - self.web3, self.deployment_block, self.arguments.network, self.dss - ) - result = self.database.create() - - self.logger.info(result) - - def get_initial_tip(self, arguments) -> int: - try: - result = requests.get( - url='https://api.blocknative.com/gasprices/blockprices', - headers={ - 'Authorization': arguments.blocknative_api_key - }, - timeout=15 - ) - if result.ok and result.content: - confidence_80_tip = result.json().get('blockPrices')[0]['estimatedPrices'][3]['maxPriorityFeePerGas'] - self.logger.info(f"Using Blocknative 80% confidence tip {confidence_80_tip}") - self.logger.info(int(confidence_80_tip * GeometricGasPrice.GWEI)) - return int(confidence_80_tip * GeometricGasPrice.GWEI) - except Exception as e: - logging.error(str(e)) - - return int(1.5 * GeometricGasPrice.GWEI) - - - @healthy - def process_block(self): - """Callback called on each new block. If too many errors, terminate the keeper. - This is the entrypoint to the Keeper's monitoring logic - """ - try: - isConnected = self.web3.isConnected() - self.logger.info(f'web3 isConnected: {isConnected}') - - if self.errors >= self.max_errors: - self.lifecycle.terminate() - else: - self.check_hat() - self.check_eta() - except (TimeExhausted, Exception) as e: - self.logger.error(f"Error processing block: {e}") - self.errors += 1 - - def check_hat(self): - """Ensures the Hat is on the proposal (spell, EOA, multisig, etc) with the most approval. - - First, the local database is updated with proposal addresses (yays) that have been `etched` in DSChief between - the last block reviewed and the most recent block receieved. Next, it simply traverses through each address, - checking if its approval has surpased the current Hat. If it has, it will `lift` the hat. 
- - If the current or new hat hasn't been casted nor plotted in the pause, it will `schedule` the spell - """ - blockNumber = self.web3.eth.blockNumber - self.logger.info(f"Checking Hat on block {blockNumber}") - - try: - self.database.update_db_yays(blockNumber) - except (TimeExhausted, Exception) as e: - self.logger.error(f"Error updating database yays: {e}") - self.errors += 1 - return - - yays = self.database.db.get(doc_id=2)["yays"] - - hat = self.dss.ds_chief.get_hat().address - hatApprovals = self.dss.ds_chief.get_approvals(hat) - - contender, highestApprovals = hat, hatApprovals - - gas_strategy = GeometricGasPrice( - web3=self.web3, - initial_price=None, - initial_tip=self.get_initial_tip(self.arguments), - every_secs=180 - ) - - for yay in yays: - contenderApprovals = self.dss.ds_chief.get_approvals(yay) - if contenderApprovals > highestApprovals: - contender = yay - highestApprovals = contenderApprovals - - if contender != hat: - self.logger.info(f"Lifting hat") - self.logger.info(f"Old hat ({hat}) with Approvals {hatApprovals}") - self.logger.info(f"New hat ({contender}) with Approvals {highestApprovals}") - self.dss.ds_chief.lift(Address(contender)).transact( - gas_strategy=gas_strategy - ) - else: - self.logger.info(f"Current hat ({hat}) with Approvals {hatApprovals}") - - # Read the hat; either is equivalent to the contender or old hat - hatNew = self.dss.ds_chief.get_hat().address - if hatNew != hat: - self.logger.info(f"Confirmed ({contender}) now has the hat") - - spell = ( - DSSSpell(self.web3, Address(hatNew)) - if is_contract_at(self.web3, Address(hatNew)) - else None - ) - - # Schedules spells that haven't been scheduled nor casted - if spell is not None: - # Functional with DSSSpells but not DSSpells (not compatiable with DSPause) - if spell.done() == False and self.database.get_eta_inUnix(spell) == 0: - self.logger.info(f"Scheduling spell ({yay})") - spell.schedule().transact(gas_strategy=gas_strategy) - else: - self.logger.warning( - 
f"Spell is an EOA or 0x0, so keeper will not attempt to call schedule()" - ) - - def check_eta(self): - """Cast spells that meet their schedule. - - First, the local database is updated with spells that have been scheduled between the last block - reviewed and the most recent block receieved. Next, it simply traverses through each spell address, - checking if its schedule has been reached/passed. If it has, it attempts to `cast` the spell. - """ - blockNumber = self.web3.eth.blockNumber - now = self.web3.eth.getBlock(blockNumber).timestamp - self.logger.info(f"Checking scheduled spells on block {blockNumber}") - - self.database.update_db_etas(blockNumber) - etas = self.database.db.get(doc_id=3)["upcoming_etas"] - - yays = list(etas.keys()) - - for yay in yays: - if etas[yay] <= now: - spell = ( - DSSSpell(self.web3, Address(yay)) - if is_contract_at(self.web3, Address(yay)) - else None - ) - - if spell is not None: - gas_strategy = GeometricGasPrice( - web3=self.web3, - initial_price=None, - initial_tip=self.get_initial_tip(self.arguments), - every_secs=180 - ) - if spell.done() == False: - self.logger.info(f"Casting spell ({spell.address.address})") - receipt = spell.cast().transact(gas_strategy=gas_strategy) - - if receipt is None or receipt.successful == True: - del etas[yay] - else: - del etas[yay] - else: - self.logger.warning( - f"Spell is an EOA or 0x0, so keeper will not attempt to call cast()" - ) - del etas[yay] - - self.database.db.update({"upcoming_etas": etas}, doc_ids=[3]) + # lifecycle.on_startup(self.check_deployment) + # lifecycle.on_block(self.process_block) + + # def check_deployment(self): + # self.logger.info("") + # self.logger.info("Please confirm the deployment details") + # self.logger.info( + # f"Keeper Balance: {self.web3.eth.getBalance(self.our_address.address) / (10**18)} ETH" + # ) + # self.logger.info(f"DS-Chief: {self.dss.ds_chief.address}") + # self.logger.info(f"DS-Pause: {self.dss.pause.address}") + # self.logger.info("") + # 
self.initial_query() + + # def initial_query(self): + # """Updates a locally stored database with the DS-Chief state since its last update. + # If a local database is not found, create one and query the DS-Chief state since its deployment. + # """ + # self.logger.info("") + # self.logger.info( + # "Querying DS-Chief state since last update ( !! Could take up to 15 minutes !! )" + # ) + + # self.database = SimpleDatabase( + # self.web3, self.deployment_block, self.arguments.network, self.dss + # ) + # result = self.database.create() + + # self.logger.info(result) + + # def get_initial_tip(self, arguments) -> int: + # try: + # result = requests.get( + # url='https://api.blocknative.com/gasprices/blockprices', + # headers={ + # 'Authorization': arguments.blocknative_api_key + # }, + # timeout=15 + # ) + # if result.ok and result.content: + # confidence_80_tip = result.json().get('blockPrices')[0]['estimatedPrices'][3]['maxPriorityFeePerGas'] + # self.logger.info(f"Using Blocknative 80% confidence tip {confidence_80_tip}") + # self.logger.info(int(confidence_80_tip * GeometricGasPrice.GWEI)) + # return int(confidence_80_tip * GeometricGasPrice.GWEI) + # except Exception as e: + # logging.error(str(e)) + + # return int(1.5 * GeometricGasPrice.GWEI) + + + # @healthy + # def process_block(self): + # """Callback called on each new block. If too many errors, terminate the keeper. + # This is the entrypoint to the Keeper's monitoring logic + # """ + # try: + # isConnected = self.web3.isConnected() + # self.logger.info(f'web3 isConnected: {isConnected}') + + # if self.errors >= self.max_errors: + # self.lifecycle.terminate() + # else: + # self.check_hat() + # self.check_eta() + # except (TimeExhausted, Exception) as e: + # self.logger.error(f"Error processing block: {e}") + # self.errors += 1 + + # def check_hat(self): + # """Ensures the Hat is on the proposal (spell, EOA, multisig, etc) with the most approval. 
+ + # First, the local database is updated with proposal addresses (yays) that have been `etched` in DSChief between + # the last block reviewed and the most recent block receieved. Next, it simply traverses through each address, + # checking if its approval has surpased the current Hat. If it has, it will `lift` the hat. + + # If the current or new hat hasn't been casted nor plotted in the pause, it will `schedule` the spell + # """ + # blockNumber = self.web3.eth.blockNumber + # self.logger.info(f"Checking Hat on block {blockNumber}") + + # try: + # self.database.update_db_yays(blockNumber) + # except (TimeExhausted, Exception) as e: + # self.logger.error(f"Error updating database yays: {e}") + # self.errors += 1 + # return + + # yays = self.database.db.get(doc_id=2)["yays"] + + # hat = self.dss.ds_chief.get_hat().address + # hatApprovals = self.dss.ds_chief.get_approvals(hat) + + # contender, highestApprovals = hat, hatApprovals + + # gas_strategy = GeometricGasPrice( + # web3=self.web3, + # initial_price=None, + # initial_tip=self.get_initial_tip(self.arguments), + # every_secs=180 + # ) + + # for yay in yays: + # contenderApprovals = self.dss.ds_chief.get_approvals(yay) + # if contenderApprovals > highestApprovals: + # contender = yay + # highestApprovals = contenderApprovals + + # if contender != hat: + # self.logger.info(f"Lifting hat") + # self.logger.info(f"Old hat ({hat}) with Approvals {hatApprovals}") + # self.logger.info(f"New hat ({contender}) with Approvals {highestApprovals}") + # self.dss.ds_chief.lift(Address(contender)).transact( + # gas_strategy=gas_strategy + # ) + # else: + # self.logger.info(f"Current hat ({hat}) with Approvals {hatApprovals}") + + # # Read the hat; either is equivalent to the contender or old hat + # hatNew = self.dss.ds_chief.get_hat().address + # if hatNew != hat: + # self.logger.info(f"Confirmed ({contender}) now has the hat") + + # spell = ( + # DSSSpell(self.web3, Address(hatNew)) + # if is_contract_at(self.web3, 
Address(hatNew)) + # else None + # ) + + # # Schedules spells that haven't been scheduled nor casted + # if spell is not None: + # # Functional with DSSSpells but not DSSpells (not compatiable with DSPause) + # if spell.done() == False and self.database.get_eta_inUnix(spell) == 0: + # self.logger.info(f"Scheduling spell ({yay})") + # spell.schedule().transact(gas_strategy=gas_strategy) + # else: + # self.logger.warning( + # f"Spell is an EOA or 0x0, so keeper will not attempt to call schedule()" + # ) + + # def check_eta(self): + # """Cast spells that meet their schedule. + + # First, the local database is updated with spells that have been scheduled between the last block + # reviewed and the most recent block receieved. Next, it simply traverses through each spell address, + # checking if its schedule has been reached/passed. If it has, it attempts to `cast` the spell. + # """ + # blockNumber = self.web3.eth.blockNumber + # now = self.web3.eth.getBlock(blockNumber).timestamp + # self.logger.info(f"Checking scheduled spells on block {blockNumber}") + + # self.database.update_db_etas(blockNumber) + # etas = self.database.db.get(doc_id=3)["upcoming_etas"] + + # yays = list(etas.keys()) + + # for yay in yays: + # if etas[yay] <= now: + # spell = ( + # DSSSpell(self.web3, Address(yay)) + # if is_contract_at(self.web3, Address(yay)) + # else None + # ) + + # if spell is not None: + # gas_strategy = GeometricGasPrice( + # web3=self.web3, + # initial_price=None, + # initial_tip=self.get_initial_tip(self.arguments), + # every_secs=180 + # ) + # if spell.done() == False: + # self.logger.info(f"Casting spell ({spell.address.address})") + # receipt = spell.cast().transact(gas_strategy=gas_strategy) + + # if receipt is None or receipt.successful == True: + # del etas[yay] + # else: + # del etas[yay] + # else: + # self.logger.warning( + # f"Spell is an EOA or 0x0, so keeper will not attempt to call cast()" + # ) + # del etas[yay] + + # self.database.db.update({"upcoming_etas": 
etas}, doc_ids=[3]) if __name__ == "__main__": diff --git a/chief_keeper/database/db_mainnet.json b/chief_keeper/database/db_mainnet.json index c06a23a..c96afae 100644 --- a/chief_keeper/database/db_mainnet.json +++ b/chief_keeper/database/db_mainnet.json @@ -1 +1 @@ -{"_default": {"1": {"last_block_checked_for_yays": 19968781}, "2": {"yays": ["0x2A8E8588ae9d420656c49C910C2c820450a01F95", "0xFc5154dc5F980A3377374864fbE7a25AFc9Fe5ED", "0xD8CAe22CDC75ab61Ec7663a355D043De4277dC1E", "0xa3F971B97E1e8d98061e58809748d525D8AE295F", "0x77bdCbD18A064B97a71F3982D6ae981EF32e72D8", "0x2EaCCAC62E9D9a55417c35df3491Ec6F6343e311", "0xd79E320fDF9a738E9ADa8C6644563c3E1923B33d", "0xcf50ef07DebcB608b8b1133A13bF5AC823E47D9D", "0xE3d9458cf864C9028363464c7D45C66078F54240", "0xAa0087F0AB8ffD4A8F72Bb1bb09EE7b046566412", "0x0aF27f917864a4f05D18CeeF22F0d23CD73C10aD", "0x74c63Bb8610C7a7538733ECf915Fa217605C01a5", "0xf282371EA8eFDE8e47D78131c3Af70a6e2A28617", "0xe9a7e8AB8e2b8118eC1f108a370b7151635D06B7", "0xeDc016ACE4618284F289696545760AC0dB5bc79B", "0x22E9B678daF00B41f2127837A521F92f18802674", "0x46e5E4502f99867405fC239Ca695f1569ef4d3e6", "0x3FfE82e0Ea5ABC48c3C4cA9ceF2F9C81515e800b", "0x899E41f14eE23BF3Dca12bDe066F35994DC4e7bc", "0x3Ef533927c4FE49B3dA53aC3241322eB98F9717d", "0xae220fa15DEd4e6e63310D2f6C8d3DFAaF4F8A1b", "0x6fBe6d79BD2aeEfA4B6929bD47babf236023902e", "0x15732a23db6A0D54E3240F72913950c8c9a64c20", "0xB81ddbFF61420A181bB725D5F06cA99486Bb4e6a", "0xD665104a2800FaDFd232961d718E2108e8ec6BDD", "0x3920f14096E89b1bc2f4Ebf3A847c913170a1F92", "0xeC17e6122578A6E5B92e904aD46b03Fe07568359", "0x28498dDD408001A7011DA2edE4136104e035f2F4", "0xBd144df1031f1718d50715e0ade8856C7b06AaE1", "0xB6b375aCebC0068F7414Cc6c81f4392d0760c845", "0x6fC17C824f3E39dF6D21b2740B676755E71466d6", "0xf8587266d56F3e3480F693a0AD2e36E6E2c12cDA", "0x5a53768b01AEb799572998819638C63c8EEEea52", "0xC1136F15C605222793F60E328FE3F01Ec2c3F264", "0xca988c4D38d562DDeDEbFb7a44ec5a72a6698f5c", 
"0xbD01665f3768D793DA1Ffc4DCD70114B31A3954C", "0x784e85ef0f6e05Ff5252629c2e2649c89bdC88Da", "0xE460E2588d8cb9c460F63Bd2f746724bD0e55Aa0", "0x25222057EF01A3BDe7F59d10695da6dAe42C531A", "0x74eD57e3c12Ee6Eec8E583BFFF77Ea9D33A602e2", "0x20B4a73B34Fb2893D60D24c1b3996cbC2664F7cC", "0xE75A8Ae8075425b4D2F8f53B55996afb9157D980", "0xaE071879962A36d338ed89Bb0fF61D0A5b8C6201", "0x31a5577D98251c6C08CAcfEE6faE2e2D7A040b50", "0x2bd13D445aD7a40c393FAA93b43b24AA35DcD973", "0xBA57238e70116fB61fdE07387c6dD79AebF92E5E", "0xA453B4C7BA03FBea2a1ddAD12Bb3Bc5dC41cEee8", "0x58339aB7f27e66768713aF28E8e539Ca56432423", "0x87471BEd520edf4B54E0091Ff754E3E69666A9D6", "0xceafE30A12d00D35beCdE807c5e360ce5124A750", "0xB5010b4bC4e506B933C2f0aca7B8214089167e33", "0x3d6F4c40A2C53964ddB4D63fBB50FA854e048499", "0x5abf3e4C8d144DB9d3603A76e524671040Aa4859", "0x38e13DaF8024f682deA2022c849152BbD9b071DF", "0x7a87C5dd42Fed47b85e23C3F5C02b735FD69A38F", "0xFd27AE89d4976Bb11d4FB7e0e215Eced997F13a6", "0xcD0E28B9DFCc1ee71d9D6834EF66C1E103E94B27", "0xa5e85cAC3Fb448E16699C97DA86124bD288017d6", "0x606395F1B167AA73134b8A2B34C25Bb7D0564920", "0xCa9f54957c61b58843628051561896d0557D42c3", "0x7aB726CdF93443302CC4c90e83cb7ce4210bb724", "0xf00Ad3314E08e0E8E713D8155E3c867580cD811b", "0x6571e4B3432e856CdA8fae95dB3e23B573019e8B", "0x3DcD037bB1D3A898f6CbbE136ae2e1959FF74e3c", "0x6E2850E425d89836F1B10256C2C9E23415dc47F2", "0x9F978435542Ee71e8b2E8f1f51bB808dCF307D41", "0x8794E3EAeBeEbdFD9d0C601753f7Cd377aC69280", "0xa3104Af92F7C996f7FD73F6b87F16bE420d76c71", "0x17bcD46C9f85888E6169C54336fe3E91c604F1D5", "0xe01F7E3Ac096904EaC6Ac4497Ad2ab96f647bA87", "0xeb1F2F9dA1E1f932Ab11bc00F70A5Cd3607ef628", "0xc0F05F0E3DA5ff76fEC6C3Def889103a3709bfbF", "0xde4000CB884B237eFBd6f793584701230E1c45B3", "0x73b474fE6ff6844222471bC4CFD4Bd7A518B16C4", "0x81A64CE48C01d252B90E3d5531D448e446Bf3e42", "0x45e33477CD5aB5CeFA708cd977c1e398061D61cD", "0x30899738762d84343f615DE62c8D1283Cc3364da", "0x79Ba240EDc34f81DD56Ff153470e2be3DA91e88a", 
"0x4C3c8aCA2758799D697Ce83e63fdcCe0D52b3cd9", "0x483574D869BC34D2131032e65a3114A901928E91", "0xe7BbC8Fea57A92fC307D650D78e5481B25ccedff", "0x1EAD8a37d189a67B1736020131d4890833cF9103", "0x414c6e043c8580cA077250045a1C04b4745ac236", "0x4436A797F8E1cD87F3c674865Bc3eA1474C3B0B2", "0x043c52c8ff76C088646c8d2630eDdF1A8e33bA4C", "0x168Da8AFc9D925456c087999ED0f8041a2b7DeFA", "0xFA635D9093C2dd637CF19d48Df6EA1DBde56DDB1", "0xF44113760c4f70aFeEb412C63bC713B13E6e202E", "0xF3aB5E963E7c09E205927b9ba498Bb09afe3BC22", "0x902f009d4dE4a7828284B04b364dD43F00E51A02", "0xF267EFDDA842539a2cAff990259395188a86b813", "0xDD4Aa99077C5e976AFc22060EEafBBd1ba34eae9", "0x94c19E029F5A1A115F3B99aD87da24D33E60A0E1", "0x333c0501182170c5002219380ded6b12C338E272", "0x7A87aCB1f92c50297239EF9B0Ef9387105Bd4Fc5", "0x0000000000000000000000000000000000000000", "0x58401b64CA6b91E346c87B057254F040990c4F98", "0x6b8b3993cFB253968894C8EcB430CaF2455b51Aa", "0x437F5aAF195C97a01f85e672bb8e371484D96C57", "0x3ee0C26aE7aa8cCc759e4Ee4f1E6F2C16220e5f6", "0xD3F96B8Ffbf21033F5A6210C6349598AAdBd1152", "0x2f34BB0FE10BCb5652390FD0bA3Af7879BcA4b62", "0xb242159a9182e7FE0b72Fc035b336cFE060381B3", "0xA6dFB3E92BBD3Ae9098fda9AE3DDE4c727ec618a", "0x77583dc3D6192D55eF642060e82Af1D7A34BC142", "0xB394eC56AbD78c9264438168F8a8E1Bd85F1f0Ae"]}, "3": {"upcoming_etas": {}}}} \ No newline at end of file +{"_default": {"1": {"last_block_checked_for_yays": 20169761}, "2": {"yays": ["0x2A8E8588ae9d420656c49C910C2c820450a01F95", "0xFc5154dc5F980A3377374864fbE7a25AFc9Fe5ED", "0xD8CAe22CDC75ab61Ec7663a355D043De4277dC1E", "0xa3F971B97E1e8d98061e58809748d525D8AE295F", "0x77bdCbD18A064B97a71F3982D6ae981EF32e72D8", "0x2EaCCAC62E9D9a55417c35df3491Ec6F6343e311", "0xd79E320fDF9a738E9ADa8C6644563c3E1923B33d", "0xcf50ef07DebcB608b8b1133A13bF5AC823E47D9D", "0xE3d9458cf864C9028363464c7D45C66078F54240", "0xAa0087F0AB8ffD4A8F72Bb1bb09EE7b046566412", "0x0aF27f917864a4f05D18CeeF22F0d23CD73C10aD", "0x74c63Bb8610C7a7538733ECf915Fa217605C01a5", 
"0xf282371EA8eFDE8e47D78131c3Af70a6e2A28617", "0xe9a7e8AB8e2b8118eC1f108a370b7151635D06B7", "0xeDc016ACE4618284F289696545760AC0dB5bc79B", "0x22E9B678daF00B41f2127837A521F92f18802674", "0x46e5E4502f99867405fC239Ca695f1569ef4d3e6", "0x3FfE82e0Ea5ABC48c3C4cA9ceF2F9C81515e800b", "0x899E41f14eE23BF3Dca12bDe066F35994DC4e7bc", "0x3Ef533927c4FE49B3dA53aC3241322eB98F9717d", "0xae220fa15DEd4e6e63310D2f6C8d3DFAaF4F8A1b", "0x6fBe6d79BD2aeEfA4B6929bD47babf236023902e", "0x15732a23db6A0D54E3240F72913950c8c9a64c20", "0xB81ddbFF61420A181bB725D5F06cA99486Bb4e6a", "0xD665104a2800FaDFd232961d718E2108e8ec6BDD", "0x3920f14096E89b1bc2f4Ebf3A847c913170a1F92", "0xeC17e6122578A6E5B92e904aD46b03Fe07568359", "0x28498dDD408001A7011DA2edE4136104e035f2F4", "0xBd144df1031f1718d50715e0ade8856C7b06AaE1", "0xB6b375aCebC0068F7414Cc6c81f4392d0760c845", "0x6fC17C824f3E39dF6D21b2740B676755E71466d6", "0xf8587266d56F3e3480F693a0AD2e36E6E2c12cDA", "0x5a53768b01AEb799572998819638C63c8EEEea52", "0xC1136F15C605222793F60E328FE3F01Ec2c3F264", "0xca988c4D38d562DDeDEbFb7a44ec5a72a6698f5c", "0xbD01665f3768D793DA1Ffc4DCD70114B31A3954C", "0x784e85ef0f6e05Ff5252629c2e2649c89bdC88Da", "0xE460E2588d8cb9c460F63Bd2f746724bD0e55Aa0", "0x25222057EF01A3BDe7F59d10695da6dAe42C531A", "0x74eD57e3c12Ee6Eec8E583BFFF77Ea9D33A602e2", "0x20B4a73B34Fb2893D60D24c1b3996cbC2664F7cC", "0xE75A8Ae8075425b4D2F8f53B55996afb9157D980", "0xaE071879962A36d338ed89Bb0fF61D0A5b8C6201", "0x31a5577D98251c6C08CAcfEE6faE2e2D7A040b50", "0x2bd13D445aD7a40c393FAA93b43b24AA35DcD973", "0xBA57238e70116fB61fdE07387c6dD79AebF92E5E", "0xA453B4C7BA03FBea2a1ddAD12Bb3Bc5dC41cEee8", "0x58339aB7f27e66768713aF28E8e539Ca56432423", "0x87471BEd520edf4B54E0091Ff754E3E69666A9D6", "0xceafE30A12d00D35beCdE807c5e360ce5124A750", "0xB5010b4bC4e506B933C2f0aca7B8214089167e33", "0x3d6F4c40A2C53964ddB4D63fBB50FA854e048499", "0x5abf3e4C8d144DB9d3603A76e524671040Aa4859", "0x38e13DaF8024f682deA2022c849152BbD9b071DF", "0x7a87C5dd42Fed47b85e23C3F5C02b735FD69A38F", 
"0xFd27AE89d4976Bb11d4FB7e0e215Eced997F13a6", "0xcD0E28B9DFCc1ee71d9D6834EF66C1E103E94B27", "0xa5e85cAC3Fb448E16699C97DA86124bD288017d6", "0x606395F1B167AA73134b8A2B34C25Bb7D0564920", "0xCa9f54957c61b58843628051561896d0557D42c3", "0x7aB726CdF93443302CC4c90e83cb7ce4210bb724", "0xf00Ad3314E08e0E8E713D8155E3c867580cD811b", "0x6571e4B3432e856CdA8fae95dB3e23B573019e8B", "0x3DcD037bB1D3A898f6CbbE136ae2e1959FF74e3c", "0x6E2850E425d89836F1B10256C2C9E23415dc47F2", "0x9F978435542Ee71e8b2E8f1f51bB808dCF307D41", "0x8794E3EAeBeEbdFD9d0C601753f7Cd377aC69280", "0xa3104Af92F7C996f7FD73F6b87F16bE420d76c71", "0x17bcD46C9f85888E6169C54336fe3E91c604F1D5", "0xe01F7E3Ac096904EaC6Ac4497Ad2ab96f647bA87", "0xeb1F2F9dA1E1f932Ab11bc00F70A5Cd3607ef628", "0xc0F05F0E3DA5ff76fEC6C3Def889103a3709bfbF", "0xde4000CB884B237eFBd6f793584701230E1c45B3", "0x73b474fE6ff6844222471bC4CFD4Bd7A518B16C4", "0x81A64CE48C01d252B90E3d5531D448e446Bf3e42", "0x45e33477CD5aB5CeFA708cd977c1e398061D61cD", "0x30899738762d84343f615DE62c8D1283Cc3364da", "0x79Ba240EDc34f81DD56Ff153470e2be3DA91e88a", "0x4C3c8aCA2758799D697Ce83e63fdcCe0D52b3cd9", "0x483574D869BC34D2131032e65a3114A901928E91", "0xe7BbC8Fea57A92fC307D650D78e5481B25ccedff", "0x1EAD8a37d189a67B1736020131d4890833cF9103", "0x414c6e043c8580cA077250045a1C04b4745ac236", "0x4436A797F8E1cD87F3c674865Bc3eA1474C3B0B2", "0x043c52c8ff76C088646c8d2630eDdF1A8e33bA4C", "0x168Da8AFc9D925456c087999ED0f8041a2b7DeFA", "0xFA635D9093C2dd637CF19d48Df6EA1DBde56DDB1", "0xF44113760c4f70aFeEb412C63bC713B13E6e202E", "0xF3aB5E963E7c09E205927b9ba498Bb09afe3BC22", "0x902f009d4dE4a7828284B04b364dD43F00E51A02", "0xF267EFDDA842539a2cAff990259395188a86b813", "0xDD4Aa99077C5e976AFc22060EEafBBd1ba34eae9", "0x94c19E029F5A1A115F3B99aD87da24D33E60A0E1", "0x333c0501182170c5002219380ded6b12C338E272", "0x7A87aCB1f92c50297239EF9B0Ef9387105Bd4Fc5", "0x0000000000000000000000000000000000000000", "0x58401b64CA6b91E346c87B057254F040990c4F98", "0x6b8b3993cFB253968894C8EcB430CaF2455b51Aa", 
"0x437F5aAF195C97a01f85e672bb8e371484D96C57", "0x3ee0C26aE7aa8cCc759e4Ee4f1E6F2C16220e5f6", "0xD3F96B8Ffbf21033F5A6210C6349598AAdBd1152", "0x2f34BB0FE10BCb5652390FD0bA3Af7879BcA4b62", "0xb242159a9182e7FE0b72Fc035b336cFE060381B3", "0xA6dFB3E92BBD3Ae9098fda9AE3DDE4c727ec618a", "0x77583dc3D6192D55eF642060e82Af1D7A34BC142", "0xB394eC56AbD78c9264438168F8a8E1Bd85F1f0Ae", "0x7B55617f7F04F7B45eE865fF9066469Fbe28a632", "0x622Ad624491a01a2a6beAD916C3Ca3B90BcA0854"]}, "3": {"upcoming_etas": {}}}} \ No newline at end of file diff --git a/chief_keeper/utils/__init__.py b/chief_keeper/utils/__init__.py new file mode 100644 index 0000000..e85b9f4 --- /dev/null +++ b/chief_keeper/utils/__init__.py @@ -0,0 +1,5 @@ +# __init__.py in chief_keeper and chief_keeper/utils directories + +""" +Initializer for the package. +""" diff --git a/chief_keeper/utils/address_utils.py b/chief_keeper/utils/address_utils.py new file mode 100644 index 0000000..7f456d2 --- /dev/null +++ b/chief_keeper/utils/address_utils.py @@ -0,0 +1,41 @@ +# This utility provides an Address class that validates Ethereum addresses and +# converts them to checksum addresses. The Address class ensures that only valid +# Ethereum addresses are used and provides methods for comparison and hashing. +# +# Class: +# - Address: Validates and normalizes Ethereum addresses to checksum format. +# +# Methods: +# - is_valid_address: Validates if the provided address is a checksum address. +# - __str__: Returns the checksum address as a string. +# - __eq__: Compares two Address instances for equality. +# - __hash__: Provides a hash value for the Address instance. 
+# +# Example: +# address = Address('0x32Be343B94f860124dC4fEe278FDCBD38C102D88') +# print(address) # Outputs the checksum address + + + +from eth_utils import is_checksum_address, to_checksum_address + +class Address: + def __init__(self, address: str): + if not self.is_valid_address(address): + raise ValueError(f"Invalid Ethereum address: {address}") + self.address = to_checksum_address(address) + + @staticmethod + def is_valid_address(address: str) -> bool: + return is_checksum_address(address) + + def __str__(self): + return self.address + + def __eq__(self, other): + if isinstance(other, Address): + return self.address == other.address + return False + + def __hash__(self): + return hash(self.address) diff --git a/chief_keeper/utils/keeper_lifecycle.py b/chief_keeper/utils/keeper_lifecycle.py new file mode 100644 index 0000000..32a0f3f --- /dev/null +++ b/chief_keeper/utils/keeper_lifecycle.py @@ -0,0 +1,138 @@ +# This module provides a simplified Lifecycle class for managing the lifecycle +# of keeper operations. It includes functionality for registering startup and +# block processing callbacks, handling termination signals, and monitoring +# blockchain events. +# +# Class: +# - Lifecycle: Manages the lifecycle of keeper operations. +# +# Methods: +# - on_startup: Registers a callback to be run on keeper startup. +# - on_block: Registers a callback to be run for each new block received. +# - terminate: Initiates a graceful shutdown of the keeper. +# +# Usage: +# 1. Initialize a Lifecycle instance with a Web3 instance. +# 2. Use the on_startup method to register a startup callback. +# 3. Use the on_block method to register a block processing callback. +# 4. The class handles SIGINT and SIGTERM signals for graceful shutdown. 
# chief_keeper/utils/keeper_lifecycle.py
#
# Simplified Lifecycle for keeper operations: registers startup and
# per-block callbacks, watches the chain for new blocks on a background
# daemon thread, and handles SIGINT/SIGTERM for graceful shutdown.
#
# Example:
#   web3 = Web3(Web3.HTTPProvider('http://localhost:8545'))
#   with Lifecycle(web3) as lifecycle:
#       lifecycle.on_startup(startup_function)
#       lifecycle.on_block(block_processing_function)


import logging
import threading
import signal
import time
import datetime
import pytz
from typing import Callable, Optional
from web3 import Web3
from web3.middleware import geth_poa_middleware


class Lifecycle:
    """Simplified lifecycle manager for keeper operations.

    Callbacks registered via on_startup/on_block are driven until either
    terminate() is called internally or a SIGINT/SIGTERM is received.
    """

    logger = logging.getLogger()

    def __init__(self, web3: Optional[Web3] = None):
        """Create a lifecycle bound to *web3* (may be None).

        When a Web3 instance is supplied, geth PoA middleware is injected
        for compatibility with PoA chains.
        """
        self.web3 = web3

        if self.web3:
            # Add PoA middleware for compatibility with certain chains.
            self.web3.middleware_onion.inject(geth_poa_middleware, layer=0)

        self.startup_function: Optional[Callable] = None
        self.block_function: Optional[Callable] = None

        # Termination flags checked by the block callback and main loop.
        self.terminated_internally = False
        self.terminated_externally = False
        self.fatal_termination = False
        self._last_block_time: Optional[datetime.datetime] = None

    def __enter__(self) -> 'Lifecycle':
        return self

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        # FIX: previously this method only logged and returned, so the
        # documented `with Lifecycle(web3):` usage exited immediately,
        # killing the daemon block-watcher thread before it processed
        # anything, and _main_loop (the only place the SIGINT/SIGTERM
        # handlers are installed) was never invoked. Block here until
        # termination is requested, unless the `with` body raised.
        if exc_type is None:
            self._main_loop()
        self.logger.info("Shutting down the keeper")

    def on_startup(self, callback: Callable) -> None:
        """Register *callback* as the startup logic and execute it once, immediately."""
        assert callable(callback)
        assert self.startup_function is None
        self.startup_function = callback
        self.logger.info("Executing keeper startup logic")
        self.startup_function()

    def on_block(self, callback: Callable) -> None:
        """Register *callback* to be run for each new block received by the node."""
        assert callable(callback)
        assert self.web3 is not None
        assert self.block_function is None
        self.block_function = callback
        self._start_watching_blocks()

    def terminate(self, message: Optional[str] = None) -> None:
        """Request a graceful internal shutdown, optionally logging *message*."""
        if message:
            self.logger.warning(message)
        self.terminated_internally = True

    def _sigint_sigterm_handler(self, sig, frame) -> None:
        """Signal handler: flag external termination (idempotent)."""
        if self.terminated_externally:
            self.logger.warning("Graceful keeper termination due to SIGINT/SIGTERM already in progress")
        else:
            self.logger.warning("Keeper received SIGINT/SIGTERM signal, will terminate gracefully")
            self.terminated_externally = True

    def _start_watching_blocks(self) -> None:
        """Spawn a daemon thread that feeds new block hashes to block_function."""

        def new_block_callback(block_hash):
            self._last_block_time = datetime.datetime.now(tz=pytz.UTC)
            block = self.web3.eth.get_block(block_hash)
            block_number = block['number']

            # Skip work while the node is still syncing.
            if self.web3.eth.syncing:
                self.logger.info(f"Ignoring block #{block_number} ({block_hash.hex()}), as the node is syncing")
                return

            # Only process the newest known block; drop stale events.
            max_block_number = self.web3.eth.block_number
            if block_number < max_block_number:
                self.logger.debug(f"Ignoring block #{block_number} ({block_hash.hex()}), as there is already block #{max_block_number} available")
                return

            if self.terminated_internally or self.terminated_externally or self.fatal_termination:
                self.logger.debug(f"Ignoring block #{block_number} as keeper is already terminating")
                return

            self.logger.debug(f"Processing block #{block_number} ({block_hash.hex()})")
            # A falsy return means the previous callback is still running.
            if not self.block_function():
                self.logger.debug(f"Ignoring block #{block_number} ({block_hash.hex()}), as previous callback is still running")

        def new_block_watch():
            event_filter = self.web3.eth.filter('latest')
            logging.debug(f"Created event filter: {event_filter}")
            while True:
                try:
                    for event in event_filter.get_new_entries():
                        new_block_callback(event)
                except Exception as ex:
                    # The node can drop filters; recreate and keep going.
                    self.logger.warning(f"Node dropped event emitter; recreating latest block filter: {ex}")
                    event_filter = self.web3.eth.filter('latest')
                finally:
                    time.sleep(1)

        if self.block_function:
            threading.Thread(target=new_block_watch, daemon=True).start()
            self.logger.info("Watching for new blocks")

    def _main_loop(self) -> None:
        """Install signal handlers and block until termination is requested."""
        signal.signal(signal.SIGINT, self._sigint_sigterm_handler)
        signal.signal(signal.SIGTERM, self._sigint_sigterm_handler)

        while not self.terminated_internally and not self.terminated_externally:
            time.sleep(1)
# chief_keeper/utils/register_keys.py
#
# Securely load Ethereum private keys from encrypted keystore files and
# configure a Web3 instance to sign and send transactions with them.
#
# Key strings have the form:
#   key_file=/path/to/keyfile[,pass_file=/path/to/passfile]
# When pass_file is omitted, the password is prompted for interactively.
#
# Example:
#   from web3 import Web3
#   web3 = Web3(Web3.HTTPProvider('http://localhost:8545'))
#   keys = ['key_file=/path/to/keyfile1,pass_file=/path/to/passfile1',
#           'key_file=/path/to/keyfile2']
#   register_keys(web3, keys)


import getpass
from typing import Dict, List, Optional

from eth_account import Account
from web3 import Web3
from web3.middleware import construct_sign_and_send_raw_middleware

from .address_utils import Address

# Accounts registered so far, keyed by (web3 instance, checksummed address).
_registered_accounts = {}


def _parse_key_spec(key: str) -> Dict[str, str]:
    """Split 'key_file=...,pass_file=...' into a dict.

    FIX: uses maxsplit=1 so values containing '=' no longer raise
    ValueError; paths still must not contain ','.
    """
    return dict(part.split("=", 1) for part in key.split(","))


def _decrypt_key_file(key_file: str, pass_file: Optional[str]) -> bytes:
    """Read an encrypted keystore file and return the raw private key bytes.

    The password comes from *pass_file* when given (trailing whitespace
    stripped); otherwise it is prompted for on the terminal. This is the
    single decryption path shared by register_key_file and get_private_key,
    which previously duplicated it.
    """
    with open(key_file) as key_file_open:
        read_key = key_file_open.read()
    if pass_file:
        with open(pass_file) as pass_file_open:
            read_pass = pass_file_open.read().strip()
    else:
        read_pass = getpass.getpass(prompt=f"Password for {key_file}: ")
    return Account.decrypt(read_key, read_pass)


def register_keys(web3: Web3, keys: Optional[List[str]]) -> None:
    """Register every key specification in *keys* (no-op when None or empty)."""
    for key in keys or []:
        register_key(web3, key)


def register_key(web3: Web3, key: str) -> None:
    """Register a single 'key_file=...[,pass_file=...]' specification."""
    assert isinstance(web3, Web3)

    parsed = _parse_key_spec(key)
    register_key_file(web3, parsed.get('key_file'), parsed.get('pass_file'))


def register_key_file(web3: Web3, key_file: str, pass_file: Optional[str] = None) -> None:
    """Decrypt *key_file* and register the resulting account with *web3*."""
    assert isinstance(web3, Web3)
    assert isinstance(key_file, str)
    assert isinstance(pass_file, str) or pass_file is None

    register_private_key(web3, _decrypt_key_file(key_file, pass_file))


def get_private_key(web3: Web3, key: str) -> str:
    """Return the decrypted private key for *key* as a hex string.

    Unlike register_key, this does not register the account with *web3*.
    """
    assert isinstance(web3, Web3)
    assert isinstance(key, str)

    parsed = _parse_key_spec(key)
    return _decrypt_key_file(parsed.get('key_file'), parsed.get('pass_file')).hex()


def register_private_key(web3: Web3, private_key: bytes) -> None:
    """Register *private_key* so *web3* signs and sends transactions with it.

    The account is remembered in _registered_accounts (keyed by the web3
    instance and checksummed address) and a sign-and-send-raw middleware
    is added to the Web3 middleware stack.
    """
    assert isinstance(web3, Web3)

    account = Account.from_key(private_key)
    _registered_accounts[(web3, Address(account.address))] = account
    web3.middleware_onion.add(construct_sign_and_send_raw_middleware(account))