epoch_generator.py
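
"""
Epoch generator ("ticker") service.

Tracks the head of the source chain over RPC, slices the accumulated block
range into fixed-size epochs, and releases each epoch to the protocol state
contract on the anchor chain via `releaseEpoch` transactions. The last
released epoch is checkpointed both on-chain and in Redis so that the process
can resume from where it left off after a restart.
"""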
import asyncio
import json
import random
import threading
import time
from functools import wraps
from signal import SIGINT
from signal import signal
from signal import SIGQUIT
from signal import SIGTERM

import uvloop
from httpx import AsyncClient
from httpx import AsyncHTTPTransport
from httpx import Limits
from httpx import Timeout
from redis import asyncio as aioredis
from setproctitle import setproctitle
from web3 import AsyncHTTPProvider
from web3 import AsyncWeb3

from data_models import GenericTxnIssue
from exceptions import GenericExitOnSignal
from helpers.message_models import RPCNodesObject
from helpers.redis_keys import get_epoch_generator_epoch_history
from helpers.redis_keys import get_epoch_generator_last_epoch
from helpers.rpc_helper import ConstructRPC
from settings.conf import settings
from utils.chunk_helper import chunks
from utils.default_logger import logger
from utils.notification_utils import send_failure_notifications
from utils.redis_conn import RedisPool
from utils.transaction_utils import write_transaction
from utils.transaction_utils import write_transaction_with_receipt
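
# Settings fields consumed in this module (the actual schema lives in
# settings/conf.py): anchor_chain.rpc.full_nodes, protocol_state_address,
# validator_epoch_address, validator_epoch_private_key, ticker_begin_block,
# redis, chain.chain_id, chain.rpc.{full_nodes,retry} and
# chain.epoch.{height,head_offset,block_time,history_length}.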
protocol_state_contract_address = settings.protocol_state_address

# load ABI from the JSON file and create the contract object
with open('utils/static/abi.json', 'r') as f:
    abi = json.load(f)

w3 = AsyncWeb3(AsyncHTTPProvider(settings.anchor_chain.rpc.full_nodes[0].url))
protocol_state_contract = w3.eth.contract(
    address=settings.protocol_state_address, abi=abi,
)
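# All anchor-chain interaction below (nonce lookups and releaseEpoch
# transactions) goes through the first configured full node; there is no
# provider failover on this path.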


def redis_cleanup(fn):
    """Decorate a runner coroutine so the last released epoch is persisted to Redis on exit."""
    @wraps(fn)
    async def wrapper(self, *args, **kwargs):
        try:
            await fn(self, *args, **kwargs)
        except (GenericExitOnSignal, KeyboardInterrupt):
            try:
                self._logger.debug('Waiting to push latest epoch to Redis')
                if self.last_sent_block:
                    await self._writer_redis_pool.set(get_epoch_generator_last_epoch(), self.last_sent_block)
                    self._logger.debug(
                        'Shutting down after sending out last epoch with end block height {}; '
                        'starting block height to be used during next restart is {}',
                        self.last_sent_block, self.last_sent_block + 1,
                    )
            except Exception as E:
                self._logger.error('Error while saving last state: {}', E)
        except Exception as E:
            self._logger.error('Error while running process: {}', E)
        finally:
            self._logger.debug('Shutting down')
    return wrapper
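
# On shutdown the decorator persists `last_sent_block`, so a restarted process
# can resume from `last_sent_block + 1`; the same checkpoint is also
# recoverable on-chain via `_fetch_epoch_from_contract` below.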


class EpochGenerator:
    _aioredis_pool: RedisPool
    _reader_redis_pool: aioredis.Redis
    _writer_redis_pool: aioredis.Redis

    def __init__(self, name='PowerLoom|OnChainConsensus|EpochGenerator'):
        self.name = name
        setproctitle(self.name)
        self._logger = logger.bind(module=self.name)
        self._shutdown_initiated = False
        self.last_sent_block = 0
        self._end = None
        self._nonce = -1
        self._async_transport = None
        self._client = None
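
    # `_nonce` stays at the -1 sentinel until `setup()` fetches the real
    # transaction count for the validator address.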
    async def setup(self):
        self._aioredis_pool = RedisPool(writer_redis_conf=settings.redis)
        self._nonce = await w3.eth.get_transaction_count(
            settings.validator_epoch_address,
        )
        await self._aioredis_pool.populate()
        self._reader_redis_pool = self._aioredis_pool.reader_redis_pool
        self._writer_redis_pool = self._aioredis_pool.writer_redis_pool
        self.redis_thread: threading.Thread
        await self._init_httpx_client()

    async def _init_httpx_client(self):
        if self._async_transport is not None:
            return
        self._async_transport = AsyncHTTPTransport(
            limits=Limits(
                max_connections=100,
                max_keepalive_connections=50,
                keepalive_expiry=None,
            ),
        )
        self._client = AsyncClient(
            timeout=Timeout(timeout=30.0),
            follow_redirects=False,
            transport=self._async_transport,
        )
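
    # A single shared AsyncClient (pooled connections via AsyncHTTPTransport)
    # is reused for every failure notification rather than opening a new
    # client per request.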
    def _generic_exit_handler(self, signum, sigframe):
        if signum in [SIGINT, SIGTERM, SIGQUIT] and not self._shutdown_initiated:
            self._shutdown_initiated = True
            raise GenericExitOnSignal

    async def _fetch_epoch_from_contract(self) -> int:
        last_epoch_data = await protocol_state_contract.functions.currentEpoch().call()
        if last_epoch_data[1]:
            self._logger.debug(
                'Found last epoch block: {} in contract. Starting from checkpoint.',
                last_epoch_data[1],
            )
            begin_block_epoch = last_epoch_data[1] + 1
            return begin_block_epoch
        else:
            self._logger.debug(
                'No last epoch block found in contract. Starting from configured block in settings.',
            )
            return -1
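
    # Judging from the usage above, `currentEpoch()` returns a struct/tuple in
    # which index 1 is the end block of the last released epoch; the exact
    # layout is defined by the contract ABI in utils/static/abi.json.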
    @redis_cleanup
    async def run(self):
        await self.setup()
        begin_block_epoch = settings.ticker_begin_block if settings.ticker_begin_block else 0
        for signame in [SIGINT, SIGTERM, SIGQUIT]:
            signal(signame, self._generic_exit_handler)
        last_contract_epoch = await self._fetch_epoch_from_contract()
        if last_contract_epoch != -1:
            begin_block_epoch = last_contract_epoch
        # release epoch chunks every half block time
        sleep_secs_between_chunks = settings.chain.epoch.block_time // 2
        rpc_obj = ConstructRPC(network_id=settings.chain.chain_id)
        rpc_urls = []
        for node in settings.chain.rpc.full_nodes:
            self._logger.debug('node {}', node.url)
            rpc_urls.append(node.url)
        rpc_nodes_obj = RPCNodesObject(
            NODES=rpc_urls,
            RETRY_LIMIT=settings.chain.rpc.retry,
        )
        self._logger.debug('Starting {}', self.name)
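        # Main loop: poll the source chain head, wait until at least
        # `chain.epoch.height` blocks have accumulated behind the offset head,
        # then release them in fixed-size chunks.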
        while True:
            try:
                cur_block = rpc_obj.rpc_eth_blocknumber(
                    rpc_nodes=rpc_nodes_obj,
                )
            except Exception as ex:
                self._logger.error(
                    'Unable to fetch latest block number due to RPC failure {}. Retrying after {} seconds.',
                    ex,
                    settings.chain.epoch.block_time,
                )
                await asyncio.sleep(settings.chain.epoch.block_time)
                continue
            else:
                self._logger.debug('Got current head of chain: {}', cur_block)
                if not begin_block_epoch:
                    self._logger.debug('Begin of epoch not set')
                    begin_block_epoch = cur_block
                    self._logger.debug(
                        'Set begin of epoch to current head of chain: {}', cur_block,
                    )
                    self._logger.debug(
                        'Sleeping for: {} seconds', settings.chain.epoch.block_time,
                    )
                    await asyncio.sleep(settings.chain.epoch.block_time)
                else:
                    # self._logger.debug('Picked begin of epoch: {}', begin_block_epoch)
                    end_block_epoch = cur_block - settings.chain.epoch.head_offset
                    if not (end_block_epoch - begin_block_epoch + 1) >= settings.chain.epoch.height:
                        sleep_factor = settings.chain.epoch.height - \
                            ((end_block_epoch - begin_block_epoch) + 1)
                        self._logger.debug(
                            'Current head of source chain estimated at block {} after offsetting | '
                            '{} - {} does not satisfy configured epoch length. '
                            'Sleeping for {} seconds for {} blocks to accumulate....',
                            end_block_epoch, begin_block_epoch, end_block_epoch,
                            sleep_factor * settings.chain.epoch.block_time, sleep_factor,
                        )
                        await asyncio.sleep(
                            sleep_factor * settings.chain.epoch.block_time,
                        )
                        continue
                    self._logger.debug(
                        'Chunking blocks between {} - {} with chunk size: {}',
                        begin_block_epoch, end_block_epoch, settings.chain.epoch.height,
                    )
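                    # `chunks(begin, end, size)` is assumed to yield (start, stop)
                    # pairs covering [begin, end], e.g. chunks(100, 109, 5) ->
                    # (100, 104), (105, 109); an undersized trailing chunk is
                    # skipped below and retried on the next pass.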
                    for epoch in chunks(begin_block_epoch, end_block_epoch, settings.chain.epoch.height):
                        if epoch[1] - epoch[0] + 1 < settings.chain.epoch.height:
                            self._logger.debug(
                                'Skipping chunk of blocks {} - {} as minimum epoch size not satisfied | '
                                'Resetting chunking to begin from block {}',
                                epoch[0], epoch[1], epoch[0],
                            )
                            begin_block_epoch = epoch[0]
                            break
                        epoch_block = {'begin': epoch[0], 'end': epoch[1]}
                        self._logger.debug(
                            'Epoch of sufficient length found: {}', epoch_block,
                        )
                        try:
                            self._logger.info('Attempting to release epoch {}', epoch_block)
                            rand = random.random()
                            if rand < 0.1:
                                tx_hash, receipt = await write_transaction_with_receipt(
                                    w3,
                                    settings.validator_epoch_address,
                                    settings.validator_epoch_private_key,
                                    protocol_state_contract,
                                    'releaseEpoch',
                                    self._nonce,
                                    epoch_block['begin'],
                                    epoch_block['end'],
                                )
                                if receipt['status'] != 1:
                                    self._logger.error(
                                        'Unable to release epoch, txn failed! Got receipt: {}', receipt,
                                    )
                                    issue = GenericTxnIssue(
                                        accountAddress=settings.validator_epoch_address,
                                        epochBegin=epoch_block['begin'],
                                        issueType='EpochReleaseTxnFailed',
                                        # default=str because the receipt may contain HexBytes values
                                        extra=json.dumps(receipt, default=str),
                                    )
                                    await send_failure_notifications(client=self._client, message=issue)
                                    # wait 30 seconds to avoid a nonce collision; asyncio.sleep
                                    # keeps the event loop unblocked, unlike time.sleep
                                    await asyncio.sleep(30)
                                    # reset nonce from chain state
                                    self._nonce = await w3.eth.get_transaction_count(
                                        settings.validator_epoch_address,
                                    )
                                    last_contract_epoch = await self._fetch_epoch_from_contract()
                                    if last_contract_epoch != -1:
                                        begin_block_epoch = last_contract_epoch
                                    continue
                            else:
                                tx_hash = await write_transaction(
                                    w3,
                                    settings.validator_epoch_address,
                                    settings.validator_epoch_private_key,
                                    protocol_state_contract,
                                    'releaseEpoch',
                                    self._nonce,
                                    epoch_block['begin'],
                                    epoch_block['end'],
                                )
                            self._nonce += 1
                            self._logger.debug(
                                'Epoch Released! Transaction hash: {}', tx_hash,
                            )
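                            # Waiting on a receipt for only ~10% of transactions
                            # (rand < 0.1) trades confirmation certainty for
                            # throughput: most epochs are fired without blocking
                            # on inclusion, while sampled receipts surface failures.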
                        except Exception as ex:
                            self._logger.error(
                                'Unable to release epoch, error: {}', ex,
                            )
                            issue = GenericTxnIssue(
                                accountAddress=settings.validator_epoch_address,
                                epochBegin=epoch_block['begin'],
                                issueType='EpochReleaseError',
                                extra=str(ex),
                            )
                            await send_failure_notifications(client=self._client, message=issue)
                            # wait 30 seconds to avoid a nonce collision without blocking the event loop
                            await asyncio.sleep(30)
                            # reset nonce from chain state
                            self._nonce = await w3.eth.get_transaction_count(
                                settings.validator_epoch_address,
                            )
                            last_contract_epoch = await self._fetch_epoch_from_contract()
                            if last_contract_epoch != -1:
                                begin_block_epoch = last_contract_epoch
                        await self._writer_redis_pool.set(
                            name=get_epoch_generator_last_epoch(),
                            value=epoch_block['end'],
                        )
                        await self._writer_redis_pool.zadd(
                            name=get_epoch_generator_epoch_history(),
                            mapping={
                                json.dumps({'begin': epoch_block['begin'], 'end': epoch_block['end']}): int(
                                    time.time(),
                                ),
                            },
                        )
                        epoch_generator_history_len = await self._writer_redis_pool.zcard(
                            get_epoch_generator_epoch_history(),
                        )
                        # trim the oldest entries once the history exceeds the configured
                        # limit; rank -(history_len + 1) is the last rank to drop so that
                        # exactly `history_len` newest entries survive
                        history_len = settings.chain.epoch.history_length
                        if epoch_generator_history_len > history_len:
                            await self._writer_redis_pool.zremrangebyrank(
                                get_epoch_generator_epoch_history(), 0,
                                -(history_len + 1),
                            )
                        self.last_sent_block = epoch_block['end']
                        self._logger.debug(
                            'Waiting to push next epoch in {} seconds...', sleep_secs_between_chunks,
                        )
                        # fixed wait between successive epoch releases
                        await asyncio.sleep(sleep_secs_between_chunks)
                    else:
                        # for-else: runs only when chunking finished without a break;
                        # advance the window to start right after the last released block
                        begin_block_epoch = end_block_epoch + 1


def main():
    """Spin up the ticker process in an event loop."""
    loop = uvloop.new_event_loop()
    asyncio.set_event_loop(loop)
    ticker_process = EpochGenerator()
    loop.run_until_complete(ticker_process.run())


if __name__ == '__main__':
    main()
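
# Run directly, e.g. `python epoch_generator.py`. uvloop replaces the default
# asyncio event loop for lower-overhead scheduling.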