Skip to content

Commit

Permalink
Merge pull request #291 from Chia-Network/ak.update-pool-ref-to-chia-2.0
Browse files Browse the repository at this point in the history
Update reference pool server to chia 2.0
  • Loading branch information
emlowe authored May 21, 2024
2 parents c156736 + 2caedfb commit 691a29d
Show file tree
Hide file tree
Showing 12 changed files with 240 additions and 152 deletions.
3 changes: 3 additions & 0 deletions mypy.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
[mypy]
ignore_missing_imports = True
show_error_codes = True
10 changes: 5 additions & 5 deletions pool/difficulty_adjustment.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import Tuple, List
from typing import List, Tuple

from chia.util.ints import uint64

Expand Down Expand Up @@ -27,12 +27,12 @@ def get_new_difficulty(
# Lower the difficulty if we are really slow since our last partial
last_timestamp = recent_partials[0][0]
if current_time - last_timestamp > 3 * 3600:
return max(min_difficulty, current_difficulty // 5)
return uint64(max(min_difficulty, current_difficulty // 5))

if current_time - last_timestamp > 3600:
return max(min_difficulty, uint64(int(current_difficulty // 1.5)))
return uint64(max(min_difficulty, uint64(int(current_difficulty // 1.5))))

time_taken = uint64(recent_partials[0][0] - recent_partials[-1][0])
time_taken = (recent_partials[0][0] - recent_partials[-1][0]) * 1.0

# If we don't have enough partials at this difficulty and time between last and
# 1st partials is below target time, don't update yet
Expand All @@ -45,4 +45,4 @@ def get_new_difficulty(

# Finally, this is the standard case of normal farming and slow (or no) growth, adjust to the new difficulty
new_difficulty = uint64(int(current_difficulty * time_target / time_taken))
return max(min_difficulty, new_difficulty)
return uint64(max(min_difficulty, new_difficulty))
125 changes: 79 additions & 46 deletions pool/pool.py

Large diffs are not rendered by default.

40 changes: 22 additions & 18 deletions pool/pool_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,37 +4,37 @@
import ssl
import time
import traceback
from typing import Dict, Callable, Optional
from typing import Callable, Dict, Optional, Union

import aiohttp
import yaml
from blspy import AugSchemeMPL, G2Element
from aiohttp import web
from blspy import AugSchemeMPL, G2Element
from chia.consensus.constants import ConsensusConstants
from chia.consensus.default_constants import DEFAULT_CONSTANTS
from chia.protocols.pool_protocol import (
PoolErrorCode,
POOL_PROTOCOL_VERSION,
AuthenticationPayload,
GetFarmerResponse,
GetPoolInfoResponse,
PostPartialRequest,
PoolErrorCode,
PostFarmerRequest,
PostPartialRequest,
PutFarmerRequest,
validate_authentication_token,
POOL_PROTOCOL_VERSION,
AuthenticationPayload,
)
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.util.byte_types import hexstr_to_bytes
from chia.util.config import load_config
from chia.util.default_root import DEFAULT_ROOT_PATH
from chia.util.hash import std_hash
from chia.consensus.default_constants import DEFAULT_CONSTANTS
from chia.consensus.constants import ConsensusConstants
from chia.util.ints import uint8, uint32, uint64
from chia.util.json_util import obj_to_response
from chia.util.ints import uint8, uint64, uint32
from chia.util.default_root import DEFAULT_ROOT_PATH
from chia.util.config import load_config

from .record import FarmerRecord
from .pool import Pool
from .record import FarmerRecord
from .store.abstract import AbstractPoolStore
from .util import error_response, RequestMetadata
from .util import RequestMetadata, error_response


def allow_cors(response: web.Response) -> web.Response:
Expand Down Expand Up @@ -116,7 +116,7 @@ async def get_pool_info(self, _) -> web.Response:

async def get_farmer(self, request_obj) -> web.Response:
# TODO(pool): add rate limiting
launcher_id: bytes32 = hexstr_to_bytes(request_obj.rel_url.query["launcher_id"])
launcher_id: bytes32 = bytes32(hexstr_to_bytes(request_obj.rel_url.query["launcher_id"]))
authentication_token = uint64(request_obj.rel_url.query["authentication_token"])

authentication_token_error: Optional[web.Response] = check_authentication_token(
Expand Down Expand Up @@ -228,7 +228,11 @@ async def post_partial(self, request_obj) -> web.Response:
f"Farmer with launcher_id {partial.payload.launcher_id.hex()} not known.",
)

post_partial_response = await self.pool.process_partial(partial, farmer_record, uint64(int(start_time)))
peak_height = self.pool.blockchain_state["peak"].height
        # Note the use of peak_height + 1. We are evaluating the suitability for the next block
post_partial_response = await self.pool.process_partial(
partial, farmer_record, uint64(int(start_time)), peak_height + 1
)

self.pool.log.info(
f"post_partial response {post_partial_response}, time: {time.time() - start_time} "
Expand All @@ -238,7 +242,7 @@ async def post_partial(self, request_obj) -> web.Response:

async def get_login(self, request_obj) -> web.Response:
# TODO(pool): add rate limiting
launcher_id: bytes32 = hexstr_to_bytes(request_obj.rel_url.query["launcher_id"])
launcher_id: bytes32 = bytes32(hexstr_to_bytes(request_obj.rel_url.query["launcher_id"]))
authentication_token: uint64 = uint64(request_obj.rel_url.query["authentication_token"])
authentication_token_error = check_authentication_token(
launcher_id, authentication_token, self.pool.authentication_token_timeout
Expand Down Expand Up @@ -267,9 +271,9 @@ async def get_login(self, request_obj) -> web.Response:

return await self.login_response(launcher_id)

async def login_response(self, launcher_id):
async def login_response(self, launcher_id) -> web.Response:
record: Optional[FarmerRecord] = await self.pool.store.get_farmer_record(launcher_id)
response = {}
response: Dict[str, Union[FarmerRecord, list[tuple[uint64, uint64]]]] = {}
if record is not None:
response["farmer_record"] = record
recent_partials = await self.pool.store.get_recent_partials(launcher_id, 20)
Expand Down
2 changes: 1 addition & 1 deletion pool/record.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from chia.types.blockchain_format.sized_bytes import bytes32
from chia.types.coin_spend import CoinSpend
from chia.util.ints import uint64
from chia.util.streamable import streamable, Streamable
from chia.util.streamable import Streamable, streamable


@streamable
Expand Down
25 changes: 16 additions & 9 deletions pool/singleton.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
from typing import List, Optional, Tuple
import logging
from typing import List, Optional, Tuple

from blspy import G2Element
from chia.consensus.coinbase import pool_parent_id
from chia.pools.pool_puzzles import (
create_absorb_spend,
solution_to_pool_state,
get_most_recent_singleton_coin_from_coin_spend,
pool_state_to_inner_puzzle,
create_full_puzzle,
get_delayed_puz_info_from_launcher_spend,
get_most_recent_singleton_coin_from_coin_spend,
pool_state_to_inner_puzzle,
solution_to_pool_state,
)
from chia.pools.pool_wallet import PoolSingletonState
from chia.pools.pool_wallet_info import PoolState
Expand All @@ -25,7 +25,6 @@
from chia.util.ints import uint32, uint64
from chia.wallet.transaction_record import TransactionRecord


from .record import FarmerRecord

log = logging
Expand Down Expand Up @@ -59,6 +58,7 @@ async def get_singleton_state(
confirmation_security_threshold: int,
genesis_challenge: bytes32,
) -> Optional[Tuple[CoinSpend, PoolState, PoolState]]:
last_spend: Optional[CoinSpend]
try:
if farmer_record is None:
launcher_coin: Optional[CoinRecord] = await node_rpc_client.get_coin_record_by_name(launcher_id)
Expand All @@ -69,10 +69,16 @@ async def get_singleton_state(
log.warning(f"Genesis coin {launcher_id} not spent")
return None

last_spend: Optional[CoinSpend] = await get_coin_spend(node_rpc_client, launcher_coin)
last_spend = await get_coin_spend(node_rpc_client, launcher_coin)
if last_spend is None:
raise RuntimeError(
f"Failed to get_coin_spend from {node_rpc_client.hostname}:{node_rpc_client.port}"
f" for singleton {launcher_coin}"
)
delay_time, delay_puzzle_hash = get_delayed_puz_info_from_launcher_spend(last_spend)
saved_state = solution_to_pool_state(last_spend)
assert last_spend is not None and saved_state is not None
if saved_state is None:
raise RuntimeError(f"solution_to_pool_state failed to get state for spend {last_spend}")
else:
last_spend = farmer_record.singleton_tip
saved_state = farmer_record.singleton_tip_state
Expand Down Expand Up @@ -108,7 +114,7 @@ async def get_singleton_state(
return None
break

last_spend: Optional[CoinSpend] = await get_coin_spend(node_rpc_client, next_coin_record)
last_spend = await get_coin_spend(node_rpc_client, next_coin_record)
assert last_spend is not None

pool_state: Optional[PoolState] = solution_to_pool_state(last_spend)
Expand Down Expand Up @@ -145,7 +151,7 @@ async def create_absorb_transaction(
peak_height: uint32,
reward_coin_records: List[CoinRecord],
genesis_challenge: bytes32,
fee_amount: Optional[uint64] = None,
fee_amount: uint64 = uint64(0),
wallet_rpc_client: Optional[WalletRpcClient] = None,
fee_target_puzzle_hash: Optional[bytes32] = None,
) -> Optional[SpendBundle]:
Expand Down Expand Up @@ -193,6 +199,7 @@ async def create_absorb_transaction(

if len(coin_announcements) > 0:
# address can be anything
assert wallet_rpc_client
signed_transaction: TransactionRecord = await wallet_rpc_client.create_signed_transaction(
additions=[{"amount": uint64(1), "puzzle_hash": fee_target_puzzle_hash}],
fee=uint64(fee_amount * len(coin_announcements)),
Expand Down
4 changes: 2 additions & 2 deletions pool/store/abstract.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from abc import ABC, abstractmethod
import asyncio
from typing import Optional, Set, List, Tuple
from abc import ABC, abstractmethod
from typing import List, Optional, Set, Tuple

from chia.pools.pool_wallet_info import PoolState
from chia.types.blockchain_format.sized_bytes import bytes32
Expand Down
Loading

0 comments on commit 691a29d

Please sign in to comment.