Top pairs (#178)
- Add: `fetch_top_pairs()`
miohtama authored Oct 16, 2024
1 parent 3031605 commit c3ac900
Showing 5 changed files with 266 additions and 2 deletions.
41 changes: 41 additions & 0 deletions tests/test_top_pairs.py
@@ -0,0 +1,41 @@
"""Test /top endpoint."""
import datetime

from tradingstrategy.chain import ChainId
from tradingstrategy.client import Client
from tradingstrategy.top import TopPairsReply


def test_load_top(persistent_test_client: Client):
    """Load 10 top pairs by liquidity from /top endpoint.

    - Integration test
    - Get whatever pairs we have today
    """

    client = persistent_test_client

    top_reply = client.fetch_top_pairs(
        chain_ids={ChainId.ethereum},
        exchange_slugs={"uniswap-v2", "uniswap-v3"},
        limit=10,
    )

    assert isinstance(top_reply, TopPairsReply)
    assert len(top_reply.included) == 10
    assert len(top_reply.excluded) > 0  # There is always something to be excluded

    # Because this is a dynamic reply,
    # we just check accessor methods work
    for pair in top_reply.included:
        assert pair.get_persistent_id() is not None
        assert isinstance(pair.volume_updated_at, datetime.datetime)
        assert isinstance(pair.tvl_updated_at, datetime.datetime)
        assert isinstance(pair.queried_at, datetime.datetime)
        assert pair.volume_24h_usd > 0, f"Top pair issue on {pair}"
        assert pair.tvl_latest_usd > 0, f"Top pair issue on {pair}"
        if pair.base_token != "WETH":
            assert pair.token_sniffer_score, f"Top pair issue on {pair}"
            assert pair.token_sniffer_score > 0, f"Top pair issue on {pair}"

57 changes: 57 additions & 0 deletions tradingstrategy/client.py
@@ -23,6 +23,7 @@

from tradingstrategy.candle import TradingPairDataAvailability
from tradingstrategy.reader import BrokenData, read_parquet
from tradingstrategy.top import TopPairsReply
from tradingstrategy.transport.pyodide import PYODIDE_API_KEY
from tradingstrategy.types import PrimaryKey, AnyTimestamp
from tradingstrategy.utils.jupyter import is_pyodide
@@ -738,6 +739,62 @@ def fetch_chain_status(self, chain_id: ChainId) -> dict:
"""Get live information about how a certain blockchain indexing and candle creation is doing."""
return self.transport.fetch_chain_status(chain_id.value)

def fetch_top_pairs(
    self,
    chain_ids: Collection[ChainId],
    exchange_slugs: Collection[str],
    limit: int = 100,
    method="sorted-by-liquidity-with-filtering",
) -> TopPairsReply:
    """Get new trading pairs to be included in the trading universe.

    This endpoint is designed to scan new trading pairs to be included in a trading universe.
    It ranks and filters the daily/weekly/etc. interesting trading pairs by a set of criteria.

    The result data is filled asynchronously and may not reflect the freshest situation,
    due to data processing delays. If you call this method at `24:00`, it may not have
    yesterday's pairs ready yet. The results may vary, but should reflect a look back over the last 24h.

    Various heuristics are applied to filter the results, like excluding stablecoin pairs and
    derivative tokens, choosing the trading pair with the best fee, etc.

    When you store the result, use the tuple `(chain id, pool address)` as the persistent key.
    Any integer primary keys may change over the long term.

    .. warning::

        Depending on the available TokenSniffer data caching, this endpoint may
        take up to 15 seconds per token.

    **This API is still under heavy development**.

    :param chain_ids:
        List of blockchains to consider.

    :param exchange_slugs:
        List of DEXes to consider.

    :param limit:
        Number of pairs to query.

    :param method:
        Currently hardcoded. No other methods are supported.

    :return:
        Top trading pairs, both included in and excluded from the ranking.
    """

    assert len(chain_ids) > 0, f"Got {chain_ids}"
    assert len(exchange_slugs) > 0, f"Got {exchange_slugs}"
    assert 1 < limit <= 500

    data = self.transport.fetch_top_pairs(
        chain_ids=chain_ids,
        exchange_slugs=exchange_slugs,
        limit=limit,
    )
    return TopPairsReply.from_dict(data)
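For context (not part of this commit), a minimal usage sketch of the new method follows. The `Client.create_live_client()` constructor and the `TRADING_STRATEGY_API_KEY` environment variable are assumptions about the existing client setup, not something this diff adds:

import os

from tradingstrategy.chain import ChainId
from tradingstrategy.client import Client

# Assumption: a live client is created with an API key from the environment
client = Client.create_live_client(api_key=os.environ["TRADING_STRATEGY_API_KEY"])

top_reply = client.fetch_top_pairs(
    chain_ids={ChainId.ethereum},
    exchange_slugs={"uniswap-v2", "uniswap-v3"},
    limit=10,
)

# Persist results keyed by the stable (chain id, pool address) identifier,
# because integer primary keys may change over time
universe_candidates = {pair.get_persistent_id(): pair for pair in top_reply.included}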

@classmethod
def preflight_check(cls):
    """Checks that everything is OK to run the notebook."""
2 changes: 1 addition & 1 deletion tradingstrategy/stablecoin.py
@@ -8,7 +8,7 @@
#: Note that it is *not* safe to check the token symbol to know if a token is a specific stablecoin,
#: but you always need to check the contract address.
#: Checking against this list only works
STABLECOIN_LIKE = ("DAI", "USDC", "USDT", "DAI", "BUSD", "UST", "USDN", "LUSD", "VUSD", "USDV", "EUROe", "EURT", "USDP", "iUSD", "USDS", "gmUSD", "USDR", "RAI", "EURS", "TUSD", "EURe", "USD+", "EUROC", "USDs", "USDT.e", "USDC.e", "GHST", "jEUR", "crvUSD", "DOLA", "GUSD", "USDe", "kUSD")
STABLECOIN_LIKE = ("DAI", "USDC", "USDT", "DAI", "BUSD", "UST", "USDN", "LUSD", "VUSD", "USDV", "EUROe", "EURT", "USDP", "iUSD", "USDS", "gmUSD", "USDR", "RAI", "EURS", "TUSD", "EURe", "USD+", "EUROC", "USDs", "USDT.e", "USDC.e", "GHST", "jEUR", "crvUSD", "DOLA", "GUSD", "USDe", "kUSD", "sosUSDT")

#: Stablecoins plus their interest wrapped counterparts on Compound and Aave.
#: Also contains other derivates.
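An illustrative check (not part of this diff) of how `STABLECOIN_LIKE` might be used; as the comment above warns, a symbol match is a heuristic only and the contract address must still be verified. The helper name is hypothetical:

from tradingstrategy.stablecoin import STABLECOIN_LIKE

def looks_like_stablecoin(symbol: str) -> bool:
    # Heuristic only: symbols can be spoofed, so always verify the contract address as well
    return symbol in STABLECOIN_LIKE

assert looks_like_stablecoin("USDC")
assert not looks_like_stablecoin("WETH")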
151 changes: 151 additions & 0 deletions tradingstrategy/top.py
@@ -0,0 +1,151 @@
"""Top trading pair queries.
- Data structures for /top end point
- Used for adding new pairs to open ended trading universe in external trading signal processor
- See :py:func:`tradingstrategy.client.Client.fetch_top_pairs` for usage.
"""
import datetime

from dataclasses import dataclass, field

from dataclasses_json import dataclass_json, config
from marshmallow import fields


@dataclass_json
@dataclass(slots=True)
class TopPairData:
    """See open-defi-api.yaml"""

    #: When this entry was queried
    #:
    #: Wall clock UTC time.
    #:
    #: Because the server serialises as ISO, we need a special decoder
    #:
    #: https://github.com/lidatong/dataclasses-json?tab=readme-ov-file#Overriding
    #:
    queried_at: datetime.datetime = field(
        metadata=config(
            decoder=datetime.datetime.fromisoformat,
            mm_field=fields.DateTime(format='iso')
        )
    )

    #: Blockchain this pair is on
    chain_id: int

    #: Internal pair primary key (may change)
    pair_id: int

    #: Internal pair exchange id (may change)
    exchange_id: int

    #: Human readable exchange URL slug (may change)
    exchange_slug: str

    #: Smart contract address of pool smart contract.
    #:
    #: Uniswap v2 pair contract address, Uniswap v3 pool contract address.
    #:
    pool_address: str

    #: Human readable base token
    base_token: str

    #: Human readable quote token
    quote_token: str

    #: Pair fee in 0...1, 0.0030 is 30 BPS
    fee: float

    #: Volume over the last 24h
    #:
    #: May not be available due to latency/denormalisation/etc. issues
    #:
    volume_24h_usd: float | None

    #: Last USD TVL (Uniswap v3) or XY Liquidity (Uniswap v2)
    #:
    #: May not be available due to latency/denormalisation/etc. issues
    #:
    tvl_latest_usd: float | None

    #: When TVL measurement was updated.
    #:
    #: How old data are we using.
    #:
    tvl_updated_at: datetime.datetime | None = field(
        metadata=config(
            decoder=datetime.datetime.fromisoformat,
            mm_field=fields.DateTime(format='iso')
        )
    )

    #: When volume measurement was updated
    #:
    #: How old data are we using.
    #:
    volume_updated_at: datetime.datetime | None = field(
        metadata=config(
            decoder=datetime.datetime.fromisoformat,
            mm_field=fields.DateTime(format='iso')
        )
    )


    #: If this pair was excluded from the top pairs, the human-readable heuristic reason for the exclusion.
    #:
    #: This allows you to better diagnose why some trading pairs did not end up in the trading universe.
    #:
    exclude_reason: str | None

    #: TokenSniffer data for this token.
    #:
    #: Used in the filtering of scam tokens.
    #:
    #: Not available for all tokens that are filtered out for other reasons.
    #: This is the last check.
    #:
    #: `See more information here <https://web3-ethereum-defi.readthedocs.io/api/token_analysis/_autosummary_token_analysis/eth_defi.token_analysis.tokensniffer.html>`__.
    #:
    token_sniffer_data: dict | None

    def __repr__(self):
        return f"<Pair {self.base_token} - {self.quote_token} on {self.exchange_slug}, address {self.pool_address} - reason {self.exclude_reason}>"

    def get_persistent_id(self) -> str:
        """Stable id over long period of time and across different systems."""
        return f"{self.chain_id}-{self.pool_address}"

    @property
    def token_sniffer_score(self) -> int | None:
        """What was the TokenSniffer score for the base token."""

        if self.token_sniffer_data is None:
            return None

        return self.token_sniffer_data["score"]


@dataclass_json
@dataclass(slots=True)
class TopPairsReply:
    """/top endpoint reply.

    - Get a list of trading pairs, both included and excluded
    """

    #: The top list at the point in time the request was made
    included: list[TopPairData]

    #: Tokens that were considered for the top list, but excluded for some reason
    #:
    #: They had enough liquidity, but they failed e.g. the TokenSniffer scam check,
    #: or had a trading pair for the same base token with better fees, etc.
    #:
    excluded: list[TopPairData]
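A short post-processing sketch (not part of this commit) showing how a reply could be consumed: logging `exclude_reason` for diagnostics and applying an extra client-side `token_sniffer_score` cut-off. The helper name and threshold are illustrative assumptions, not library defaults:

def filter_top_pairs(reply: TopPairsReply, min_score: int = 50) -> list[TopPairData]:
    """Illustrative post-processing of a /top reply."""
    # Explain why candidates were dropped server-side
    for pair in reply.excluded:
        print(f"Excluded {pair.base_token}-{pair.quote_token}: {pair.exclude_reason}")

    # Apply an additional TokenSniffer score cut-off;
    # the score may be None if TokenSniffer data was never fetched
    return [
        pair
        for pair in reply.included
        if pair.token_sniffer_score is None or pair.token_sniffer_score >= min_score
    ]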
17 changes: 16 additions & 1 deletion tradingstrategy/transport/cache.py
@@ -324,7 +324,7 @@ def save_response(self, fpath, api_path, params=None, human_readable_hint: Optio
    # https://stackoverflow.com/a/14114741/315168
    self.download_func(self.requests, fpath, url, params, self.timeout, human_readable_hint)

def get_json_response(self, api_path, params=None):
def get_json_response(self, api_path, params=None) -> dict:
    url = f"{self.endpoint}/{api_path}"
    logger.debug("get_json_response() %s, %s", url, params)
    response = self.requests.get(url, params=params)
@@ -340,6 +340,21 @@ def fetch_chain_status(self, chain_id: int) -> dict:
"""Not cached."""
return self.get_json_response("chain-status", params={"chain_id": chain_id})

def fetch_top_pairs(
    self,
    limit: int,
    chain_ids: Collection[ChainId],
    exchange_slugs: Collection[str],
) -> dict:
    """Not cached."""
    params = {
        "chain_slugs": ",".join([c.get_slug() for c in chain_ids]),
        "exchange_slugs": ",".join([e for e in exchange_slugs]),
        "limit": str(limit),
    }
    resp = self.get_json_response("top", params=params)
    return resp
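For reference (not part of this diff), the query parameters built above would serialise roughly like this for the test query, assuming `ChainId.ethereum.get_slug()` returns "ethereum"; note that the order of the joined exchange slugs depends on set iteration order:

from tradingstrategy.chain import ChainId

chain_ids = {ChainId.ethereum}
exchange_slugs = {"uniswap-v2", "uniswap-v3"}
limit = 10

params = {
    "chain_slugs": ",".join(c.get_slug() for c in chain_ids),
    "exchange_slugs": ",".join(exchange_slugs),
    "limit": str(limit),
}
# params == {'chain_slugs': 'ethereum', 'exchange_slugs': 'uniswap-v2,uniswap-v3', 'limit': '10'}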

def fetch_pair_universe(self) -> pathlib.Path:
    fname = "pair-universe.parquet"
    cached = self.get_cached_item(fname)
