Merge pull request #408 from enarjord/v5.9.10_exposure_ratios_mean
V5.9.10 exposure ratios mean
enarjord authored Jun 3, 2023
2 parents e57ae6d + 0563421 commit 11f8a55
Showing 11 changed files with 172 additions and 105 deletions.
README.md: 2 changes (1 addition, 1 deletion)

@@ -4,7 +4,7 @@

:warning: **Use at own risk** :warning:

-v5.9.8
+v5.9.10


## Overview
backtest.py: 2 changes (1 addition, 1 deletion)

@@ -238,7 +238,7 @@ async def main():
print(f"{k: <{max(map(len, keys)) + 2}} {config[k]}")
print()
if config["ohlcv"]:
data = load_hlc_cache(
data = await load_hlc_cache(
symbol,
config["inverse"],
config["start_date"],
configs/optimize/default.hjson: 24 changes (13 additions, 11 deletions)

@@ -48,6 +48,10 @@
maximum_eqbal_ratio_std_long: 0.025
maximum_eqbal_ratio_std_short: 0.025

+# score -= max(exposure_ratios_mean, eqbal_ratio_std)
+maximum_exposure_ratios_mean_long: 0.1
+maximum_exposure_ratios_mean_short: 0.1
+
# clip results: compute score on top performers only
# clip_threshold=0.1 means drop 10% worst performers; clip_threshold=0.0 means include all
clip_threshold: 0.5
@@ -74,17 +78,15 @@
"DEFIUSDT", "YFIUSDT", "BALUSDT", "CRVUSDT", "TRBUSDT",
"RUNEUSDT", "SUSHIUSDT", "EGLDUSDT", "SOLUSDT", "ICXUSDT",
"STORJUSDT", "BLZUSDT", "UNIUSDT", "AVAXUSDT", "FTMUSDT",
"HNTUSDT", "ENJUSDT", "FLMUSDT", "TOMOUSDT", "RENUSDT",
"KSMUSDT", "NEARUSDT", "AAVEUSDT", "FILUSDT", "RSRUSDT",
"LRCUSDT", "MATICUSDT", "OCEANUSDT", "BELUSDT", "CTKUSDT",
"AXSUSDT", "ALPHAUSDT", "ZENUSDT", "SKLUSDT", "GRTUSDT",
"1INCHUSDT", "CHZUSDT", "SANDUSDT", "ANKRUSDT", "LITUSDT",
"UNFIUSDT", "REEFUSDT", "RVNUSDT", "SFPUSDT", "XEMUSDT",
"COTIUSDT", "CHRUSDT", "MANAUSDT", "ALICEUSDT", "HBARUSDT",
"ONEUSDT", "LINAUSDT", "STMXUSDT", "DENTUSDT", "CELRUSDT",
"HOTUSDT", "MTLUSDT", "OGNUSDT", "NKNUSDT", "DGBUSDT",


"ENJUSDT", "FLMUSDT", "TOMOUSDT", "RENUSDT","KSMUSDT",
"NEARUSDT", "AAVEUSDT", "FILUSDT", "RSRUSDT","LRCUSDT",
"MATICUSDT", "OCEANUSDT", "BELUSDT", "CTKUSDT","AXSUSDT",
"ALPHAUSDT", "ZENUSDT", "SKLUSDT", "GRTUSDT","1INCHUSDT",
"CHZUSDT", "SANDUSDT", "ANKRUSDT", "LITUSDT","UNFIUSDT",
"REEFUSDT", "RVNUSDT", "SFPUSDT", "XEMUSDT","COTIUSDT",
"CHRUSDT", "MANAUSDT", "ALICEUSDT", "HBARUSDT","ONEUSDT",
"LINAUSDT", "STMXUSDT", "DENTUSDT", "CELRUSDT","HOTUSDT",
"MTLUSDT", "OGNUSDT", "NKNUSDT", "DGBUSDT",
]

bounds_static_grid:
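A note on the new keys: the config's own comment gives the scoring rule (score -= max(exposure_ratios_mean, eqbal_ratio_std)). Below is a minimal, hypothetical Python sketch of how the exposure-ratio penalty and clip_threshold could combine when scoring an optimizer candidate; the actual logic lives in passivbot's optimizer, and the function and argument names here are illustrative only.

import numpy as np

def clipped_penalized_score(
    raw_scores,                        # per-symbol backtest scores, higher is better
    exposure_ratios_mean,              # mean exposure ratio over the backtest
    eqbal_ratio_std,                   # std of the equity/balance ratio
    maximum_exposure_ratios_mean=0.1,  # mirrors the new config key
    clip_threshold=0.5,                # mirrors clip_threshold above
):
    scores = np.sort(np.asarray(raw_scores, dtype=float))
    # clip results: drop the worst performers, keep at least one score
    n_drop = min(int(round(len(scores) * clip_threshold)), len(scores) - 1)
    score = scores[n_drop:].mean()
    if exposure_ratios_mean > maximum_exposure_ratios_mean:
        # the penalty from the config comment above
        score -= max(exposure_ratios_mean, eqbal_ratio_std)
    return score

print(clipped_penalized_score([0.1, 0.4, 0.5, 0.6], 0.15, 0.02))  # ~0.40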
downloader.py: 117 changes (104 additions, 13 deletions)

@@ -12,6 +12,7 @@
from urllib.request import urlopen
from zipfile import ZipFile
import traceback
+import aiohttp

import numpy as np
import pandas as pd
@@ -29,7 +30,7 @@
    add_argparse_args,
    utc_ms,
)
-from pure_funcs import ts_to_date, ts_to_date_utc, date_to_ts, get_dummy_settings
+from pure_funcs import ts_to_date, ts_to_date_utc, date_to_ts2, get_dummy_settings, get_day


class Downloader:
@@ -981,11 +982,97 @@ def get_first_ohlcv_ts(symbol: str, spot=False) -> int:
    return 0


-def get_csv_gz(url: str):
+def findall(string, pattern):
+    """Yields all the positions of
+    the pattern in the string"""
+    i = string.find(pattern)
+    while i != -1:
+        yield i
+        i = string.find(pattern, i + 1)
+
+
+def get_days_in_between(start_day, end_day):
+    date_format = "%Y-%m-%d"
+    start_date = datetime.datetime.strptime(start_day, date_format)
+    end_date = datetime.datetime.strptime(end_day, date_format)
+
+    days_in_between = []
+    current_date = start_date
+    while current_date <= end_date:
+        days_in_between.append(current_date.strftime(date_format))
+        current_date += datetime.timedelta(days=1)
+
+    return days_in_between
+
+
+async def download_ohlcvs_bybit(symbol, start_date, end_date, download_only=False):
+    start_date, end_date = get_day(start_date), get_day(end_date)
+    assert date_to_ts2(end_date) >= date_to_ts2(start_date), "end_date is older than start_date"
+    dirpath = make_get_filepath(f"historical_data/ohlcvs_bybit/{symbol}/")
+    ideal_days = get_days_in_between(start_date, end_date)
+    days_done = [filename[:-4] for filename in os.listdir(dirpath) if ".csv" in filename]
+    days_to_get = [day for day in ideal_days if day not in days_done]
+    dfs = {}
+    if len(days_to_get) > 0:
+        base_url = "https://public.bybit.com/trading/"
+        webpage = await get_bybit_webpage(base_url, symbol)
+        filenames = [cand for day in days_to_get if (cand := f"{symbol}{day}.csv.gz") in webpage]
+        if len(filenames) > 0:
+            n_concurrent_fetches = 10
+            for i in range(0, len(filenames), 10):
+                filenames_sublist = filenames[i : i + n_concurrent_fetches]
+                print(
+                    f"fetching trades from {filenames_sublist[0][-17:-7]} to {filenames_sublist[-1][-17:-7]}"
+                )
+                dfs_ = await get_bybit_trades(base_url, symbol, filenames_sublist)
+                dfs_ = {k[-17:-7]: convert_to_ohlcv(v) for k, v in dfs_.items()}
+                dumped = []
+                for day, df in sorted(dfs_.items()):
+                    if day in days_done:
+                        continue
+                    filepath = f"{dirpath}{day}.csv"
+                    df.to_csv(filepath)
+                    dumped.append(day)
+                if not download_only:
+                    dfs.update(dfs_)
+    if not download_only:
+        for day in ideal_days:
+            if day not in days_to_get:
+                dfs[day] = pd.read_csv(f"{dirpath}{day}.csv")
+        if len(dfs) == 0:
+            return pd.DataFrame(columns=["timestamp", "open", "high", "low", "close", "volume"])
+        df = pd.concat(dfs.values()).sort_values("timestamp").reset_index()
+        return df[["timestamp", "open", "high", "low", "close", "volume"]]
+
+
+async def get_bybit_webpage(base_url: str, symbol: str):
+    return urlopen(f"{base_url}{symbol}/").read().decode()
+
+
+async def get_bybit_trades(base_url: str, symbol: str, filenames: [str]):
+    if len(filenames) == 0:
+        return None
+    async with aiohttp.ClientSession() as session:
+        tasks = {}
+        for url in [f"{base_url}{symbol}/{filename}" for filename in filenames]:
+            tasks[url] = asyncio.ensure_future(get_csv_gz(session, url))
+        responses = {}
+        for url in tasks:
+            responses[url] = await tasks[url]
+    return {k: v.sort_values("timestamp") for k, v in responses.items()}
+
+
+async def fetch_url(session, url):
+    async with session.get(url) as response:
+        content = await response.read()
+        return content
+
+
+async def get_csv_gz(session, url: str):
    # from bybit
    try:
-        resp = urlopen(url)
-        with gzip.open(BytesIO(resp.read())) as f:
+        resp = await fetch_url(session, url)
+        with gzip.open(BytesIO(resp)) as f:
            tdf = pd.read_csv(f)
        return tdf
    except Exception as e:
@@ -1023,8 +1110,8 @@ def download_ohlcvs(
base_url = "https://data.binance.vision/data/"
base_url += "spot/" if spot else f"futures/{'cm' if inverse else 'um'}/"
col_names = ["timestamp", "open", "high", "low", "close", "volume"]
start_ts = max(get_first_ohlcv_ts(symbol, spot=spot), date_to_ts(start_date))
end_ts = date_to_ts(end_date)
start_ts = max(get_first_ohlcv_ts(symbol, spot=spot), date_to_ts2(start_date))
end_ts = date_to_ts2(end_date)
days = [ts_to_date_utc(x)[:10] for x in list(range(start_ts, end_ts, 1000 * 60 * 60 * 24))]
months = sorted({x[:7] for x in days})
month_now = ts_to_date(time())[:7]
@@ -1100,12 +1187,12 @@ def count_longest_identical_data(hlc, symbol):
    return longest_consecutive


-def load_hlc_cache(
+async def load_hlc_cache(
    symbol, inverse, start_date, end_date, base_dir="backtests", spot=False, exchange="binance"
):
    cache_fname = (
-        f"{ts_to_date_utc(date_to_ts(start_date))[:10]}_"
-        + f"{ts_to_date_utc(date_to_ts(end_date))[:10]}_ohlcv_cache.npy"
+        f"{ts_to_date_utc(date_to_ts2(start_date))[:10]}_"
+        + f"{ts_to_date_utc(date_to_ts2(end_date))[:10]}_ohlcv_cache.npy"
    )

    filepath = make_get_filepath(
@@ -1114,9 +1201,12 @@ def load_hlc_cache(
    if os.path.exists(filepath):
        data = np.load(filepath)
    else:
-        df = download_ohlcvs(symbol, inverse, start_date, end_date, spot)
-        df = df[df.timestamp >= date_to_ts(start_date)]
-        df = df[df.timestamp <= date_to_ts(end_date)]
+        if exchange == "bybit":
+            df = await download_ohlcvs_bybit(symbol, start_date, end_date, download_only=False)
+        else:
+            df = download_ohlcvs(symbol, inverse, start_date, end_date, spot)
+        df = df[df.timestamp >= date_to_ts2(start_date)]
+        df = df[df.timestamp <= date_to_ts2(end_date)]
        data = df[["timestamp", "high", "low", "close"]].values
        np.save(filepath, data)
    try:
@@ -1141,12 +1231,13 @@ async def main():
    args = parser.parse_args()
    config = await prepare_backtest_config(args)
    if config["ohlcv"]:
-        data = load_hlc_cache(
+        data = await load_hlc_cache(
            config["symbol"],
            config["inverse"],
            config["start_date"],
            config["end_date"],
            spot=config["spot"],
+            exchange=config["exchange"],
        )
    else:
        downloader = Downloader(config)
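For orientation, a minimal usage sketch of the new async Bybit path; the symbol and date range are illustrative, and it assumes the repo root is on the import path with network access to public.bybit.com.

import asyncio

from downloader import download_ohlcvs_bybit, load_hlc_cache

async def demo():
    # Daily trade archives are fetched concurrently (10 files at a time),
    # converted to OHLCV, and cached as one CSV per day under
    # historical_data/ohlcvs_bybit/<symbol>/.
    df = await download_ohlcvs_bybit("BTCUSDT", "2023-05-01", "2023-05-07")
    print(df.tail())
    # load_hlc_cache is now a coroutine, hence the awaits added in
    # backtest.py, optimize.py, and downloader.py's own main().
    data = await load_hlc_cache("BTCUSDT", False, "2023-05-01", "2023-05-07", exchange="bybit")
    print(data.shape)

asyncio.run(demo())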
harmony_search.py: 1 change (0 additions, 1 deletion)

@@ -2,7 +2,6 @@

os.environ["NOJIT"] = "false"

-from downloader import Downloader, load_hlc_cache
import argparse
import asyncio
import json
inspect_opt_results.py: 2 changes (2 additions, 0 deletions)

@@ -51,6 +51,8 @@ def main():
("ers", "minimum_eqbal_ratio_mean_of_10_worst_short"),
("esl", "maximum_eqbal_ratio_std_long"),
("ess", "maximum_eqbal_ratio_std_short"),
("exl", "maximum_exposure_ratios_mean_long"),
("exs", "maximum_exposure_ratios_mean_short"),
("ct", "clip_threshold"),
]
for k0, k1 in weights_keys:
optimize.py: 2 changes (1 addition, 1 deletion)

@@ -267,7 +267,7 @@ async def run_opt(args, config):
        args.symbol = symbol
        tmp_cfg = await prepare_backtest_config(args)
        if config["ohlcv"]:
-            data = load_hlc_cache(
+            data = await load_hlc_cache(
                symbol,
                config["inverse"],
                config["start_date"],
particle_swarm_optimization.py: 1 change (0 additions, 1 deletion)

@@ -2,7 +2,6 @@

os.environ["NOJIT"] = "false"

-from downloader import Downloader, load_hlc_cache
import argparse
import asyncio
import json
passivbot.py: 8 changes (4 additions, 4 deletions)

@@ -1466,8 +1466,8 @@ def calc_minutes_until_next_orders(self):
        millis_delay_next_entry_short = calc_delay_between_fills_ms_ask(
            self.position["short"]["price"],
            self.price,
-            self.xk["delay_between_fills_ms_entry"][0],
-            self.xk["delay_weight_entry"][0],
+            self.xk["delay_between_fills_ms_entry"][1],
+            self.xk["delay_weight_entry"][1],
        )
        millis_since_prev_close_short = (
            self.server_time - self.last_fills_timestamps["clock_entry_short"]
@@ -1478,8 +1478,8 @@ def calc_minutes_until_next_orders(self):
        millis_delay_next_close_short = calc_delay_between_fills_ms_bid(
            self.position["short"]["price"],
            self.price,
-            self.xk["delay_between_fills_ms_close"][0],
-            self.xk["delay_weight_close"][0],
+            self.xk["delay_between_fills_ms_close"][1],
+            self.xk["delay_weight_close"][1],
        )
        millis_since_prev_close_short = (
            self.server_time - self.last_fills_timestamps["clock_close_short"]
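The four changed lines above are a bug fix: the short-side delay calculations were reading the long-side tuned values. A small sketch of the indexing convention, under the assumption that entries in self.xk hold (long, short) pairs; the numbers below are made up.

LONG, SHORT = 0, 1  # assumed tuple layout of passivbot's xk parameter pairs
xk = {
    "delay_between_fills_ms_entry": (3600000.0, 7200000.0),  # hypothetical values
    "delay_weight_entry": (10.0, 12.0),
}
# Before this fix the short side used index 0, i.e. the long-side parameters.
delay_ms = xk["delay_between_fills_ms_entry"][SHORT]
weight = xk["delay_weight_entry"][SHORT]
print(delay_ms, weight)  # 7200000.0 12.0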
procedures.py: 10 changes (5 additions, 5 deletions)

@@ -25,7 +25,7 @@
    ts_to_date_utc,
    get_dummy_settings,
    config_pretty_str,
-    date_to_ts,
+    date_to_ts2,
    get_template_live_config,
    sort_dict_keys,
    make_compatible,
@@ -87,8 +87,8 @@ async def prepare_backtest_config(args) -> dict:
config["spot"] = False
else:
config["spot"] = args.market_type == "spot"
config["start_date"] = ts_to_date_utc(date_to_ts(config["start_date"]))[:10]
config["end_date"] = ts_to_date_utc(date_to_ts(config["end_date"]))[:10]
config["start_date"] = ts_to_date_utc(date_to_ts2(config["start_date"]))[:10]
config["end_date"] = ts_to_date_utc(date_to_ts2(config["end_date"]))[:10]
config["exchange"] = load_exchange_key_secret_passphrase(config["user"])[0]
config["session_name"] = (
f"{config['start_date'].replace(' ', '').replace(':', '').replace('.', '')}_"
@@ -458,8 +458,8 @@ def make_tick_samples(config: dict, sec_span: int = 1):
"""
for key in ["exchange", "symbol", "spot", "start_date", "end_date"]:
assert key in config
start_ts = date_to_ts(config["start_date"])
end_ts = date_to_ts(config["end_date"])
start_ts = date_to_ts2(config["start_date"])
end_ts = date_to_ts2(config["end_date"])
ticks_filepath = os.path.join(
"historical_data",
config["exchange"],
Expand Down
