Commit cac2c36

Merge branch 'freqtrade:develop' into develop
2 parents 1d99b7d + 23e4943 commit cac2c36

File tree

20 files changed: +162 additions, -101 deletions

.github/workflows/ci.yml

Lines changed: 14 additions & 10 deletions
@@ -38,8 +38,9 @@ jobs:
           python-version: ${{ matrix.python-version }}

       - name: Install uv
-        uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
+        uses: astral-sh/setup-uv@c7f87aa956e4c323abf06d5dec078e358f6b4d04 # v6.0.0
         with:
+          activate-environment: true
           enable-cache: true
           python-version: ${{ matrix.python-version }}
           cache-dependency-glob: "requirements**.txt"
@@ -144,7 +145,7 @@ jobs:
           mypy freqtrade scripts tests

       - name: Discord notification
-        uses: rjstone/discord-webhook-notify@89b0bf43c2c8514f70d0dcba4a706b904e8a3112 #v1.0.4
+        uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
         if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
         with:
           severity: error
@@ -170,8 +171,9 @@ jobs:
           check-latest: true

       - name: Install uv
-        uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
+        uses: astral-sh/setup-uv@c7f87aa956e4c323abf06d5dec078e358f6b4d04 # v6.0.0
         with:
+          activate-environment: true
           enable-cache: true
           python-version: ${{ matrix.python-version }}
           cache-dependency-glob: "requirements**.txt"
@@ -270,7 +272,7 @@ jobs:
           mypy freqtrade scripts

       - name: Discord notification
-        uses: rjstone/discord-webhook-notify@89b0bf43c2c8514f70d0dcba4a706b904e8a3112 #v1.0.4
+        uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
         if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
         with:
           severity: info
@@ -296,8 +298,9 @@ jobs:
           python-version: ${{ matrix.python-version }}

       - name: Install uv
-        uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
+        uses: astral-sh/setup-uv@c7f87aa956e4c323abf06d5dec078e358f6b4d04 # v6.0.0
         with:
+          activate-environment: true
           enable-cache: true
           python-version: ${{ matrix.python-version }}
           cache-dependency-glob: "requirements**.txt"
@@ -363,7 +366,7 @@ jobs:
         shell: powershell

       - name: Discord notification
-        uses: rjstone/discord-webhook-notify@89b0bf43c2c8514f70d0dcba4a706b904e8a3112 #v1.0.4
+        uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
         if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
         with:
           severity: error
@@ -421,7 +424,7 @@ jobs:
           mkdocs build

       - name: Discord notification
-        uses: rjstone/discord-webhook-notify@89b0bf43c2c8514f70d0dcba4a706b904e8a3112 #v1.0.4
+        uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
         if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
         with:
           severity: error
@@ -443,8 +446,9 @@ jobs:
           python-version: "3.12"

       - name: Install uv
-        uses: astral-sh/setup-uv@d4b2f3b6ecc6e67c4457f6d3e41ec42d3d0fcb86 # v5.4.2
+        uses: astral-sh/setup-uv@c7f87aa956e4c323abf06d5dec078e358f6b4d04 # v6.0.0
         with:
+          activate-environment: true
           enable-cache: true
           python-version: "3.12"
           cache-dependency-glob: "requirements**.txt"
@@ -508,7 +512,7 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

       - name: Discord notification
-        uses: rjstone/discord-webhook-notify@89b0bf43c2c8514f70d0dcba4a706b904e8a3112 #v1.0.4
+        uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
         if: always() && steps.check.outputs.has-permission && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
         with:
           severity: info
@@ -703,7 +707,7 @@ jobs:
           build_helpers/publish_docker_arm64.sh

       - name: Discord notification
-        uses: rjstone/discord-webhook-notify@89b0bf43c2c8514f70d0dcba4a706b904e8a3112 #v1.0.4
+        uses: rjstone/discord-webhook-notify@1399c1b2d57cc05894d506d2cfdc33c5f012b993 #v1.1.1
         if: always() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false) && (github.event_name != 'schedule')
         with:
           severity: info

build_helpers/pyarrow-19.0.1-cp311-cp311-linux_armv7l.whl renamed to build_helpers/pyarrow-20.0.0-cp311-cp311-linux_armv7l.whl

16.4 MB
Binary file not shown.

docs/requirements-docs.txt

Lines changed: 1 addition & 1 deletion
@@ -2,6 +2,6 @@ markdown==3.8
 mkdocs==1.6.1
 mkdocs-material==9.6.12
 mdx_truly_sane_lists==1.3
-pymdown-extensions==10.14.3
+pymdown-extensions==10.15
 jinja2==3.1.6
 mike==2.1.3
Lines changed: 31 additions & 0 deletions
@@ -0,0 +1,31 @@
+# flake8: noqa: F401
+from .bt_fileutils import (
+    BT_DATA_COLUMNS,
+    delete_backtest_result,
+    extract_trades_of_period,
+    find_existing_backtest_stats,
+    get_backtest_market_change,
+    get_backtest_result,
+    get_backtest_resultlist,
+    get_latest_backtest_filename,
+    get_latest_hyperopt_file,
+    get_latest_hyperopt_filename,
+    get_latest_optimize_filename,
+    load_and_merge_backtest_result,
+    load_backtest_analysis_data,
+    load_backtest_data,
+    load_backtest_metadata,
+    load_backtest_stats,
+    load_exit_signal_candles,
+    load_file_from_zip,
+    load_rejected_signals,
+    load_signal_candles,
+    load_trades,
+    load_trades_from_db,
+    trade_list_to_dataframe,
+    update_backtest_metadata,
+)
+from .trade_parallelism import (
+    analyze_trade_parallelism,
+    evaluate_result_multi,
+)
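
Judging by the relative imports, this new file appears to be the package __init__ for freqtrade.data.btanalysis, re-exporting everything that previously lived in a single module. A minimal sketch of what that preserves for downstream code, assuming only the re-exports shown above; the results directory path is purely illustrative:

```python
# Call sites keep importing from the package, regardless of which submodule
# now hosts each helper. The path below is a placeholder.
from freqtrade.data.btanalysis import analyze_trade_parallelism, load_backtest_data

trades = load_backtest_data("user_data/backtest_results")  # directory or result file
open_counts = analyze_trade_parallelism(trades, "5m")
print(open_counts.tail())
```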
Lines changed: 1 addition & 50 deletions
@@ -13,7 +13,7 @@
 import numpy as np
 import pandas as pd

-from freqtrade.constants import LAST_BT_RESULT_FN, IntOrInf
+from freqtrade.constants import LAST_BT_RESULT_FN
 from freqtrade.exceptions import ConfigurationError, OperationalException
 from freqtrade.ft_types import BacktestHistoryEntryType, BacktestResultType
 from freqtrade.misc import file_dump_json, json_load
@@ -491,55 +491,6 @@ def load_exit_signal_candles(backtest_dir: Path) -> dict[str, dict[str, pd.DataF
     return load_backtest_analysis_data(backtest_dir, "exited")


-def analyze_trade_parallelism(results: pd.DataFrame, timeframe: str) -> pd.DataFrame:
-    """
-    Find overlapping trades by expanding each trade once per period it was open
-    and then counting overlaps.
-    :param results: Results Dataframe - can be loaded
-    :param timeframe: Timeframe used for backtest
-    :return: dataframe with open-counts per time-period in timeframe
-    """
-    from freqtrade.exchange import timeframe_to_resample_freq
-
-    timeframe_freq = timeframe_to_resample_freq(timeframe)
-    dates = [
-        pd.Series(
-            pd.date_range(
-                row[1]["open_date"],
-                row[1]["close_date"],
-                freq=timeframe_freq,
-                # Exclude right boundary - the date is the candle open date.
-                inclusive="left",
-            )
-        )
-        for row in results[["open_date", "close_date"]].iterrows()
-    ]
-    deltas = [len(x) for x in dates]
-    dates = pd.Series(pd.concat(dates).values, name="date")
-    df2 = pd.DataFrame(np.repeat(results.values, deltas, axis=0), columns=results.columns)
-
-    df2 = pd.concat([dates, df2], axis=1)
-    df2 = df2.set_index("date")
-    df_final = df2.resample(timeframe_freq)[["pair"]].count()
-    df_final = df_final.rename({"pair": "open_trades"}, axis=1)
-    return df_final
-
-
-def evaluate_result_multi(
-    results: pd.DataFrame, timeframe: str, max_open_trades: IntOrInf
-) -> pd.DataFrame:
-    """
-    Find overlapping trades by expanding each trade once per period it was open
-    and then counting overlaps
-    :param results: Results Dataframe - can be loaded
-    :param timeframe: Frequency used for the backtest
-    :param max_open_trades: parameter max_open_trades used during backtest run
-    :return: dataframe with open-counts per time-period in freq
-    """
-    df_final = analyze_trade_parallelism(results, timeframe)
-    return df_final[df_final["open_trades"] > max_open_trades]
-
-
 def trade_list_to_dataframe(trades: list[Trade] | list[LocalTrade]) -> pd.DataFrame:
     """
     Convert list of Trade objects to pandas Dataframe
Lines changed: 60 additions & 0 deletions
@@ -0,0 +1,60 @@
+import logging
+
+import numpy as np
+import pandas as pd
+
+from freqtrade.constants import IntOrInf
+
+
+logger = logging.getLogger(__name__)
+
+
+def analyze_trade_parallelism(trades: pd.DataFrame, timeframe: str) -> pd.DataFrame:
+    """
+    Find overlapping trades by expanding each trade once per period it was open
+    and then counting overlaps.
+    :param trades: Trades Dataframe - can be loaded from backtest, or created
+                   via trade_list_to_dataframe
+    :param timeframe: Timeframe used for backtest
+    :return: dataframe with open-counts per time-period in timeframe
+    """
+    from freqtrade.exchange import timeframe_to_resample_freq
+
+    timeframe_freq = timeframe_to_resample_freq(timeframe)
+    dates = [
+        pd.Series(
+            pd.date_range(
+                row[1]["open_date"],
+                row[1]["close_date"],
+                freq=timeframe_freq,
+                # Exclude right boundary - the date is the candle open date.
+                inclusive="left",
+            )
+        )
+        for row in trades[["open_date", "close_date"]].iterrows()
+    ]
+    deltas = [len(x) for x in dates]
+    dates = pd.Series(pd.concat(dates).values, name="date")
+    df2 = pd.DataFrame(np.repeat(trades.values, deltas, axis=0), columns=trades.columns)
+
+    df2 = pd.concat([dates, df2], axis=1)
+    df2 = df2.set_index("date")
+    df_final = df2.resample(timeframe_freq)[["pair"]].count()
+    df_final = df_final.rename({"pair": "open_trades"}, axis=1)
+    return df_final
+
+
+def evaluate_result_multi(
+    trades: pd.DataFrame, timeframe: str, max_open_trades: IntOrInf
+) -> pd.DataFrame:
+    """
+    Find overlapping trades by expanding each trade once per period it was open
+    and then counting overlaps
+    :param trades: Trades Dataframe - can be loaded from backtest, or created
+                   via trade_list_to_dataframe
+    :param timeframe: Frequency used for the backtest
+    :param max_open_trades: parameter max_open_trades used during backtest run
+    :return: dataframe with open-counts per time-period in freq
+    """
+    df_final = analyze_trade_parallelism(trades, timeframe)
+    return df_final[df_final["open_trades"] > max_open_trades]
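
The relocated functions keep the same expand-and-count approach: each trade is expanded into one row per candle it was open, then the rows are resampled by the timeframe and counted. A usage sketch against a synthetic trades frame, using only the columns the functions actually read (pair, open_date, close_date):

```python
import pandas as pd

from freqtrade.data.btanalysis import analyze_trade_parallelism, evaluate_result_multi

# Two trades on a 5m timeframe; the first spans three candles, the second one.
trades = pd.DataFrame(
    {
        "pair": ["BTC/USDT", "ETH/USDT"],
        "open_date": pd.to_datetime(["2024-01-01 00:00", "2024-01-01 00:05"], utc=True),
        "close_date": pd.to_datetime(["2024-01-01 00:15", "2024-01-01 00:10"], utc=True),
    }
)

# Open-trade count per candle: 1 at 00:00, 2 at 00:05, 1 at 00:10.
open_counts = analyze_trade_parallelism(trades, "5m")
print(open_counts)

# Candles where more trades were open than max_open_trades allows (here only 00:05).
violations = evaluate_result_multi(trades, "5m", max_open_trades=1)
print(violations)
```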

freqtrade/optimize/backtesting.py

Lines changed: 4 additions & 4 deletions
@@ -123,7 +123,7 @@ def __init__(self, config: Config, exchange: Exchange | None = None) -> None:
             config["dry_run"] = True
         self.run_ids: dict[str, str] = {}
         self.strategylist: list[IStrategy] = []
-        self.all_results: dict[str, BacktestContentType] = {}
+        self.all_bt_content: dict[str, BacktestContentType] = {}
         self.analysis_results: dict[str, dict[str, DataFrame]] = {
             "signals": {},
             "rejected": {},
@@ -1717,7 +1717,7 @@ def backtest_one_strategy(
                 "backtest_end_time": int(backtest_end_time.timestamp()),
             }
         )
-        self.all_results[strategy_name] = results
+        self.all_bt_content[strategy_name] = results

         if (
             self.config.get("export", "none") == "signals"
@@ -1780,9 +1780,9 @@ def start(self) -> None:
             min_date, max_date = self.backtest_one_strategy(strat, data, timerange)

         # Update old results with new ones.
-        if len(self.all_results) > 0:
+        if len(self.all_bt_content) > 0:
             results = generate_backtest_stats(
-                data, self.all_results, min_date=min_date, max_date=max_date
+                data, self.all_bt_content, min_date=min_date, max_date=max_date
             )
             if self.results:
                 self.results["metadata"].update(results["metadata"])

freqtrade/persistence/key_value_store.py

Lines changed: 7 additions & 5 deletions
@@ -1,6 +1,6 @@
 from datetime import datetime, timezone
 from enum import Enum
-from typing import ClassVar
+from typing import ClassVar, Literal

 from sqlalchemy import String
 from sqlalchemy.orm import Mapped, mapped_column
@@ -18,9 +18,11 @@ class ValueTypesEnum(str, Enum):
     INT = "int"


-class KeyStoreKeys(str, Enum):
-    BOT_START_TIME = "bot_start_time"
-    STARTUP_TIME = "startup_time"
+KeyStoreKeys = Literal[
+    "bot_start_time",
+    "startup_time",
+    "binance_migration",
+]


 class _KeyValueStoreModel(ModelBase):
@@ -192,7 +194,7 @@ def get_int_value(key: KeyStoreKeys) -> int | None:
     return kv.int_value


-def set_startup_time():
+def set_startup_time() -> None:
     """
     sets bot_start_time to the first trade open date - or "now" on new databases.
     sets startup_time to "now"
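
KeyStoreKeys changes from a str Enum to a typing.Literal alias, so callers now pass plain strings and the allowed key set is enforced by the type checker rather than by an Enum at runtime (with a new "binance_migration" key added). A self-contained sketch of that pattern; the names below are illustrative and not freqtrade's actual store API:

```python
from typing import Literal

# Stand-in for the KeyStoreKeys alias; demo_set is not a freqtrade function.
DemoKeys = Literal["bot_start_time", "startup_time", "binance_migration"]


def demo_set(key: DemoKeys, value: str) -> None:
    # mypy accepts only the literal strings above for "key"; anything else is
    # flagged at type-check time - there is no runtime membership check.
    print(f"{key} = {value}")


demo_set("startup_time", "2024-01-01T00:00:00+00:00")  # accepted
# demo_set("unknown_key", "x")  # rejected by mypy: not a valid DemoKeys literal
```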

freqtrade/persistence/migrations.py

Lines changed: 3 additions & 3 deletions
@@ -1,6 +1,6 @@
 import logging

-from sqlalchemy import inspect, select, text, update
+from sqlalchemy import Engine, inspect, select, text, update

 from freqtrade.exceptions import OperationalException
 from freqtrade.persistence.trade_model import Order, Trade
@@ -9,7 +9,7 @@
 logger = logging.getLogger(__name__)


-def get_table_names_for_table(inspector, tabletype) -> list[str]:
+def get_table_names_for_table(inspector, tabletype: str) -> list[str]:
     return [t for t in inspector.get_table_names() if t.startswith(tabletype)]


@@ -350,7 +350,7 @@ def fix_wrong_max_stake_amount(engine):
         connection.execute(stmt)


-def check_migrate(engine, decl_base, previous_tables) -> None:
+def check_migrate(engine: Engine, decl_base, previous_tables: list[str]) -> None:
     """
     Checks if migration is necessary and migrates if necessary
     """

freqtrade/rpc/api_server/api_backtest.py

Lines changed: 4 additions & 1 deletion
@@ -96,7 +96,10 @@ def __run_backtest_bg(btconfig: Config):
         )

         ApiBG.bt["bt"].results = generate_backtest_stats(
-            ApiBG.bt["data"], ApiBG.bt["bt"].all_results, min_date=min_date, max_date=max_date
+            ApiBG.bt["data"],
+            ApiBG.bt["bt"].all_bt_content,
+            min_date=min_date,
+            max_date=max_date,
         )

         if btconfig.get("export", "none") == "trades":
