first add

Ubuntu 2025-04-21 21:11:51 +08:00
commit 17199e9a44
657 changed files with 212934 additions and 0 deletions

7
.coveragerc Normal file

@@ -0,0 +1,7 @@
[run]
omit =
scripts/*
freqtrade/templates/*
freqtrade/vendor/*
freqtrade/__main__.py
tests/*


@@ -0,0 +1,44 @@
{
"name": "freqtrade Develop",
"image": "ghcr.io/freqtrade/freqtrade-devcontainer:latest",
// Use 'forwardPorts' to make a list of ports inside the container available locally.
"forwardPorts": [
8080
],
"workspaceMount": "source=${localWorkspaceFolder},target=/workspaces/freqtrade,type=bind,consistency=cached",
// Uncomment to connect as a non-root user if you've added one. See https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "ftuser",
"onCreateCommand": "pip install --user -e .",
"postCreateCommand": "freqtrade create-userdir --userdir user_data/",
"workspaceFolder": "/workspaces/freqtrade",
"customizations": {
"vscode": {
"settings": {
"terminal.integrated.shell.linux": "/bin/bash",
"editor.insertSpaces": true,
"files.trimTrailingWhitespace": true,
"[markdown]": {
"files.trimTrailingWhitespace": false
},
"python.pythonPath": "/usr/local/bin/python",
"[python]": {
"editor.codeActionsOnSave": {
"source.organizeImports": "explicit"
},
"editor.formatOnSave": true,
"editor.defaultFormatter": "charliermarsh.ruff"
}
},
// Add the IDs of extensions you want installed when the container is created.
"extensions": [
"ms-python.python",
"ms-python.vscode-pylance",
"charliermarsh.ruff",
"davidanson.vscode-markdownlint",
"ms-azuretools.vscode-docker",
"vscode-icons-team.vscode-icons",
"github.vscode-github-actions",
],
}
}
}

25
.dockerignore Normal file

@@ -0,0 +1,25 @@
.git
.gitignore
Dockerfile
Dockerfile.armhf
.dockerignore
docker/
.coveragerc
.eggs
.github
.pylintrc
.travis.yml
CONTRIBUTING.md
MANIFEST.in
README.md
freqtrade.service
freqtrade.egg-info
config.json*
*.sqlite
user_data
*.log
.vscode
.mypy_cache
.ipynb_checkpoints

3
.gitattributes vendored Normal file

@@ -0,0 +1,3 @@
*.py eol=lf
*.sh eol=lf
*.ps1 eol=crlf

21
.github/.devcontainer/Dockerfile vendored Normal file

@@ -0,0 +1,21 @@
FROM freqtradeorg/freqtrade:develop_freqairl
USER root
# Install dependencies
COPY requirements-dev.txt /freqtrade/
ARG USERNAME=ftuser
RUN apt-get update \
&& apt-get -y install --no-install-recommends apt-utils dialog git ssh vim build-essential zsh \
&& apt-get clean \
&& mkdir -p /home/${USERNAME}/.vscode-server /home/${USERNAME}/.vscode-server-insiders /home/${USERNAME}/commandhistory \
&& chown ${USERNAME}:${USERNAME} -R /home/${USERNAME}/.local/ \
&& chown ${USERNAME}: -R /home/${USERNAME}/
USER ftuser
RUN pip install --user autopep8 -r docs/requirements-docs.txt -r requirements-dev.txt --no-cache-dir
# Empty the ENTRYPOINT to allow all commands
ENTRYPOINT []

12
.github/.devcontainer/devcontainer.json vendored Normal file

@@ -0,0 +1,12 @@
{
"name": "freqtrade Dev container image builder",
"build": {
"dockerfile": "Dockerfile",
"context": "../../"
},
"features": {
"ghcr.io/devcontainers/features/common-utils:2": {
},
"ghcr.io/stuartleeks/dev-container-features/shell-history:0.0.3": {}
}
}

3
.github/FUNDING.yml vendored Normal file

@@ -0,0 +1,3 @@
# These are supported funding model platforms
github: [xmatthias]

48
.github/ISSUE_TEMPLATE/bug_report.md vendored Normal file

@@ -0,0 +1,48 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: "Triage Needed"
assignees: ''
---
<!--
Have you searched for similar issues before posting it?
If you have discovered a bug in the bot, please [search the issue tracker](https://github.com/freqtrade/freqtrade/issues?q=is%3Aissue).
If it hasn't been reported, please create a new issue.
Please do not use bug reports to request new features.
-->
## Describe your environment
* Operating system: ____
* Python Version: _____ (`python -V`)
* CCXT version: _____ (`pip freeze | grep ccxt`)
* Freqtrade Version: ____ (`freqtrade -V` or `docker compose run --rm freqtrade -V` for Freqtrade running in docker)
Note: All issues other than enhancement requests will be closed without further comment if the above template is deleted or not filled out.
## Describe the problem:
*Explain the problem you have encountered*
### Steps to reproduce:
1. _____
2. _____
3. _____
### Observed Results:
* What happened?
* What did you expect to happen?
### Relevant code exceptions or logs
Note: Please copy/paste the text of the messages; no screenshots of logs, please.
```
// paste your log here
```

6
.github/ISSUE_TEMPLATE/config.yml vendored Normal file

@@ -0,0 +1,6 @@
---
blank_issues_enabled: false
contact_links:
- name: Discord Server
url: https://discord.gg/p7nuUNVfP7
about: Ask a question or get community support from our Discord server


@@ -0,0 +1,26 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''
---
<!--
Note: this section will not show up in the issue.
Have you searched for this feature before requesting it? It's highly likely that a similar request was already filed.
-->
## Describe your environment
(if applicable)
* Operating system: ____
* Python Version: _____ (`python -V`)
* CCXT version: _____ (`pip freeze | grep ccxt`)
* Freqtrade Version: ____ (`freqtrade -V` or `docker compose run --rm freqtrade -V` for Freqtrade running in docker)
## Describe the enhancement
*Explain the enhancement you would like*

25
.github/ISSUE_TEMPLATE/question.md vendored Normal file

@@ -0,0 +1,25 @@
---
name: Question
about: Ask a question you could not find an answer to in the docs
title: ''
labels: "Question"
assignees: ''
---
<!--
Have you searched for similar issues before posting it?
Did you have a VERY good look at the [documentation](https://www.freqtrade.io/en/latest/), and are you sure that the question is not explained there?
Please do not use the question template to report bugs or to request new features.
-->
## Describe your environment
* Operating system: ____
* Python Version: _____ (`python -V`)
* CCXT version: _____ (`pip freeze | grep ccxt`)
* Freqtrade Version: ____ (`freqtrade -V` or `docker compose run --rm freqtrade -V` for Freqtrade running in docker)
## Your question
*Ask the question you have not been able to find an answer to in the [Documentation](https://www.freqtrade.io/en/latest/)*

17
.github/PULL_REQUEST_TEMPLATE.md vendored Normal file

@@ -0,0 +1,17 @@
<!-- Thank you for sending your pull request. But first, have you included
unit tests, and is your code PEP8 conformant? [More details](https://github.com/freqtrade/freqtrade/blob/develop/CONTRIBUTING.md)
-->
## Summary
<!-- Explain in one sentence the goal of this PR -->
Solve the issue: #___
## Quick changelog
- <change log 1>
- <change log 2>
## What's new?
<!-- Explain in detail what this PR solves or improves. You can include visuals. -->

38
.github/dependabot.yml vendored Normal file

@@ -0,0 +1,38 @@
version: 2
updates:
- package-ecosystem: docker
directories:
- "/"
- "/docker"
schedule:
interval: daily
ignore:
- dependency-name: "*"
update-types: ["version-update:semver-major"]
open-pull-requests-limit: 10
- package-ecosystem: pip
directory: "/"
schedule:
interval: weekly
time: "03:00"
timezone: "Etc/UTC"
open-pull-requests-limit: 15
target-branch: develop
groups:
types:
patterns:
- "types-*"
pytest:
patterns:
- "pytest*"
mkdocs:
patterns:
- "mkdocs*"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
open-pull-requests-limit: 10
target-branch: develop


@@ -0,0 +1,50 @@
name: Binance Leverage tiers update
on:
schedule:
- cron: "0 3 * * 4"
# on demand
workflow_dispatch:
permissions:
contents: read
jobs:
auto-update:
runs-on: ubuntu-latest
environment:
name: develop
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: "3.12"
- name: Install ccxt
run: pip install ccxt
- name: Run leverage tier update
env:
CI_WEB_PROXY: ${{ secrets.CI_WEB_PROXY }}
FREQTRADE__EXCHANGE__KEY: ${{ secrets.BINANCE_EXCHANGE_KEY }}
FREQTRADE__EXCHANGE__SECRET: ${{ secrets.BINANCE_EXCHANGE_SECRET }}
run: python build_helpers/binance_update_lev_tiers.py
- uses: peter-evans/create-pull-request@v7
with:
token: ${{ secrets.REPO_SCOPED_TOKEN }}
add-paths: freqtrade/exchange/binance_leverage_tiers.json
labels: |
Tech maintenance
Dependencies
branch: update/binance-leverage-tiers
title: Update Binance Leverage Tiers
commit-message: "chore: update binance leverage tiers"
committer: Freqtrade Bot <154552126+freqtrade-bot@users.noreply.github.com>
author: Freqtrade Bot <154552126+freqtrade-bot@users.noreply.github.com>
body: Update binance leverage tiers.
delete-branch: true

711
.github/workflows/ci.yml vendored Normal file

@@ -0,0 +1,711 @@
name: Freqtrade CI
on:
push:
branches:
- stable
- develop
- ci/*
tags:
release:
types: [published]
pull_request:
schedule:
- cron: '0 3 * * 4'
concurrency:
group: "${{ github.workflow }}-${{ github.ref }}-${{ github.event_name }}"
cancel-in-progress: true
permissions:
repository-projects: read
jobs:
build-linux:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ "ubuntu-22.04", "ubuntu-24.04" ]
python-version: ["3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
python-version: ${{ matrix.python-version }}
cache-dependency-glob: "requirements**.txt"
cache-suffix: "${{ matrix.python-version }}"
prune-cache: false
- name: Cache_dependencies
uses: actions/cache@v4
id: cache
with:
path: ~/dependencies/
key: ${{ runner.os }}-dependencies
- name: TA binary *nix
if: steps.cache.outputs.cache-hit != 'true'
run: |
cd build_helpers && ./install_ta-lib.sh ${HOME}/dependencies/; cd ..
- name: Installation - *nix
run: |
uv pip install --upgrade wheel
export LD_LIBRARY_PATH=${HOME}/dependencies/lib:$LD_LIBRARY_PATH
export TA_LIBRARY_PATH=${HOME}/dependencies/lib
export TA_INCLUDE_PATH=${HOME}/dependencies/include
uv pip install -r requirements-dev.txt
uv pip install -e ft_client/
uv pip install -e .
- name: Check for version alignment
run: |
python build_helpers/freqtrade_client_version_align.py
- name: Tests
if: (!(runner.os == 'Linux' && matrix.python-version == '3.12' && matrix.os == 'ubuntu-24.04'))
run: |
pytest --random-order
- name: Tests with Coveralls
if: (runner.os == 'Linux' && matrix.python-version == '3.12' && matrix.os == 'ubuntu-24.04')
run: |
pytest --random-order --cov=freqtrade --cov=freqtrade_client --cov-config=.coveragerc
- name: Coveralls
if: (runner.os == 'Linux' && matrix.python-version == '3.12' && matrix.os == 'ubuntu-24.04')
env:
# Coveralls token. Not used as secret due to github not providing secrets to forked repositories
COVERALLS_REPO_TOKEN: 6D1m0xupS3FgutfuGao8keFf9Hc0FpIXu
run: |
# Allow failure for coveralls
coveralls || true
- name: Run json schema extract
# This should be kept before the repository check to ensure that the schema is up-to-date
run: |
python build_helpers/extract_config_json_schema.py
- name: Run command docs partials extract
# This should be kept before the repository check to ensure that the docs are up-to-date
run: |
python build_helpers/create_command_partials.py
- name: Check for repository changes
run: |
if [ -n "$(git status --porcelain)" ]; then
echo "Repository is dirty, changes detected:"
git status
git diff
exit 1
else
echo "Repository is clean, no changes detected."
fi
- name: Backtesting (multi)
run: |
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
freqtrade new-strategy -s AwesomeStrategy
freqtrade new-strategy -s AwesomeStrategyMin --template minimal
freqtrade backtesting --datadir tests/testdata --strategy-list AwesomeStrategy AwesomeStrategyMin -i 5m
- name: Hyperopt
run: |
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
freqtrade hyperopt --datadir tests/testdata -e 6 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all
- name: Sort imports (isort)
run: |
isort --check .
- name: Run Ruff
run: |
ruff check --output-format=github
- name: Run Ruff format check
run: |
ruff format --check
- name: Mypy
if: matrix.os == 'ubuntu-24.04'
run: |
mypy freqtrade scripts tests
- name: Discord notification
uses: rjstone/discord-webhook-notify@v1
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: error
details: Freqtrade CI failed on ${{ matrix.os }}
webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
build-macos:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ "macos-13", "macos-14", "macos-15" ]
python-version: ["3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Install uv
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
python-version: ${{ matrix.python-version }}
cache-dependency-glob: "requirements**.txt"
cache-suffix: "${{ matrix.python-version }}"
prune-cache: false
- name: Cache_dependencies
uses: actions/cache@v4
id: cache
with:
path: ~/dependencies/
key: ${{ matrix.os }}-dependencies
- name: TA binary *nix
if: steps.cache.outputs.cache-hit != 'true'
run: |
cd build_helpers && ./install_ta-lib.sh ${HOME}/dependencies/; cd ..
- name: Installation - macOS (Brew)
run: |
# brew update
# TODO: Should be the brew upgrade
# homebrew fails to update python due to unlinking failures
# https://github.com/actions/runner-images/issues/6817
rm /usr/local/bin/2to3 || true
rm /usr/local/bin/2to3-3.11 || true
rm /usr/local/bin/2to3-3.12 || true
rm /usr/local/bin/idle3 || true
rm /usr/local/bin/idle3.11 || true
rm /usr/local/bin/idle3.12 || true
rm /usr/local/bin/pydoc3 || true
rm /usr/local/bin/pydoc3.11 || true
rm /usr/local/bin/pydoc3.12 || true
rm /usr/local/bin/python3 || true
rm /usr/local/bin/python3.11 || true
rm /usr/local/bin/python3.12 || true
rm /usr/local/bin/python3-config || true
rm /usr/local/bin/python3.11-config || true
rm /usr/local/bin/python3.12-config || true
brew install libomp
- name: Installation (python)
run: |
uv pip install wheel
export LD_LIBRARY_PATH=${HOME}/dependencies/lib:$LD_LIBRARY_PATH
export TA_LIBRARY_PATH=${HOME}/dependencies/lib
export TA_INCLUDE_PATH=${HOME}/dependencies/include
uv pip install -r requirements-dev.txt
uv pip install -e ft_client/
uv pip install -e .
- name: Tests
run: |
pytest --random-order
- name: Check for repository changes
run: |
if [ -n "$(git status --porcelain)" ]; then
echo "Repository is dirty, changes detected:"
git status
git diff
exit 1
else
echo "Repository is clean, no changes detected."
fi
- name: Backtesting
run: |
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
freqtrade new-strategy -s AwesomeStrategyAdv --template advanced
freqtrade backtesting --datadir tests/testdata --strategy AwesomeStrategyAdv
- name: Hyperopt
run: |
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all
- name: Sort imports (isort)
run: |
isort --check .
- name: Run Ruff
run: |
ruff check --output-format=github
- name: Run Ruff format check
run: |
ruff format --check
- name: Mypy
if: matrix.os == 'macos-15'
run: |
mypy freqtrade scripts
- name: Discord notification
uses: rjstone/discord-webhook-notify@v1
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: error
details: Freqtrade CI failed on ${{ matrix.os }}
webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
build-windows:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ windows-latest ]
python-version: ["3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
python-version: ${{ matrix.python-version }}
cache-dependency-glob: "requirements**.txt"
cache-suffix: "${{ matrix.python-version }}"
prune-cache: false
- name: Installation
run: |
function uvpipFunction { uv pip $args }
Set-Alias -name pip -value uvpipFunction
./build_helpers/install_windows.ps1
- name: Tests
run: |
pytest --random-order --durations 20 -n auto
- name: Check for repository changes
run: |
if (git status --porcelain) {
Write-Host "Repository is dirty, changes detected:"
git status
git diff
exit 1
}
else {
Write-Host "Repository is clean, no changes detected."
}
- name: Backtesting
run: |
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
freqtrade backtesting --datadir tests/testdata --strategy SampleStrategy
- name: Hyperopt
run: |
cp tests/testdata/config.tests.json config.json
freqtrade create-userdir --userdir user_data
freqtrade hyperopt --datadir tests/testdata -e 5 --strategy SampleStrategy --hyperopt-loss SharpeHyperOptLossDaily --print-all
- name: Run Ruff
run: |
ruff check --output-format=github
- name: Run Ruff format check
run: |
ruff format --check
- name: Mypy
run: |
mypy freqtrade scripts tests
- name: Run Pester tests (PowerShell)
run: |
$PSVersionTable
Set-PSRepository psgallery -InstallationPolicy trusted
Install-Module -Name Pester -RequiredVersion 5.3.1 -Confirm:$false -Force -SkipPublisherCheck
$Error.clear()
Invoke-Pester -Path "tests" -CI
if ($Error.Length -gt 0) {exit 1}
shell: powershell
- name: Discord notification
uses: rjstone/discord-webhook-notify@v1
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: error
details: Test Failed
webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
mypy-version-check:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.12"
- name: pre-commit dependencies
run: |
pip install pyaml
python build_helpers/pre_commit_update.py
pre-commit:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: "3.12"
- uses: pre-commit/action@v3.0.1
docs-check:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Documentation syntax
run: |
./tests/test_docs.sh
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.12"
- name: Documentation build
run: |
pip install -r docs/requirements-docs.txt
mkdocs build
- name: Discord notification
uses: rjstone/discord-webhook-notify@v1
if: failure() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: error
details: Freqtrade doc test failed!
webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
build-linux-online:
# Run pytest with "live" checks
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.12"
- name: Install uv
uses: astral-sh/setup-uv@v5
with:
enable-cache: true
python-version: "3.12"
cache-dependency-glob: "requirements**.txt"
cache-suffix: "3.12"
prune-cache: false
- name: Cache_dependencies
uses: actions/cache@v4
id: cache
with:
path: ~/dependencies/
key: ${{ runner.os }}-dependencies
- name: TA binary *nix
if: steps.cache.outputs.cache-hit != 'true'
run: |
cd build_helpers && ./install_ta-lib.sh ${HOME}/dependencies/; cd ..
- name: Installation - *nix
run: |
uv pip install --upgrade wheel
export LD_LIBRARY_PATH=${HOME}/dependencies/lib:$LD_LIBRARY_PATH
export TA_LIBRARY_PATH=${HOME}/dependencies/lib
export TA_INCLUDE_PATH=${HOME}/dependencies/include
uv pip install -r requirements-dev.txt
uv pip install -e ft_client/
uv pip install -e .
- name: Tests incl. ccxt compatibility tests
env:
CI_WEB_PROXY: http://152.67.78.211:13128
run: |
pytest --random-order --longrun --durations 20 -n auto
# Notify only once - when CI completes (and after deploy) in case it's successful
notify-complete:
needs: [
build-linux,
build-macos,
build-windows,
docs-check,
mypy-version-check,
pre-commit,
build-linux-online
]
runs-on: ubuntu-22.04
# Discord notification can't handle schedule events
if: github.event_name != 'schedule' && github.repository == 'freqtrade/freqtrade'
permissions:
repository-projects: read
steps:
- name: Check user permission
id: check
uses: scherermichael-oss/action-has-permission@1.0.6
with:
required-permission: write
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Discord notification
uses: rjstone/discord-webhook-notify@v1
if: always() && steps.check.outputs.has-permission && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false)
with:
severity: info
details: Test Completed!
webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}
build:
name: "Build"
needs: [ build-linux, build-macos, build-windows, docs-check, mypy-version-check, pre-commit ]
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.12"
- name: Build distribution
run: |
pip install -U build
python -m build --sdist --wheel
- name: Upload artifacts 📦
uses: actions/upload-artifact@v4
with:
name: freqtrade-build
path: |
dist
retention-days: 10
- name: Build Client distribution
run: |
pip install -U build
python -m build --sdist --wheel ft_client
- name: Upload artifacts 📦
uses: actions/upload-artifact@v4
with:
name: freqtrade-client-build
path: |
ft_client/dist
retention-days: 10
deploy-test-pypi:
name: "Publish Python 🐍 distribution 📦 to TestPyPI"
needs: [ build ]
runs-on: ubuntu-22.04
if: (github.event_name == 'release')
environment:
name: testpypi
url: https://test.pypi.org/p/freqtrade
permissions:
id-token: write
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Download artifact 📦
uses: actions/download-artifact@v4
with:
pattern: freqtrade*-build
path: dist
merge-multiple: true
- name: Publish to PyPI (Test)
uses: pypa/gh-action-pypi-publish@v1.12.4
with:
repository-url: https://test.pypi.org/legacy/
deploy-pypi:
name: "Publish Python 🐍 distribution 📦 to PyPI"
needs: [ build ]
runs-on: ubuntu-22.04
if: (github.event_name == 'release')
environment:
name: pypi
url: https://pypi.org/p/freqtrade
permissions:
id-token: write
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Download artifact 📦
uses: actions/download-artifact@v4
with:
pattern: freqtrade*-build
path: dist
merge-multiple: true
- name: Publish to PyPI
uses: pypa/gh-action-pypi-publish@v1.12.4
deploy-docker:
needs: [ build-linux, build-macos, build-windows, docs-check, mypy-version-check, pre-commit ]
runs-on: ubuntu-22.04
if: (github.event_name == 'push' || github.event_name == 'schedule' || github.event_name == 'release') && github.repository == 'freqtrade/freqtrade'
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.12"
- name: Extract branch name
id: extract-branch
run: |
echo "GITHUB_REF='${GITHUB_REF}'"
echo "branch=${GITHUB_REF##*/}" >> "$GITHUB_OUTPUT"
- name: Dockerhub login
env:
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
run: |
echo "${DOCKER_PASSWORD}" | docker login --username ${DOCKER_USERNAME} --password-stdin
# We need docker experimental to pull the ARM image.
- name: Switch docker to experimental
run: |
docker version -f '{{.Server.Experimental}}'
echo $'{\n "experimental": true\n}' | sudo tee /etc/docker/daemon.json
sudo systemctl restart docker
docker version -f '{{.Server.Experimental}}'
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3
- name: Available platforms
run: echo ${PLATFORMS}
env:
PLATFORMS: ${{ steps.buildx.outputs.platforms }}
- name: Build and test and push docker images
env:
BRANCH_NAME: ${{ steps.extract-branch.outputs.branch }}
run: |
build_helpers/publish_docker_multi.sh
deploy-arm:
name: "Deploy Docker"
permissions:
packages: write
needs: [ deploy-docker ]
# Only run on 64bit machines
runs-on: [self-hosted, linux, ARM64]
if: (github.event_name == 'push' || github.event_name == 'schedule' || github.event_name == 'release') && github.repository == 'freqtrade/freqtrade'
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Extract branch name
id: extract-branch
run: |
echo "GITHUB_REF='${GITHUB_REF}'"
echo "branch=${GITHUB_REF##*/}" >> "$GITHUB_OUTPUT"
- name: Dockerhub login
env:
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
run: |
echo "${DOCKER_PASSWORD}" | docker login --username ${DOCKER_USERNAME} --password-stdin
- name: Build and test and push docker images
env:
BRANCH_NAME: ${{ steps.extract-branch.outputs.branch }}
GHCR_USERNAME: ${{ github.actor }}
GHCR_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
build_helpers/publish_docker_arm64.sh
- name: Discord notification
uses: rjstone/discord-webhook-notify@v1
if: always() && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.fork == false) && (github.event_name != 'schedule')
with:
severity: info
details: Deploy Succeeded!
webhookUrl: ${{ secrets.DISCORD_WEBHOOK }}

61
.github/workflows/deploy-docs.yml vendored Normal file

@@ -0,0 +1,61 @@
name: Build Documentation
on:
push:
branches:
- develop
release:
types: [published]
# disable permissions for all of the available permissions
permissions: {}
jobs:
build-docs:
permissions:
contents: write # for mike to push
name: Deploy Docs through mike
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
persist-credentials: true
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.12'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r docs/requirements-docs.txt
- name: Fetch gh-pages branch
run: |
git fetch origin gh-pages --depth=1
- name: Configure Git user
run: |
git config --local user.email "github-actions[bot]@users.noreply.github.com"
git config --local user.name "github-actions[bot]"
- name: Build and push Mike
if: ${{ github.event_name == 'push' }}
run: |
mike deploy ${REF_NAME} latest --push --update-aliases
env:
REF_NAME: ${{ github.ref_name }}
- name: Build and push Mike - Release
if: ${{ github.event_name == 'release' }}
run: |
mike deploy ${REF_NAME} stable --push --update-aliases
env:
REF_NAME: ${{ github.ref_name }}
- name: Show mike versions
run: |
mike list


@@ -0,0 +1,42 @@
name: Devcontainer Pre-Build
on:
workflow_dispatch:
schedule:
- cron: "0 3 * * 0"
# push:
# branches:
# - "master"
# tags:
# - "v*.*.*"
# pull_requests:
# branches:
# - "master"
concurrency:
group: "${{ github.workflow }}"
cancel-in-progress: true
jobs:
build-and-push:
permissions:
packages: write
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Login to GitHub Container Registry
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Pre-build dev container image
uses: devcontainers/ci@v0.3
with:
subFolder: .github
imageName: ghcr.io/${{ github.repository }}-devcontainer
cacheFrom: ghcr.io/${{ github.repository }}-devcontainer
push: always


@@ -0,0 +1,23 @@
name: Update Docker Hub Description
on:
push:
branches:
- stable
# disable permissions for all of the available permissions
permissions: {}
jobs:
dockerHubDescription:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- name: Docker Hub Description
uses: peter-evans/dockerhub-description@v4
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
repository: freqtradeorg/freqtrade

44
.github/workflows/pre-commit-update.yml vendored Normal file

@@ -0,0 +1,44 @@
name: Pre-commit auto-update
on:
schedule:
- cron: "0 3 * * 2"
# on demand
workflow_dispatch:
permissions:
contents: read
jobs:
auto-update:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
- uses: actions/setup-python@v5
with:
python-version: "3.12"
- name: Install pre-commit
run: pip install pre-commit
- name: Run auto-update
run: pre-commit autoupdate
- uses: peter-evans/create-pull-request@v7
with:
token: ${{ secrets.REPO_SCOPED_TOKEN }}
add-paths: .pre-commit-config.yaml
labels: |
Tech maintenance
Dependencies
branch: update/pre-commit-hooks
title: Update pre-commit hooks
commit-message: "chore: update pre-commit hooks"
committer: Freqtrade Bot <154552126+freqtrade-bot@users.noreply.github.com>
author: Freqtrade Bot <154552126+freqtrade-bot@users.noreply.github.com>
body: Update versions of pre-commit hooks to latest version.
delete-branch: true

119
.gitignore vendored Normal file

@@ -0,0 +1,119 @@
# Freqtrade rules
config*.json
*.sqlite
*.sqlite-shm
*.sqlite-wal
logfile.txt
user_data/*
!user_data/strategy/sample_strategy.py
!user_data/notebooks
!user_data/models
!user_data/freqaimodels
user_data/freqaimodels/*
user_data/models/*
user_data/notebooks/*
freqtrade-plot.html
freqtrade-profit-plot.html
freqtrade/rpc/api_server/ui/*
build_helpers/ta-lib/*
# Macos related
.DS_Store
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# memray
memray-*
# Sphinx documentation
docs/_build/
# Mkdocs documentation
site/
# PyBuilder
target/
# Jupyter Notebook
*.ipynb_checkpoints
# pyenv
.python-version
.env
.venv
.idea
.vscode
.pytest_cache/
.mypy_cache/
#exceptions
!*.gitkeep
!config_examples/config_binance.example.json
!config_examples/config_full.example.json
!config_examples/config_kraken.example.json
!config_examples/config_freqai.example.json
docker-compose-*.yml
data/

87
.pre-commit-config.yaml Normal file

@@ -0,0 +1,87 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: local
# Keep json schema in sync with the config schema
# This will write the files - and fail pre-commit if a file has been changed.
hooks:
- id: Extract config json schema
name: extract-config-json-schema
entry: "python build_helpers/extract_config_json_schema.py"
language: python
pass_filenames: false
additional_dependencies: ["python-rapidjson", "jsonschema"]
- repo: https://github.com/pycqa/flake8
rev: "7.2.0"
hooks:
- id: flake8
additional_dependencies: [Flake8-pyproject]
# stages: [push]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: "v1.15.0"
hooks:
- id: mypy
exclude: build_helpers
additional_dependencies:
- types-cachetools==5.5.0.20240820
- types-filelock==3.2.7
- types-requests==2.32.0.20250328
- types-tabulate==0.9.0.20241207
- types-python-dateutil==2.9.0.20241206
- SQLAlchemy==2.0.40
# stages: [push]
- repo: https://github.com/pycqa/isort
rev: "6.0.1"
hooks:
- id: isort
name: isort (python)
# stages: [push]
- repo: https://github.com/charliermarsh/ruff-pre-commit
# Ruff version.
rev: 'v0.11.2'
hooks:
- id: ruff
- id: ruff-format
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: end-of-file-fixer
exclude: |
(?x)^(
tests/.*|
.*\.svg|
.*\.yml|
.*\.json
)$
- id: mixed-line-ending
- id: debug-statements
- id: check-ast
- id: trailing-whitespace
exclude: |
(?x)^(
.*\.md
)$
- repo: https://github.com/stefmolin/exif-stripper
rev: 0.6.2
hooks:
- id: strip-exif
- repo: https://github.com/codespell-project/codespell
rev: v2.4.1
hooks:
- id: codespell
additional_dependencies:
- tomli
# Ensure github actions remain safe
- repo: https://github.com/woodruffw/zizmor-pre-commit
rev: v1.5.2
hooks:
- id: zizmor

9
.pylintrc Normal file

@@ -0,0 +1,9 @@
[MASTER]
extension-pkg-whitelist=numpy,talib,talib.abstract
[BASIC]
good-names=logger
ignore=vendor
[TYPECHECK]
ignored-modules=numpy,talib,talib.abstract

14
.readthedocs.yml Normal file

@@ -0,0 +1,14 @@
# .readthedocs.yml
version: 2
build:
os: "ubuntu-22.04"
tools:
python: "3.11"
python:
install:
- requirements: docs/requirements-docs.txt
mkdocs:
configuration: mkdocs.yml

135
CONTRIBUTING.md Normal file

@@ -0,0 +1,135 @@
# Contributing
## Contribute to freqtrade
Feel like our bot is missing a feature? We welcome your pull requests!
Issues labeled [good first issue](https://github.com/freqtrade/freqtrade/labels/good%20first%20issue) can be good first contributions, and will help get you familiar with the codebase.
A few pointers for contributions:
- Create your PR against the `develop` branch, not `stable`.
- New features need to contain unit tests, must conform to PEP8 (max-line-length = 100) and should be documented with the introduction PR.
- PRs can be declared as `[WIP]`, which signifies a Work in Progress Pull Request (one that is not yet finished).
If you are unsure, discuss the feature on our [discord server](https://discord.gg/p7nuUNVfP7) or in an [issue](https://github.com/freqtrade/freqtrade/issues) before opening a Pull Request.
## Getting started
The best way to start is by reading the [documentation](https://www.freqtrade.io/) to get a feel for what is possible with the bot, or by heading straight to the [Developer documentation](https://www.freqtrade.io/en/latest/developer/) (WIP), which should help you get started.
## Before sending the PR
### 1. Run unit tests
All unit tests must pass. If a unit test is broken, change your code to make it pass, as a failing test means you have introduced a regression.
#### Test the whole project
```bash
pytest
```
#### Test only one file
```bash
pytest tests/test_<file_name>.py
```
#### Test only one method from one file
```bash
pytest tests/test_<file_name>.py::test_<method_name>
```
### 2. Test if your code is PEP8 compliant
#### Run Ruff
```bash
ruff check .
```
We receive a lot of code that fails the `ruff` checks.
To help with that, we encourage you to install the git pre-commit hook that will warn you when you try to commit code that fails these checks.
You can manually run pre-commit with `pre-commit run -a`.
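If the hook is not yet set up, it can be installed once per clone with the standard `pre-commit` commands (shown here for convenience):

```bash
pip install pre-commit   # skip if pre-commit is already installed, e.g. via the dev requirements
pre-commit install       # registers the git hook for this repository
```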
##### Additional styles applied
* Have docstrings on all public methods
* Use double-quotes for docstrings
* Multiline docstrings should be indented to the level of the first quote
* Docstrings should follow the reST format (`:param xxx: ...`, `:return: ...`, `:raises KeyError: ...`) - see the example below
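For illustration, a docstring following this style could look like the hypothetical example below (the function name and parameters are made up for the example):

```python
def fetch_balance(currency: str) -> float:
    """
    Return the available balance for a single currency.

    :param currency: Currency code to query, e.g. "BTC"
    :return: Available balance as a float
    :raises KeyError: If the currency is not present in the wallet
    """
    ...
```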
### 3. Test if all type-hints are correct
#### Run mypy
``` bash
mypy freqtrade
```
### 4. Ensure formatting is correct
#### Run ruff
``` bash
ruff format .
```
## (Core)-Committer Guide
### Process: Pull Requests
How to prioritize pull requests, from most to least important:
1. Fixes for broken tests. Broken means broken on any supported platform or Python version.
1. Extra tests to cover corner cases.
1. Minor edits to docs.
1. Bug fixes.
1. Major edits to docs.
1. Features.
Ensure that each pull request meets all requirements in the Contributing document.
### Process: Issues
If an issue is a bug that needs an urgent fix, mark it for the next patch release.
Then either fix it or mark as please-help.
For other issues: encourage friendly discussion, moderate debate, offer your thoughts.
### Process: Your own code changes
All code changes, regardless of who does them, need to be reviewed and merged by someone else.
This rule applies to all the core committers.
Exceptions:
- Minor corrections and fixes to pull requests submitted by others.
- While making a formal release, the release manager can make necessary, appropriate changes.
- Small documentation changes that reinforce existing subject matter, most commonly (but not limited to) spelling and grammar corrections.
### Responsibilities
- Ensure cross-platform compatibility for every change that's accepted. Windows, Mac & Linux.
- Ensure no malicious code is introduced into the core code.
- Create issues for any major changes and enhancements that you wish to make. Discuss things transparently and get community feedback.
- Keep feature versions as small as possible, preferably one new feature per version.
- Be welcoming to newcomers and encourage diverse new contributors from all backgrounds. See the Python Community Code of Conduct (https://www.python.org/psf/codeofconduct/).
### Becoming a Committer
Contributors may be given commit privileges. Preference will be given to those with:
1. Past contributions to Freqtrade and other related open-source projects. Contributions to Freqtrade include both code (both accepted and pending) and friendly participation in the issue tracker and Pull request reviews. Both quantity and quality are considered.
1. A coding style that the other core committers find simple, minimal, and clean.
1. Access to resources for cross-platform development and testing.
1. Time to devote to the project regularly.
Being a Committer does not grant write permission on `develop` or `stable` for security reasons (Users trust Freqtrade with their Exchange API keys).
After being Committer for some time, a Committer may be named Core Committer and given full repository access.

58
Dockerfile Normal file

@@ -0,0 +1,58 @@
FROM python:3.12.9-slim-bookworm as base
# Setup env
ENV LANG C.UTF-8
ENV LC_ALL C.UTF-8
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONFAULTHANDLER 1
ENV PATH=/home/ftuser/.local/bin:$PATH
ENV FT_APP_ENV="docker"
# Prepare environment
RUN mkdir /freqtrade \
&& apt-get update \
&& apt-get -y install sudo libatlas3-base curl sqlite3 libgomp1 \
&& apt-get clean \
&& useradd -u 1000 -G sudo -U -m -s /bin/bash ftuser \
&& chown ftuser:ftuser /freqtrade \
# Allow sudoers
&& echo "ftuser ALL=(ALL) NOPASSWD: /bin/chown" >> /etc/sudoers
WORKDIR /freqtrade
# Install dependencies
FROM base as python-deps
RUN apt-get update \
&& apt-get -y install build-essential libssl-dev git libffi-dev libgfortran5 pkg-config cmake gcc \
&& apt-get clean \
&& pip install --upgrade pip wheel
# Install TA-lib
COPY build_helpers/* /tmp/
RUN cd /tmp && /tmp/install_ta-lib.sh && rm -r /tmp/*ta-lib*
ENV LD_LIBRARY_PATH /usr/local/lib
# Install dependencies
COPY --chown=ftuser:ftuser requirements.txt requirements-hyperopt.txt /freqtrade/
USER ftuser
RUN pip install --user --no-cache-dir "numpy<2.0" \
&& pip install --user --no-cache-dir -r requirements-hyperopt.txt
# Copy dependencies to runtime-image
FROM base as runtime-image
COPY --from=python-deps /usr/local/lib /usr/local/lib
ENV LD_LIBRARY_PATH /usr/local/lib
COPY --from=python-deps --chown=ftuser:ftuser /home/ftuser/.local /home/ftuser/.local
USER ftuser
# Install and execute
COPY --chown=ftuser:ftuser . /freqtrade/
RUN pip install -e . --user --no-cache-dir --no-build-isolation \
&& mkdir /freqtrade/user_data/ \
&& freqtrade install-ui
ENTRYPOINT ["freqtrade"]
# Default to trade mode
CMD [ "trade" ]

674
LICENSE Normal file

@@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
{one line to give the program's name and a brief idea of what it does.}
Copyright (C) {year} {name of author}
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
{project} Copyright (C) {year} {fullname}
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.

9
MANIFEST.in Normal file
View File

@ -0,0 +1,9 @@
include LICENSE
include README.md
recursive-include freqtrade *.py
recursive-include freqtrade/templates/ *.j2 *.ipynb
include freqtrade/exchange/binance_leverage_tiers.json
include freqtrade/rpc/api_server/ui/fallback_file.html
include freqtrade/rpc/api_server/ui/favicon.ico
prune tests

229
README.md Normal file
View File

@ -0,0 +1,229 @@
# ![freqtrade](https://raw.githubusercontent.com/freqtrade/freqtrade/develop/docs/assets/freqtrade_poweredby.svg)
[![Freqtrade CI](https://github.com/freqtrade/freqtrade/actions/workflows/ci.yml/badge.svg?branch=develop)](https://github.com/freqtrade/freqtrade/actions/)
[![DOI](https://joss.theoj.org/papers/10.21105/joss.04864/status.svg)](https://doi.org/10.21105/joss.04864)
[![Coverage Status](https://coveralls.io/repos/github/freqtrade/freqtrade/badge.svg?branch=develop&service=github)](https://coveralls.io/github/freqtrade/freqtrade?branch=develop)
[![Documentation](https://readthedocs.org/projects/freqtrade/badge/)](https://www.freqtrade.io)
[![Maintainability](https://api.codeclimate.com/v1/badges/5737e6d668200b7518ff/maintainability)](https://codeclimate.com/github/freqtrade/freqtrade/maintainability)
Freqtrade is a free and open source crypto trading bot written in Python. It is designed to support all major exchanges and be controlled via Telegram or webUI. It contains backtesting, plotting and money management tools as well as strategy optimization by machine learning.
![freqtrade](https://raw.githubusercontent.com/freqtrade/freqtrade/develop/docs/assets/freqtrade-screenshot.png)
## Disclaimer
This software is for educational purposes only. Do not risk money which
you are afraid to lose. USE THE SOFTWARE AT YOUR OWN RISK. THE AUTHORS
AND ALL AFFILIATES ASSUME NO RESPONSIBILITY FOR YOUR TRADING RESULTS.
Always start by running a trading bot in Dry-run mode and do not commit real money
before you understand how it works and what profit/loss you should
expect.
We strongly recommend that you have coding and Python knowledge. Do not
hesitate to read the source code and understand the mechanism of this bot.
## Supported Exchange marketplaces
Please read the [exchange specific notes](docs/exchanges.md) to learn about any special configuration needed for each exchange.
- [X] [Binance](https://www.binance.com/)
- [X] [Bitmart](https://bitmart.com/)
- [X] [BingX](https://bingx.com/invite/0EM9RX)
- [X] [Bybit](https://bybit.com/)
- [X] [Gate.io](https://www.gate.io/ref/6266643)
- [X] [HTX](https://www.htx.com/)
- [X] [Hyperliquid](https://hyperliquid.xyz/) (A decentralized exchange, or DEX)
- [X] [Kraken](https://kraken.com/)
- [X] [OKX](https://okx.com/)
- [X] [MyOKX](https://okx.com/) (OKX EEA)
- [ ] [potentially many others](https://github.com/ccxt/ccxt/). _(We cannot guarantee they will work)_
### Supported Futures Exchanges (experimental)
- [X] [Binance](https://www.binance.com/)
- [X] [Gate.io](https://www.gate.io/ref/6266643)
- [X] [Hyperliquid](https://hyperliquid.xyz/) (A decentralized exchange, or DEX)
- [X] [OKX](https://okx.com/)
- [X] [Bybit](https://bybit.com/)
Please make sure to read the [exchange specific notes](docs/exchanges.md), as well as the [trading with leverage](docs/leverage.md) documentation before diving in.
### Community tested
Exchanges confirmed working by the community:
- [X] [Bitvavo](https://bitvavo.com/)
- [X] [Kucoin](https://www.kucoin.com/)
## Documentation
We invite you to read the bot documentation to ensure you understand how the bot is working.
Please find the complete documentation on the [freqtrade website](https://www.freqtrade.io).
## Features
- [x] **Based on Python 3.10+**: For botting on any operating system - Windows, macOS and Linux.
- [x] **Persistence**: Persistence is achieved through sqlite.
- [x] **Dry-run**: Run the bot without paying money.
- [x] **Backtesting**: Run a simulation of your buy/sell strategy.
- [x] **Strategy Optimization by machine learning**: Use machine learning to optimize your buy/sell strategy parameters with real exchange data.
- [X] **Adaptive prediction modeling**: Build a smart strategy with FreqAI that self-trains to the market via adaptive machine learning methods. [Learn more](https://www.freqtrade.io/en/stable/freqai/)
- [x] **Edge position sizing**: Calculate your win rate, risk reward ratio, the best stoploss and adjust your position size before taking a position for each specific market. [Learn more](https://www.freqtrade.io/en/stable/edge/).
- [x] **Whitelist crypto-currencies**: Select which crypto-currency you want to trade or use dynamic whitelists.
- [x] **Blacklist crypto-currencies**: Select which crypto-currency you want to avoid.
- [x] **Builtin WebUI**: Builtin web UI to manage your bot.
- [x] **Manageable via Telegram**: Manage the bot with Telegram.
- [x] **Display profit/loss in fiat**: Display your profit/loss in fiat currency.
- [x] **Performance status report**: Provide a performance status of your current trades.
## Quick start
Please refer to the [Docker Quickstart documentation](https://www.freqtrade.io/en/stable/docker_quickstart/) on how to get started quickly.
For further (native) installation methods, please refer to the [Installation documentation page](https://www.freqtrade.io/en/stable/installation/).
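In short, the Docker route looks roughly like this (a condensed sketch of the quickstart; the linked documentation is authoritative and the exact file locations may differ):
```bash
mkdir ft_userdata && cd ft_userdata
# Download the official docker-compose file
curl https://raw.githubusercontent.com/freqtrade/freqtrade/stable/docker-compose.yml -o docker-compose.yml
# Pull the image, then create the user directory and an initial configuration
docker compose pull
docker compose run --rm freqtrade create-userdir --userdir user_data
docker compose run --rm freqtrade new-config --config user_data/config.json
# Start the bot (dry-run unless configured otherwise)
docker compose up -d
```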
## Basic Usage
### Bot commands
```
usage: freqtrade [-h] [-V]
{trade,create-userdir,new-config,show-config,new-strategy,download-data,convert-data,convert-trade-data,trades-to-ohlcv,list-data,backtesting,backtesting-show,backtesting-analysis,edge,hyperopt,hyperopt-list,hyperopt-show,list-exchanges,list-markets,list-pairs,list-strategies,list-hyperoptloss,list-freqaimodels,list-timeframes,show-trades,test-pairlist,convert-db,install-ui,plot-dataframe,plot-profit,webserver,strategy-updater,lookahead-analysis,recursive-analysis}
...
Free, open source crypto trading bot
positional arguments:
{trade,create-userdir,new-config,show-config,new-strategy,download-data,convert-data,convert-trade-data,trades-to-ohlcv,list-data,backtesting,backtesting-show,backtesting-analysis,edge,hyperopt,hyperopt-list,hyperopt-show,list-exchanges,list-markets,list-pairs,list-strategies,list-hyperoptloss,list-freqaimodels,list-timeframes,show-trades,test-pairlist,convert-db,install-ui,plot-dataframe,plot-profit,webserver,strategy-updater,lookahead-analysis,recursive-analysis}
trade Trade module.
create-userdir Create user-data directory.
new-config Create new config
show-config Show resolved config
new-strategy Create new strategy
download-data Download backtesting data.
convert-data Convert candle (OHLCV) data from one format to
another.
convert-trade-data Convert trade data from one format to another.
trades-to-ohlcv Convert trade data to OHLCV data.
list-data List downloaded data.
backtesting Backtesting module.
backtesting-show Show past Backtest results
backtesting-analysis
Backtest Analysis module.
edge Edge module.
hyperopt Hyperopt module.
hyperopt-list List Hyperopt results
hyperopt-show Show details of Hyperopt results
list-exchanges Print available exchanges.
list-markets Print markets on exchange.
list-pairs Print pairs on exchange.
list-strategies Print available strategies.
list-hyperoptloss Print available hyperopt loss functions.
list-freqaimodels Print available freqAI models.
list-timeframes Print available timeframes for the exchange.
show-trades Show trades.
test-pairlist Test your pairlist configuration.
convert-db Migrate database to different system
install-ui Install FreqUI
plot-dataframe Plot candles with indicators.
plot-profit Generate plot showing profits.
webserver Webserver module.
strategy-updater updates outdated strategy files to the current version
lookahead-analysis Check for potential look ahead bias.
recursive-analysis Check for potential recursive formula issue.
options:
-h, --help show this help message and exit
-V, --version show program's version number and exit
```
### Telegram RPC commands
Telegram is not mandatory. However, this is a great way to control your bot. More details and the full command list on the [documentation](https://www.freqtrade.io/en/latest/telegram-usage/)
- `/start`: Starts the trader.
- `/stop`: Stops the trader.
- `/stopentry`: Stop entering new trades.
- `/status <trade_id>|[table]`: Lists all or specific open trades.
- `/profit [<n>]`: Lists cumulative profit from all finished trades, over the last n days.
- `/forceexit <trade_id>|all`: Instantly exits the given trade (Ignoring `minimum_roi`).
- `/fx <trade_id>|all`: Alias to `/forceexit`
- `/performance`: Show performance of each finished trade grouped by pair
- `/balance`: Show account balance per currency.
- `/daily <n>`: Shows profit or loss per day, over the last n days.
- `/help`: Show help message.
- `/version`: Show version.
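These commands require the `telegram` section of your configuration to be enabled. A minimal sketch, mirroring the placeholders used in the example configurations shipped with this commit:
```json
"telegram": {
    "enabled": true,
    "token": "your_telegram_token",
    "chat_id": "your_telegram_chat_id"
}
```
The token is obtained from Telegram's @BotFather, and the chat id identifies the chat the bot should report to.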
## Development branches
The project is currently set up in two main branches (plus short-lived feature branches):
- `develop` - This branch often has new features, but might also contain breaking changes. We try hard to keep this branch as stable as possible.
- `stable` - This branch contains the latest stable release. This branch is generally well tested.
- `feat/*` - These are feature branches, which are being worked on heavily. Please don't use these unless you want to test a specific feature.
## Support
### Help / Discord
For any questions not covered by the documentation or for further information about the bot, or to simply engage with like-minded individuals, we encourage you to join the Freqtrade [discord server](https://discord.gg/p7nuUNVfP7).
### [Bugs / Issues](https://github.com/freqtrade/freqtrade/issues?q=is%3Aissue)
If you discover a bug in the bot, please
[search the issue tracker](https://github.com/freqtrade/freqtrade/issues?q=is%3Aissue)
first. If it hasn't been reported, please
[create a new issue](https://github.com/freqtrade/freqtrade/issues/new/choose) and
ensure you follow the template guide so that the team can assist you as
quickly as possible.
For every [issue](https://github.com/freqtrade/freqtrade/issues/new/choose) you create, please follow up and mark it as resolved, or close it, once a satisfactory outcome has been reached.
Please also observe GitHub's [community policy](https://docs.github.com/en/site-policy/github-terms/github-community-code-of-conduct).
### [Feature Requests](https://github.com/freqtrade/freqtrade/labels/enhancement)
Do you have a great idea to improve the bot that you want to share? Please
first check whether this feature has [already been discussed](https://github.com/freqtrade/freqtrade/labels/enhancement).
If it hasn't been requested, please
[create a new request](https://github.com/freqtrade/freqtrade/issues/new/choose)
and ensure you follow the template guide so that it does not get lost
in the bug reports.
### [Pull Requests](https://github.com/freqtrade/freqtrade/pulls)
Feel like the bot is missing a feature? We welcome your pull requests!
Please read the
[Contributing document](https://github.com/freqtrade/freqtrade/blob/develop/CONTRIBUTING.md)
to understand the requirements before sending your pull-requests.
Coding is not a necessity to contribute - maybe start with improving the documentation?
Issues labeled [good first issue](https://github.com/freqtrade/freqtrade/labels/good%20first%20issue) can be good first contributions, and will help get you familiar with the codebase.
**Note** before starting any major new feature work, *please open an issue describing what you are planning to do* or talk to us on [discord](https://discord.gg/p7nuUNVfP7) (please use the #dev channel for this). This will ensure that interested parties can give valuable feedback on the feature, and let others know that you are working on it.
**Important:** Always create your PR against the `develop` branch, not `stable`.
## Requirements
### Up-to-date clock
The clock must be accurate and synchronized to an NTP server frequently to avoid problems communicating with the exchanges.
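On a typical systemd-based Linux host, you can verify and enable time synchronization as follows (a sketch; your distribution or cloud image may use chrony or another NTP client instead):
```bash
# Show whether the system clock is NTP-synchronized
timedatectl status
# Enable automatic time synchronization if it is currently off
sudo timedatectl set-ntp true
```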
### Minimum hardware required
To run this bot, we recommend a cloud instance with a minimum of:
- Minimal (advised) system requirements: 2GB RAM, 1GB disk space, 2 vCPUs
### Software requirements
- [Python >= 3.10](http://docs.python-guide.org/en/latest/starting/installation/)
- [pip](https://pip.pypa.io/en/stable/installing/)
- [git](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git)
- [TA-Lib](https://ta-lib.github.io/ta-lib-python/)
- [virtualenv](https://virtualenv.pypa.io/en/stable/installation.html) (Recommended)
- [Docker](https://www.docker.com/products/docker) (Recommended)
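With the above requirements in place, a native setup roughly follows the pattern below (a sketch only - the [Installation documentation](https://www.freqtrade.io/en/stable/installation/) remains the authoritative guide):
```bash
git clone https://github.com/freqtrade/freqtrade.git
cd freqtrade
# Create and activate an isolated virtual environment
python3 -m venv .venv
source .venv/bin/activate
# Install dependencies and freqtrade itself
pip install -r requirements.txt
pip install -e .
# Create the user-data directory used for configs, strategies and logs
freqtrade create-userdir --userdir user_data
```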

View File

@ -0,0 +1,23 @@
#!/usr/bin/env python3
import json
import os
from pathlib import Path
import ccxt
key = os.environ.get("FREQTRADE__EXCHANGE__KEY")
secret = os.environ.get("FREQTRADE__EXCHANGE__SECRET")
proxy = os.environ.get("CI_WEB_PROXY")
exchange = ccxt.binance(
{"apiKey": key, "secret": secret, "httpsProxy": proxy, "options": {"defaultType": "swap"}}
)
_ = exchange.load_markets()
lev_tiers = exchange.fetch_leverage_tiers()
# Assumes this is running in the root of the repository.
file = Path("freqtrade/exchange/binance_leverage_tiers.json")
json.dump(dict(sorted(lev_tiers.items())), file.open("w"), indent=2)

View File

@ -0,0 +1,53 @@
import subprocess
from pathlib import Path
subcommands = [
"trade",
"create-userdir",
"new-config",
"show-config",
"new-strategy",
"download-data",
"convert-data",
"convert-trade-data",
"trades-to-ohlcv",
"list-data",
"backtesting",
"backtesting-show",
"backtesting-analysis",
"edge",
"hyperopt",
"hyperopt-list",
"hyperopt-show",
"list-exchanges",
"list-markets",
"list-pairs",
"list-strategies",
"list-hyperoptloss",
"list-freqaimodels",
"list-timeframes",
"show-trades",
"test-pairlist",
"convert-db",
"install-ui",
"plot-dataframe",
"plot-profit",
"webserver",
"strategy-updater",
"lookahead-analysis",
"recursive-analysis",
]
result = subprocess.run(["freqtrade", "--help"], capture_output=True, text=True)
with Path("docs/commands/main.md").open("w") as f:
f.write(f"```\n{result.stdout}\n```\n")
for command in subcommands:
print(f"Running for {command}")
result = subprocess.run(["freqtrade", command, "--help"], capture_output=True, text=True)
with Path(f"docs/commands/{command}.md").open("w") as f:
f.write(f"```\n{result.stdout}\n```\n")

View File

@ -0,0 +1,30 @@
"""Script to extract the configuration json schema from config_schema.py file."""
from pathlib import Path
import rapidjson
def extract_config_json_schema():
try:
# Try to import from the installed package
from freqtrade.config_schema import CONF_SCHEMA
except ImportError:
# If freqtrade is not installed, add the parent directory to sys.path
# to import directly from the source
import sys
script_dir = Path(__file__).parent
freqtrade_dir = script_dir.parent
sys.path.insert(0, str(freqtrade_dir))
# Now try to import from the source
from freqtrade.config_schema import CONF_SCHEMA
schema_filename = Path(__file__).parent / "schema.json"
with schema_filename.open("w") as f:
rapidjson.dump(CONF_SCHEMA, f, indent=2)
if __name__ == "__main__":
extract_config_json_schema()

View File

@ -0,0 +1,15 @@
#!/usr/bin/env python3
from freqtrade import __version__ as ft_version
from freqtrade_client import __version__ as client_version
def main():
if ft_version != client_version:
print(f"Versions do not match: \nft: {ft_version} \nclient: {client_version}")
exit(1)
print(f"Versions match: ft: {ft_version}, client: {client_version}")
exit(0)
if __name__ == "__main__":
main()

35
build_helpers/install_ta-lib.sh Executable file
View File

@ -0,0 +1,35 @@
if [ -z "$1" ]; then
INSTALL_LOC=/usr/local
else
INSTALL_LOC=${1}
fi
echo "Installing to ${INSTALL_LOC}"
if [ -n "$2" ] || [ ! -f "${INSTALL_LOC}/lib/libta_lib.a" ]; then
tar zxvf ta-lib-0.4.0-src.tar.gz
cd ta-lib \
&& sed -i.bak "s|0.00000001|0.000000000000000001 |g" src/ta_func/ta_utility.h \
&& echo "Downloading gcc config.guess and config.sub" \
&& curl -s 'https://raw.githubusercontent.com/gcc-mirror/gcc/master/config.guess' -o config.guess \
&& curl -s 'https://raw.githubusercontent.com/gcc-mirror/gcc/master/config.sub' -o config.sub \
&& ./configure --prefix=${INSTALL_LOC}/ \
&& make
if [ $? -ne 0 ]; then
echo "Failed building ta-lib."
cd .. && rm -rf ./ta-lib/
exit 1
fi
if [ -z "$2" ]; then
which sudo && sudo make install || make install
if [ -x "$(command -v apt-get)" ]; then
echo "Updating library path using ldconfig"
sudo ldconfig
fi
else
# Don't install with sudo
make install
fi
cd .. && rm -rf ./ta-lib/
else
echo "TA-lib already installed, skipping installation"
fi

View File

@ -0,0 +1,10 @@
# vendored Wheels compiled via https://github.com/xmatthias/ta-lib-python/tree/ta_bundled_040
python -m pip install --upgrade pip
python -c "import sys; print(f'{sys.version_info.major}.{sys.version_info.minor}')"
pip install -U wheel "numpy<2"
pip install --only-binary ta-lib --find-links=build_helpers\ ta-lib
pip install -r requirements-dev.txt
pip install -e .

View File

@ -0,0 +1,49 @@
# File used in CI to ensure pre-commit dependencies are kept up-to-date.
import sys
from pathlib import Path
import yaml
pre_commit_file = Path(".pre-commit-config.yaml")
require_dev = Path("requirements-dev.txt")
require = Path("requirements.txt")
with require_dev.open("r") as rfile:
requirements = rfile.readlines()
with require.open("r") as rfile:
requirements.extend(rfile.readlines())
# Extract types only
type_reqs = [
r.strip("\n") for r in requirements if r.startswith("types-") or r.startswith("SQLAlchemy")
]
with pre_commit_file.open("r") as file:
f = yaml.load(file, Loader=yaml.SafeLoader)
mypy_repo = [
repo for repo in f["repos"] if repo["repo"] == "https://github.com/pre-commit/mirrors-mypy"
]
hooks = mypy_repo[0]["hooks"][0]["additional_dependencies"]
errors = []
for hook in hooks:
if hook not in type_reqs:
errors.append(f"{hook} is missing in requirements-dev.txt.")
for req in type_reqs:
if req not in hooks:
errors.append(f"{req} is missing in pre-config file.")
if errors:
for e in errors:
print(e)
sys.exit(1)
sys.exit(0)

View File

@ -0,0 +1,119 @@
#!/bin/sh
# Use BuildKit, otherwise building on ARM fails
export DOCKER_BUILDKIT=1
IMAGE_NAME=freqtradeorg/freqtrade
CACHE_IMAGE=freqtradeorg/freqtrade_cache
GHCR_IMAGE_NAME=ghcr.io/freqtrade/freqtrade
# Replace / with _ to create a valid tag
TAG=$(echo "${BRANCH_NAME}" | sed -e "s/\//_/g")
TAG_PLOT=${TAG}_plot
TAG_FREQAI=${TAG}_freqai
TAG_FREQAI_RL=${TAG_FREQAI}rl
TAG_FREQAI_TORCH=${TAG_FREQAI}torch
TAG_PI="${TAG}_pi"
TAG_ARM=${TAG}_arm
TAG_PLOT_ARM=${TAG_PLOT}_arm
TAG_FREQAI_ARM=${TAG_FREQAI}_arm
TAG_FREQAI_RL_ARM=${TAG_FREQAI_RL}_arm
echo "Running for ${TAG}"
# Add commit and commit_message to docker container
echo "${GITHUB_SHA}" > freqtrade_commit
if [ "${GITHUB_EVENT_NAME}" = "schedule" ]; then
echo "event ${GITHUB_EVENT_NAME}: full rebuild - skipping cache"
# Build regular image
docker build -t freqtrade:${TAG_ARM} .
else
echo "event ${GITHUB_EVENT_NAME}: building with cache"
# Build regular image
docker pull ${IMAGE_NAME}:${TAG_ARM}
docker build --cache-from ${IMAGE_NAME}:${TAG_ARM} -t freqtrade:${TAG_ARM} .
fi
if [ $? -ne 0 ]; then
echo "failed building multiarch images"
return 1
fi
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_PLOT_ARM} -f docker/Dockerfile.plot .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_ARM} -t freqtrade:${TAG_FREQAI_ARM} -f docker/Dockerfile.freqai .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_FREQAI_ARM} -t freqtrade:${TAG_FREQAI_RL_ARM} -f docker/Dockerfile.freqai_rl .
# Tag image for upload and next build step
docker tag freqtrade:$TAG_ARM ${CACHE_IMAGE}:$TAG_ARM
docker tag freqtrade:$TAG_PLOT_ARM ${CACHE_IMAGE}:$TAG_PLOT_ARM
docker tag freqtrade:$TAG_FREQAI_ARM ${CACHE_IMAGE}:$TAG_FREQAI_ARM
docker tag freqtrade:$TAG_FREQAI_RL_ARM ${CACHE_IMAGE}:$TAG_FREQAI_RL_ARM
# Run backtest
docker run --rm -v $(pwd)/tests/testdata/config.tests.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests freqtrade:${TAG_ARM} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3
if [ $? -ne 0 ]; then
echo "failed running backtest"
return 1
fi
docker images
docker push ${CACHE_IMAGE}:$TAG_PLOT_ARM
docker push ${CACHE_IMAGE}:$TAG_FREQAI_ARM
docker push ${CACHE_IMAGE}:$TAG_FREQAI_RL_ARM
docker push ${CACHE_IMAGE}:$TAG_ARM
# Create multi-arch image
# Make sure that all images contained here are pushed to github first.
# Otherwise installation might fail.
echo "create manifests"
docker manifest create ${IMAGE_NAME}:${TAG} ${CACHE_IMAGE}:${TAG} ${CACHE_IMAGE}:${TAG_ARM} ${IMAGE_NAME}:${TAG_PI}
docker manifest push -p ${IMAGE_NAME}:${TAG}
docker manifest create ${IMAGE_NAME}:${TAG_PLOT} ${CACHE_IMAGE}:${TAG_PLOT} ${CACHE_IMAGE}:${TAG_PLOT_ARM}
docker manifest push -p ${IMAGE_NAME}:${TAG_PLOT}
docker manifest create ${IMAGE_NAME}:${TAG_FREQAI} ${CACHE_IMAGE}:${TAG_FREQAI} ${CACHE_IMAGE}:${TAG_FREQAI_ARM}
docker manifest push -p ${IMAGE_NAME}:${TAG_FREQAI}
docker manifest create ${IMAGE_NAME}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL_ARM}
docker manifest push -p ${IMAGE_NAME}:${TAG_FREQAI_RL}
# Create special Torch tag - which is identical to the RL tag.
docker manifest create ${IMAGE_NAME}:${TAG_FREQAI_TORCH} ${CACHE_IMAGE}:${TAG_FREQAI_RL} ${CACHE_IMAGE}:${TAG_FREQAI_RL_ARM}
docker manifest push -p ${IMAGE_NAME}:${TAG_FREQAI_TORCH}
# copy images to ghcr.io
alias crane="docker run --rm -i -v $(pwd)/.crane:/home/nonroot/.docker/ gcr.io/go-containerregistry/crane"
mkdir .crane
chmod a+rwx .crane
echo "${GHCR_TOKEN}" | crane auth login ghcr.io -u "${GHCR_USERNAME}" --password-stdin
crane copy ${IMAGE_NAME}:${TAG_FREQAI_RL} ${GHCR_IMAGE_NAME}:${TAG_FREQAI_RL}
crane copy ${IMAGE_NAME}:${TAG_FREQAI_RL} ${GHCR_IMAGE_NAME}:${TAG_FREQAI_TORCH}
crane copy ${IMAGE_NAME}:${TAG_FREQAI} ${GHCR_IMAGE_NAME}:${TAG_FREQAI}
crane copy ${IMAGE_NAME}:${TAG_PLOT} ${GHCR_IMAGE_NAME}:${TAG_PLOT}
crane copy ${IMAGE_NAME}:${TAG} ${GHCR_IMAGE_NAME}:${TAG}
# Tag as latest for develop builds
if [ "${TAG}" = "develop" ]; then
echo 'Tagging image as latest'
docker manifest create ${IMAGE_NAME}:latest ${CACHE_IMAGE}:${TAG_ARM} ${IMAGE_NAME}:${TAG_PI} ${CACHE_IMAGE}:${TAG}
docker manifest push -p ${IMAGE_NAME}:latest
crane copy ${IMAGE_NAME}:latest ${GHCR_IMAGE_NAME}:latest
fi
docker images
rm -rf .crane
# Cleanup old images from arm64 node.
docker image prune -a --force --filter "until=24h"

View File

@ -0,0 +1,89 @@
#!/bin/sh
# The below assumes a correctly setup docker buildx environment
IMAGE_NAME=freqtradeorg/freqtrade
CACHE_IMAGE=freqtradeorg/freqtrade_cache
# Replace / with _ to create a valid tag
TAG=$(echo "${BRANCH_NAME}" | sed -e "s/\//_/g")
TAG_PLOT=${TAG}_plot
TAG_FREQAI=${TAG}_freqai
TAG_FREQAI_RL=${TAG_FREQAI}rl
TAG_PI="${TAG}_pi"
PI_PLATFORM="linux/arm/v7"
echo "Running for ${TAG}"
CACHE_TAG=${CACHE_IMAGE}:${TAG_PI}_cache
# Add commit and commit_message to docker container
echo "${GITHUB_SHA}" > freqtrade_commit
if [ "${GITHUB_EVENT_NAME}" = "schedule" ]; then
echo "event ${GITHUB_EVENT_NAME}: full rebuild - skipping cache"
# Build regular image
docker build -t freqtrade:${TAG} .
# Build PI image
docker buildx build \
--cache-to=type=registry,ref=${CACHE_TAG} \
-f docker/Dockerfile.armhf \
--platform ${PI_PLATFORM} \
-t ${IMAGE_NAME}:${TAG_PI} \
--push \
--provenance=false \
.
else
echo "event ${GITHUB_EVENT_NAME}: building with cache"
# Build regular image
docker pull ${IMAGE_NAME}:${TAG}
docker build --cache-from ${IMAGE_NAME}:${TAG} -t freqtrade:${TAG} .
# Pull last build to avoid rebuilding the whole image
# docker pull --platform ${PI_PLATFORM} ${IMAGE_NAME}:${TAG}
# disable provenance due to https://github.com/docker/buildx/issues/1509
docker buildx build \
--cache-from=type=registry,ref=${CACHE_TAG} \
--cache-to=type=registry,ref=${CACHE_TAG} \
-f docker/Dockerfile.armhf \
--platform ${PI_PLATFORM} \
-t ${IMAGE_NAME}:${TAG_PI} \
--push \
--provenance=false \
.
fi
if [ $? -ne 0 ]; then
echo "failed building multiarch images"
return 1
fi
# Tag image for upload and next build step
docker tag freqtrade:$TAG ${CACHE_IMAGE}:$TAG
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG} -t freqtrade:${TAG_PLOT} -f docker/Dockerfile.plot .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG} -t freqtrade:${TAG_FREQAI} -f docker/Dockerfile.freqai .
docker build --build-arg sourceimage=freqtrade --build-arg sourcetag=${TAG_FREQAI} -t freqtrade:${TAG_FREQAI_RL} -f docker/Dockerfile.freqai_rl .
docker tag freqtrade:$TAG_PLOT ${CACHE_IMAGE}:$TAG_PLOT
docker tag freqtrade:$TAG_FREQAI ${CACHE_IMAGE}:$TAG_FREQAI
docker tag freqtrade:$TAG_FREQAI_RL ${CACHE_IMAGE}:$TAG_FREQAI_RL
# Run backtest
docker run --rm -v $(pwd)/tests/testdata/config.tests.json:/freqtrade/config.json:ro -v $(pwd)/tests:/tests freqtrade:${TAG} backtesting --datadir /tests/testdata --strategy-path /tests/strategy/strats/ --strategy StrategyTestV3
if [ $? -ne 0 ]; then
echo "failed running backtest"
return 1
fi
docker images
docker push ${CACHE_IMAGE}:$TAG
docker push ${CACHE_IMAGE}:$TAG_PLOT
docker push ${CACHE_IMAGE}:$TAG_FREQAI
docker push ${CACHE_IMAGE}:$TAG_FREQAI_RL
docker images
if [ $? -ne 0 ]; then
echo "failed building image"
return 1
fi

1676
build_helpers/schema.json Normal file

File diff suppressed because it is too large.
Binary files not shown (4 files).

View File

@ -0,0 +1,84 @@
{
"$schema": "https://schema.freqtrade.io/schema.json",
"max_open_trades": 3,
"stake_currency": "USDT",
"stake_amount": 0.05,
"tradable_balance_ratio": 0.99,
"fiat_display_currency": "USD",
"timeframe": "5m",
"dry_run": true,
"cancel_open_orders_on_exit": false,
"unfilledtimeout": {
"entry": 10,
"exit": 10,
"exit_timeout_count": 0,
"unit": "minutes"
},
"entry_pricing": {
"price_side": "same",
"use_order_book": true,
"order_book_top": 1,
"price_last_balance": 0.0,
"check_depth_of_market": {
"enabled": false,
"bids_to_ask_delta": 1
}
},
"exit_pricing": {
"price_side": "same",
"use_order_book": true,
"order_book_top": 1
},
"exchange": {
"name": "binance",
"key": "your_exchange_key",
"secret": "your_exchange_secret",
"ccxt_config": {},
"ccxt_async_config": {
},
"pair_whitelist": [
"ALGO/USDT",
"ATOM/USDT",
"BAT/USDT",
"BCH/USDT",
"BRD/USDT",
"EOS/USDT",
"ETH/USDT",
"IOTA/USDT",
"LINK/USDT",
"LTC/USDT",
"NEO/USDT",
"NXS/USDT",
"XMR/USDT",
"XRP/USDT",
"XTZ/USDT"
],
"pair_blacklist": [
"BNB/.*"
]
},
"pairlists": [
{"method": "StaticPairList"}
],
"telegram": {
"enabled": false,
"token": "your_telegram_token",
"chat_id": "your_telegram_chat_id"
},
"api_server": {
"enabled": false,
"listen_ip_address": "127.0.0.1",
"listen_port": 8080,
"verbosity": "error",
"jwt_secret_key": "somethingrandom",
"CORS_origins": [],
"username": "freqtrader",
"password": "SuperSecurePassword"
},
"bot_name": "freqtrade",
"initial_state": "running",
"force_entry_enable": false,
"internals": {
"process_throttle_secs": 5
}
}

View File

@ -0,0 +1,212 @@
{
"$schema": "https://schema.freqtrade.io/schema.json",
"max_open_trades": 3,
"stake_currency": "BTC",
"stake_amount": 0.05,
"tradable_balance_ratio": 0.99,
"fiat_display_currency": "USD",
"amount_reserve_percent": 0.05,
"available_capital": 1000,
"amend_last_stake_amount": false,
"last_stake_amount_min_ratio": 0.5,
"dry_run": true,
"dry_run_wallet": 1000,
"cancel_open_orders_on_exit": false,
"timeframe": "5m",
"trailing_stop": false,
"trailing_stop_positive": 0.005,
"trailing_stop_positive_offset": 0.0051,
"trailing_only_offset_is_reached": false,
"use_exit_signal": true,
"exit_profit_only": false,
"exit_profit_offset": 0.0,
"ignore_roi_if_entry_signal": false,
"ignore_buying_expired_candle_after": 300,
"trading_mode": "spot",
"margin_mode": "",
"minimal_roi": {
"40": 0.0,
"30": 0.01,
"20": 0.02,
"0": 0.04
},
"stoploss": -0.10,
"unfilledtimeout": {
"entry": 10,
"exit": 10,
"exit_timeout_count": 0,
"unit": "minutes"
},
"entry_pricing": {
"price_side": "same",
"use_order_book": true,
"order_book_top": 1,
"price_last_balance": 0.0,
"check_depth_of_market": {
"enabled": false,
"bids_to_ask_delta": 1
}
},
"exit_pricing":{
"price_side": "same",
"use_order_book": true,
"order_book_top": 1,
"price_last_balance": 0.0
},
"order_types": {
"entry": "limit",
"exit": "limit",
"emergency_exit": "market",
"force_exit": "market",
"force_entry": "market",
"stoploss": "market",
"stoploss_on_exchange": false,
"stoploss_price_type": "last",
"stoploss_on_exchange_interval": 60,
"stoploss_on_exchange_limit_ratio": 0.99
},
"order_time_in_force": {
"entry": "GTC",
"exit": "GTC"
},
"pairlists": [
{"method": "StaticPairList"},
{"method": "FullTradesFilter"},
{
"method": "VolumePairList",
"number_assets": 20,
"sort_key": "quoteVolume",
"refresh_period": 1800
},
{"method": "AgeFilter", "min_days_listed": 10},
{"method": "PrecisionFilter"},
{"method": "PriceFilter", "low_price_ratio": 0.01, "min_price": 0.00000010},
{"method": "SpreadFilter", "max_spread_ratio": 0.005},
{
"method": "RangeStabilityFilter",
"lookback_days": 10,
"min_rate_of_change": 0.01,
"refresh_period": 1440
}
],
"exchange": {
"name": "binance",
"key": "your_exchange_key",
"secret": "your_exchange_secret",
"password": "",
"log_responses": false,
// "unknown_fee_rate": 1,
"ccxt_config": {},
"ccxt_async_config": {},
"pair_whitelist": [
"ALGO/BTC",
"ATOM/BTC",
"BAT/BTC",
"BCH/BTC",
"BRD/BTC",
"EOS/BTC",
"ETH/BTC",
"IOTA/BTC",
"LINK/BTC",
"LTC/BTC",
"NEO/BTC",
"NXS/BTC",
"XMR/BTC",
"XRP/BTC",
"XTZ/BTC"
],
"pair_blacklist": [
"DOGE/BTC"
],
"outdated_offset": 5,
"markets_refresh_interval": 60
},
"edge": {
"enabled": false,
"process_throttle_secs": 3600,
"calculate_since_number_of_days": 7,
"allowed_risk": 0.01,
"stoploss_range_min": -0.01,
"stoploss_range_max": -0.1,
"stoploss_range_step": -0.01,
"minimum_winrate": 0.60,
"minimum_expectancy": 0.20,
"min_trade_number": 10,
"max_trade_duration_minute": 1440,
"remove_pumps": false
},
"telegram": {
"enabled": false,
"token": "your_telegram_token",
"chat_id": "your_telegram_chat_id",
"notification_settings": {
"status": "on",
"warning": "on",
"startup": "on",
"entry": "on",
"entry_fill": "on",
"exit": {
"roi": "off",
"emergency_exit": "off",
"force_exit": "off",
"exit_signal": "off",
"trailing_stop_loss": "off",
"stop_loss": "off",
"stoploss_on_exchange": "off",
"custom_exit": "off"
},
"exit_fill": "on",
"entry_cancel": "on",
"exit_cancel": "on",
"protection_trigger": "off",
"protection_trigger_global": "on",
"show_candle": "off"
},
"reload": true,
"balance_dust_level": 0.01
},
"api_server": {
"enabled": false,
"listen_ip_address": "127.0.0.1",
"listen_port": 8080,
"verbosity": "error",
"enable_openapi": false,
"jwt_secret_key": "somethingrandom",
"CORS_origins": [],
"username": "freqtrader",
"password": "SuperSecurePassword",
"ws_token": "secret_ws_t0ken."
},
"external_message_consumer": {
"enabled": false,
"producers": [
{
"name": "default",
"host": "127.0.0.2",
"port": 8080,
"ws_token": "secret_ws_t0ken."
}
],
"wait_timeout": 300,
"ping_timeout": 10,
"sleep_time": 10,
"remove_entry_exit_signals": false,
"message_size_limit": 8
},
"bot_name": "freqtrade",
"db_url": "sqlite:///tradesv3.sqlite",
"initial_state": "running",
"force_entry_enable": false,
"internals": {
"process_throttle_secs": 5,
"heartbeat_interval": 60
},
"disable_dataframe_checks": false,
"strategy": "SampleStrategy",
"strategy_path": "user_data/strategies/",
"recursive_strategy_search": false,
"add_config_files": [],
"reduce_df_footprint": false,
"dataformat_ohlcv": "feather",
"dataformat_trades": "feather"
}

View File

@ -0,0 +1,90 @@
{
"$schema": "https://schema.freqtrade.io/schema.json",
"max_open_trades": 5,
"stake_currency": "EUR",
"stake_amount": 10,
"tradable_balance_ratio": 0.99,
"fiat_display_currency": "EUR",
"timeframe": "5m",
"dry_run": true,
"cancel_open_orders_on_exit": false,
"unfilledtimeout": {
"entry": 10,
"exit": 10,
"exit_timeout_count": 0,
"unit": "minutes"
},
"entry_pricing": {
"price_side": "same",
"use_order_book": true,
"order_book_top": 1,
"price_last_balance": 0.0,
"check_depth_of_market": {
"enabled": false,
"bids_to_ask_delta": 1
}
},
"exit_pricing":{
"price_side": "same",
"use_order_book": true,
"order_book_top": 1
},
"exchange": {
"name": "kraken",
"key": "your_exchange_key",
"secret": "your_exchange_key",
"ccxt_config": {},
"ccxt_async_config": {
},
"pair_whitelist": [
"ADA/EUR",
"ATOM/EUR",
"BAT/EUR",
"BCH/EUR",
"BTC/EUR",
"DAI/EUR",
"DASH/EUR",
"EOS/EUR",
"ETC/EUR",
"ETH/EUR",
"LINK/EUR",
"LTC/EUR",
"QTUM/EUR",
"REP/EUR",
"WAVES/EUR",
"XLM/EUR",
"XMR/EUR",
"XRP/EUR",
"XTZ/EUR",
"ZEC/EUR"
],
"pair_blacklist": [
]
},
"pairlists": [
{"method": "StaticPairList"}
],
"telegram": {
"enabled": false,
"token": "your_telegram_token",
"chat_id": "your_telegram_chat_id"
},
"api_server": {
"enabled": false,
"listen_ip_address": "127.0.0.1",
"listen_port": 8080,
"verbosity": "error",
"jwt_secret_key": "somethingrandom",
"CORS_origins": [],
"username": "freqtrader",
"password": "SuperSecurePassword"
},
"bot_name": "freqtrade",
"initial_state": "running",
"force_entry_enable": false,
"internals": {
"process_throttle_secs": 5
},
"download_trades": true
}

41
docker-compose.yml Normal file
View File

@ -0,0 +1,41 @@
---
services:
freqtrade:
image: freqtradeorg/freqtrade:stable_freqaitorch
# # Enable GPU Image and GPU Resources
# # Make sure to uncomment the whole deploy section
# deploy:
# resources:
# reservations:
# devices:
# - driver: nvidia
# count: 1
# capabilities: [gpu]
# Build step - only needed when additional dependencies are needed
# build:
# context: .
# dockerfile: "./docker/Dockerfile.custom"
restart: always
container_name: freqtrade
volumes:
- "./user_data:/freqtrade/user_data"
- "./config_examples:/freqtrade/config_examples"
- "./freqtrade/templates:/freqtrade/templates"
# Expose api on port 8080 (localhost only)
# Please read the https://www.freqtrade.io/en/stable/rest-api/ documentation
# for more information.
ports:
- "8080:8080"
# Default command used when running `docker compose up`
# --freqaimodel XGBoostRegressor
command: >
trade
--logfile /freqtrade/user_data/logs/freqtrade.log
--db-url sqlite:////freqtrade/user_data/tradesv3.sqlite
--freqaimodel LightGBMRegressor
--config /freqtrade/config_examples/config_freqai.okx.json
--strategy FreqaiExampleStrategy
--strategy-path /freqtrade/templates

View File

@ -0,0 +1,58 @@
FROM python:3.11.11-slim-bookworm as base
# Setup env
ENV LANG C.UTF-8
ENV LC_ALL C.UTF-8
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONFAULTHANDLER 1
ENV PATH=/home/ftuser/.local/bin:$PATH
ENV FT_APP_ENV="docker"
# Prepare environment
RUN mkdir /freqtrade \
&& apt-get update \
&& apt-get -y install sudo libatlas3-base libopenblas-dev curl sqlite3 libutf8proc-dev libsnappy-dev \
&& apt-get clean \
&& useradd -u 1000 -G sudo -U -m ftuser \
&& chown ftuser:ftuser /freqtrade \
# Allow sudoers
&& echo "ftuser ALL=(ALL) NOPASSWD: /bin/chown" >> /etc/sudoers \
&& pip install --upgrade pip
WORKDIR /freqtrade
# Install dependencies
FROM base as python-deps
RUN apt-get update \
&& apt-get -y install build-essential libssl-dev libffi-dev libgfortran5 pkg-config cmake gcc \
&& apt-get clean \
&& echo "[global]\nextra-index-url=https://www.piwheels.org/simple" > /etc/pip.conf
# Install TA-lib
COPY build_helpers/* /tmp/
# Install dependencies
COPY --chown=ftuser:ftuser requirements.txt /freqtrade/
USER ftuser
RUN pip install --user --no-cache-dir "numpy<2" \
&& pip install --user --no-index --find-links /tmp/ pyarrow TA-Lib \
&& pip install --user --no-cache-dir -r requirements.txt
# Copy dependencies to runtime-image
FROM base as runtime-image
COPY --from=python-deps /usr/local/lib /usr/local/lib
ENV LD_LIBRARY_PATH /usr/local/lib
COPY --from=python-deps --chown=ftuser:ftuser /home/ftuser/.local /home/ftuser/.local
USER ftuser
# Install and execute
COPY --chown=ftuser:ftuser . /freqtrade/
RUN pip install -e . --user --no-cache-dir --no-build-isolation\
&& mkdir /freqtrade/user_data/ \
&& freqtrade install-ui
ENTRYPOINT ["freqtrade"]
# Default to trade mode
CMD [ "trade" ]

View File

@ -0,0 +1,11 @@
FROM freqtradeorg/freqtrade:develop
# Switch user to root if you must install something from apt
# Don't forget to switch the user back below!
# USER root
# The below dependency - pyti - serves as an example. Please use whatever you need!
RUN pip install --user pyti
# Switch back to user (only if you required root above)
# USER ftuser

View File

@ -0,0 +1,10 @@
FROM freqtradeorg/freqtrade:develop
# Install dependencies
COPY requirements-dev.txt /freqtrade/
RUN pip install numpy --user --no-cache-dir \
&& pip install -r requirements-dev.txt --user --no-cache-dir
# Empty the ENTRYPOINT to allow all commands
ENTRYPOINT []

8
docker/Dockerfile.freqai Normal file
View File

@ -0,0 +1,8 @@
ARG sourceimage=freqtradeorg/freqtrade
ARG sourcetag=develop
FROM ${sourceimage}:${sourcetag}
# Install dependencies
COPY requirements-freqai.txt /freqtrade/
RUN pip install -r requirements-freqai.txt --user --no-cache-dir

View File

@ -0,0 +1,8 @@
ARG sourceimage=freqtradeorg/freqtrade
ARG sourcetag=develop_freqai
FROM ${sourceimage}:${sourcetag}
# Install dependencies
COPY requirements-freqai.txt requirements-freqai-rl.txt /freqtrade/
RUN pip install -r requirements-freqai-rl.txt --user --no-cache-dir

View File

@ -0,0 +1,8 @@
FROM freqtradeorg/freqtrade:develop_plot
# Pin prompt-toolkit to avoid questionary version conflict
RUN pip install jupyterlab "prompt-toolkit<=3.0.36" jupyter-client --user --no-cache-dir
# Empty the ENTRYPOINT to allow all commands
ENTRYPOINT []

View File

@ -0,0 +1,8 @@
ARG sourceimage=freqtradeorg/freqtrade
ARG sourcetag=develop
FROM ${sourceimage}:${sourcetag}
# Install dependencies
COPY requirements-plot.txt /freqtrade/
RUN pip install -r requirements-plot.txt --user --no-cache-dir

View File

@ -0,0 +1,36 @@
---
services:
freqtrade:
image: freqtradeorg/freqtrade:stable
# image: freqtradeorg/freqtrade:develop
# Use plotting image
# image: freqtradeorg/freqtrade:develop_plot
# # Enable GPU Image and GPU Resources (only relevant for freqAI)
# # Make sure to uncomment the whole deploy section
# deploy:
# resources:
# reservations:
# devices:
# - driver: nvidia
# count: 1
# capabilities: [gpu]
# Build step - only needed when additional dependencies are needed
# build:
# context: .
# dockerfile: "./docker/Dockerfile.custom"
restart: unless-stopped
container_name: freqtrade
volumes:
- "./user_data:/freqtrade/user_data"
# Expose api on port 8080 (localhost only)
# Please read the https://www.freqtrade.io/en/stable/rest-api/ documentation
# for more information.
ports:
- "127.0.0.1:8080:8080"
# Default command used when running `docker compose up`
command: >
trade
--logfile /freqtrade/user_data/logs/freqtrade.log
--db-url sqlite:////freqtrade/user_data/tradesv3.sqlite
--config /freqtrade/user_data/config.json
--strategy SampleStrategy

3
docker/user_data/.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
data
logs
models

View File

@ -0,0 +1,218 @@
# Advanced Backtesting Analysis
## Analyze the buy/entry and sell/exit tags
It can be helpful to understand how a strategy behaves according to the buy/entry tags used to
mark up different buy conditions. You might want to see more complex statistics about each buy and
sell condition beyond those provided by the default backtesting output. You may also want to
determine indicator values on the signal candle that resulted in a trade opening.
!!! Note
The following buy reason analysis is only available for backtesting, *not hyperopt*.
We need to run backtesting with the `--export` option set to `signals` to enable the exporting of
signals **and** trades:
``` bash
freqtrade backtesting -c <config.json> --timeframe <tf> --strategy <strategy_name> --timerange=<timerange> --export=signals
```
This will tell freqtrade to output a pickled dictionary keyed by strategy and pair, containing the
DataFrames of the candles that resulted in entry and exit signals.
Depending on how many entries your strategy makes, this file may get quite large, so periodically check your `user_data/backtest_results` folder to delete old exports.
Before running your next backtest, make sure you either delete your old backtest results or run
backtesting with the `--cache none` option to make sure no cached results are used.
If all goes well, you should now see `backtest-result-{timestamp}_signals.pkl` and `backtest-result-{timestamp}_exited.pkl` files in the `user_data/backtest_results` folder.
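If you want to inspect these exported signal candles yourself (outside of `backtesting-analysis`), you can open the file directly. The sketch below assumes the export is a joblib-pickled dictionary keyed by strategy and pair, as described above; the filename is a placeholder and will carry your own timestamp.
``` python
import joblib

# Placeholder filename - the timestamp part will differ for your run.
signal_candles = joblib.load(
    "user_data/backtest_results/backtest-result-2022-06-14_16-28-32_signals.pkl"
)
for strategy, pairs in signal_candles.items():
    for pair, df in pairs.items():
        print(strategy, pair, len(df), "signal candles")
```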
To analyze the entry/exit tags, we now need to use the `freqtrade backtesting-analysis` command
with `--analysis-groups` option provided with space-separated arguments:
``` bash
freqtrade backtesting-analysis -c <config.json> --analysis-groups 0 1 2 3 4 5
```
This command will read from the last backtesting results. The `--analysis-groups` option is
used to specify the various tabular outputs showing the profit of each group or trade,
ranging from the simplest (0) to the most detailed per pair, per buy and per sell tag (4), plus a per-exit-tag summary (5):
* 0: overall winrate and profit summary by enter_tag
* 1: profit summaries grouped by enter_tag
* 2: profit summaries grouped by enter_tag and exit_tag
* 3: profit summaries grouped by pair and enter_tag
* 4: profit summaries grouped by pair, enter_ and exit_tag (this can get quite large)
* 5: profit summaries grouped by exit_tag
More options are available by running with the `-h` option.
### Using export-filename
Normally, `backtesting-analysis` uses the latest backtest results, but if you want to go
back to a previous backtest output, you need to supply the `--export-filename` option.
You can supply the same parameter to `backtesting-analysis` with the name of the final backtest
output file. This allows you to keep historical versions of backtest results and re-analyse
them at a later date:
``` bash
freqtrade backtesting -c <config.json> --timeframe <tf> --strategy <strategy_name> --timerange=<timerange> --export=signals --export-filename=/tmp/mystrat_backtest.json
```
You should see some output similar to below in the logs with the name of the timestamped
filename that was exported:
```
2022-06-14 16:28:32,698 - freqtrade.misc - INFO - dumping json to "/tmp/mystrat_backtest-2022-06-14_16-28-32.json"
```
You can then use that filename in `backtesting-analysis`:
```
freqtrade backtesting-analysis -c <config.json> --export-filename=/tmp/mystrat_backtest-2022-06-14_16-28-32.json
```
### Tuning the buy tags and sell tags to display
To show only certain buy and sell tags in the displayed output, use the following two options:
```
--enter-reason-list : Space-separated list of enter signals to analyse. Default: "all"
--exit-reason-list : Space-separated list of exit signals to analyse. Default: "all"
```
For example:
```bash
freqtrade backtesting-analysis -c <config.json> --analysis-groups 0 2 --enter-reason-list enter_tag_a enter_tag_b --exit-reason-list roi custom_exit_tag_a stop_loss
```
### Outputting signal candle indicators
The real power of `freqtrade backtesting-analysis` comes from the ability to print out the indicator
values present on signal candles to allow fine-grained investigation and tuning of buy signal
indicators. To print out a column for a given set of indicators, use the `--indicator-list`
option:
```bash
freqtrade backtesting-analysis -c <config.json> --analysis-groups 0 2 --enter-reason-list enter_tag_a enter_tag_b --exit-reason-list roi custom_exit_tag_a stop_loss --indicator-list rsi rsi_1h bb_lowerband ema_9 macd macdsignal
```
The indicators have to be present in your strategy's main DataFrame (either for your main
timeframe or for informative timeframes), otherwise they will simply be ignored in the script
output.
!!! Note "Indicator List"
The indicator values will be displayed for both entry and exit points. If `--indicator-list all` is specified,
only the indicators at the entry point will be shown to avoid excessively large lists, which could occur depending on the strategy.
A range of candle and trade-related fields are included in the analysis and are automatically
accessible by adding them to the indicator list. These include:
- **open_date :** trade open datetime
- **close_date :** trade close datetime
- **min_rate :** minimum price seen throughout the position
- **max_rate :** maximum price seen throughout the position
- **open :** signal candle open price
- **close :** signal candle close price
- **high :** signal candle high price
- **low :** signal candle low price
- **volume :** signal candle volume
- **profit_ratio :** trade profit ratio
- **profit_abs :** absolute profit return of the trade
#### Sample Output for Indicator Values
```bash
freqtrade backtesting-analysis -c user_data/config.json --analysis-groups 0 --indicator-list chikou_span tenkan_sen
```
In this example,
we aim to display the `chikou_span` and `tenkan_sen` indicator values at both the entry and exit points of trades.
A sample output for indicators might look like this:
| pair | open_date | enter_reason | exit_reason | chikou_span (entry) | tenkan_sen (entry) | chikou_span (exit) | tenkan_sen (exit) |
|-----------|---------------------------|--------------|-------------|---------------------|--------------------|--------------------|-------------------|
| DOGE/USDT | 2024-07-06 00:35:00+00:00 | | exit_signal | 0.105 | 0.106 | 0.105 | 0.107 |
| BTC/USDT | 2024-08-05 14:20:00+00:00 | | roi | 54643.440 | 51696.400 | 54386.000 | 52072.010 |
As shown in the table, `chikou_span (entry)` represents the indicator value at the time of trade entry,
while `chikou_span (exit)` reflects its value at the time of exit.
This detailed view of indicator values enhances the analysis.
The `(entry)` and `(exit)` suffixes are added to indicators
to distinguish the values at the entry and exit points of the trade.
!!! Note "Trade-wide Indicators"
Certain trade-wide indicators do not have the `(entry)` or `(exit)` suffix. These indicators include: `pair`, `stake_amount`,
`max_stake_amount`, `amount`, `open_date`, `close_date`, `open_rate`, `close_rate`, `fee_open`, `fee_close`, `trade_duration`,
`profit_ratio`, `profit_abs`, `exit_reason`,`initial_stop_loss_abs`, `initial_stop_loss_ratio`, `stop_loss_abs`, `stop_loss_ratio`,
`min_rate`, `max_rate`, `is_open`, `enter_tag`, `leverage`, `is_short`, `open_timestamp`, `close_timestamp` and `orders`
#### Filtering Indicators Based on Entry or Exit Signals
The `--indicator-list` option, by default, displays indicator values for both entry and exit signals. To filter the indicator values exclusively for entry signals, you can use the `--entry-only` argument. Similarly, to display indicator values only at exit signals, use the `--exit-only` argument.
Example: Display indicator values at entry signals:
```bash
freqtrade backtesting-analysis -c user_data/config.json --analysis-groups 0 --indicator-list chikou_span tenkan_sen --entry-only
```
Example: Display indicator values at exit signals:
```bash
freqtrade backtesting-analysis -c user_data/config.json --analysis-groups 0 --indicator-list chikou_span tenkan_sen --exit-only
```
!!! note
When using these filters, the indicator names will not be suffixed with `(entry)` or `(exit)`.
### Filtering the trade output by date
To show only trades between dates within your backtested timerange, supply the usual `timerange` option in `YYYYMMDD-[YYYYMMDD]` format:
```
--timerange : Timerange to filter output trades, start date inclusive, end date exclusive. e.g. 20220101-20221231
```
For example, if your backtest timerange was `20220101-20221231` but you only want to output trades in January:
```bash
freqtrade backtesting-analysis -c <config.json> --timerange 20220101-20220201
```
### Printing out rejected signals
Use the `--rejected-signals` option to print out rejected signals.
```bash
freqtrade backtesting-analysis -c <config.json> --rejected-signals
```
### Writing tables to CSV
Some of the tabular outputs can become large, so printing them to the terminal may not be desirable.
Use the `--analysis-to-csv` option to disable printing tables to standard output and write them to CSV files instead.
```bash
freqtrade backtesting-analysis -c <config.json> --analysis-to-csv
```
By default this will write one file per output table you specified in the `backtesting-analysis` command, e.g.
```bash
freqtrade backtesting-analysis -c <config.json> --analysis-to-csv --rejected-signals --analysis-groups 0 1
```
This will write to `user_data/backtest_results`:
* rejected_signals.csv
* group_0.csv
* group_1.csv
To override where the files will be written, also specify the `--analysis-csv-path` option.
```bash
freqtrade backtesting-analysis -c <config.json> --analysis-to-csv --analysis-csv-path another/data/path/
```

235
docs/advanced-hyperopt.md Normal file

@ -0,0 +1,235 @@
# Advanced Hyperopt
This page explains some advanced Hyperopt topics that may require more advanced
coding skills and Python knowledge than the creation of an ordinary hyperoptimization
class.
## Creating and using a custom loss function
To use a custom loss function class, make sure that the function `hyperopt_loss_function` is defined in your custom hyperopt loss class.
For the sample below, you then need to add the command line parameter `--hyperopt-loss SuperDuperHyperOptLoss` to your hyperopt call so this function is being used.
A sample of this can be found below, which is identical to the Default Hyperopt loss implementation. A full sample can be found in [userdata/hyperopts](https://github.com/freqtrade/freqtrade/blob/develop/freqtrade/templates/sample_hyperopt_loss.py).
``` python
from datetime import datetime
from math import exp
from typing import Any
from pandas import DataFrame
from freqtrade.constants import Config
from freqtrade.optimize.hyperopt import IHyperOptLoss
TARGET_TRADES = 600
EXPECTED_MAX_PROFIT = 3.0
MAX_ACCEPTED_TRADE_DURATION = 300
class SuperDuperHyperOptLoss(IHyperOptLoss):
"""
Defines the default loss function for hyperopt
"""
@staticmethod
def hyperopt_loss_function(
*,
results: DataFrame,
trade_count: int,
min_date: datetime,
max_date: datetime,
config: Config,
processed: dict[str, DataFrame],
backtest_stats: dict[str, Any],
starting_balance: float,
**kwargs,
) -> float:
"""
Objective function, returns smaller number for better results
This is the legacy algorithm (used until now in freqtrade).
Weights are distributed as follows:
* 0.4 to trade duration
* 0.25: Avoiding trade loss
* 1.0 to total profit, compared to the expected value (`EXPECTED_MAX_PROFIT`) defined above
"""
total_profit = results['profit_ratio'].sum()
trade_duration = results['trade_duration'].mean()
trade_loss = 1 - 0.25 * exp(-(trade_count - TARGET_TRADES) ** 2 / 10 ** 5.8)
profit_loss = max(0, 1 - total_profit / EXPECTED_MAX_PROFIT)
duration_loss = 0.4 * min(trade_duration / MAX_ACCEPTED_TRADE_DURATION, 1)
result = trade_loss + profit_loss + duration_loss
return result
```
Currently, the arguments are:
* `results`: DataFrame containing the resulting trades.
The following columns are available in results (corresponds to the output-file of backtesting when used with `--export trades`):
`pair, profit_ratio, profit_abs, open_date, open_rate, fee_open, close_date, close_rate, fee_close, amount, trade_duration, is_open, exit_reason, stake_amount, min_rate, max_rate, stop_loss_ratio, stop_loss_abs`
* `trade_count`: Amount of trades (identical to `len(results)`)
* `min_date`: Start date of the timerange used
* `max_date`: End date of the timerange used
* `config`: Config object used (Note: Not all strategy-related parameters will be updated here if they are part of a hyperopt space).
* `processed`: Dict of Dataframes with the pair as keys containing the data used for backtesting.
* `backtest_stats`: Backtesting statistics using the same format as the backtesting file "strategy" substructure. Available fields can be seen in `generate_strategy_stats()` in `optimize_reports.py`.
* `starting_balance`: Starting balance used for backtesting.
This function needs to return a floating point number (`float`). Smaller numbers will be interpreted as better results. The parameters and balancing for this is up to you.
!!! Note
This function is called once per epoch - so please make sure to have this as optimized as possible to not slow hyperopt down unnecessarily.
!!! Note "`*args` and `**kwargs`"
Please keep the arguments `*args` and `**kwargs` in the interface to allow us to extend this interface in the future.
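As a further illustration of how these arguments can be combined, here is a hedged sketch of a loss function built purely from `backtest_stats`. The keys `profit_total` and `max_drawdown_account` are assumptions about the statistics structure - verify them against `generate_strategy_stats()` in your version before relying on anything like this.
``` python
from pandas import DataFrame

from freqtrade.optimize.hyperopt import IHyperOptLoss


class ProfitDrawdownLossSketch(IHyperOptLoss):
    """Illustrative only: favour high total profit, penalize account drawdown."""

    @staticmethod
    def hyperopt_loss_function(
        *,
        results: DataFrame,
        trade_count: int,
        backtest_stats: dict,
        **kwargs,
    ) -> float:
        # Assumed keys - verify against generate_strategy_stats() output.
        total_profit = backtest_stats.get("profit_total", 0.0)
        max_drawdown = backtest_stats.get("max_drawdown_account", 0.0)
        # Smaller is better: reward profit, penalize drawdown.
        return -total_profit + 2.0 * max_drawdown
```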
## Overriding pre-defined spaces
To override a pre-defined space (`roi_space`, `generate_roi_table`, `stoploss_space`, `trailing_space`, `max_open_trades_space`), define a nested class called `HyperOpt` and implement the required spaces as follows:
```python
from typing import Dict, List

from freqtrade.optimize.space import Categorical, Dimension, Integer, SKDecimal
from freqtrade.strategy import IStrategy
class MyAwesomeStrategy(IStrategy):
class HyperOpt:
# Define a custom stoploss space.
def stoploss_space():
return [SKDecimal(-0.05, -0.01, decimals=3, name='stoploss')]
# Define custom ROI space
def roi_space() -> List[Dimension]:
return [
Integer(10, 120, name='roi_t1'),
Integer(10, 60, name='roi_t2'),
Integer(10, 40, name='roi_t3'),
SKDecimal(0.01, 0.04, decimals=3, name='roi_p1'),
SKDecimal(0.01, 0.07, decimals=3, name='roi_p2'),
SKDecimal(0.01, 0.20, decimals=3, name='roi_p3'),
]
def generate_roi_table(params: Dict) -> dict[int, float]:
roi_table = {}
roi_table[0] = params['roi_p1'] + params['roi_p2'] + params['roi_p3']
roi_table[params['roi_t3']] = params['roi_p1'] + params['roi_p2']
roi_table[params['roi_t3'] + params['roi_t2']] = params['roi_p1']
roi_table[params['roi_t3'] + params['roi_t2'] + params['roi_t1']] = 0
return roi_table
def trailing_space() -> List[Dimension]:
# All parameters here are mandatory, you can only modify their type or the range.
return [
# Fixed to true, if optimizing trailing_stop we assume to use trailing stop at all times.
Categorical([True], name='trailing_stop'),
SKDecimal(0.01, 0.35, decimals=3, name='trailing_stop_positive'),
# 'trailing_stop_positive_offset' should be greater than 'trailing_stop_positive',
# so this intermediate parameter is used as the value of the difference between
# them. The value of the 'trailing_stop_positive_offset' is constructed in the
# generate_trailing_params() method.
# This is similar to the hyperspace dimensions used for constructing the ROI tables.
SKDecimal(0.001, 0.1, decimals=3, name='trailing_stop_positive_offset_p1'),
Categorical([True, False], name='trailing_only_offset_is_reached'),
]
# Define a custom max_open_trades space
def max_open_trades_space(self) -> List[Dimension]:
return [
Integer(-1, 10, name='max_open_trades'),
]
```
!!! Note
All overrides are optional and can be mixed/matched as necessary.
### Dynamic parameters
Parameters can also be defined dynamically, but must be available to the instance once the [`bot_start()` callback](strategy-callbacks.md#bot-start) has been called.
``` python
class MyAwesomeStrategy(IStrategy):
def bot_start(self, **kwargs) -> None:
self.buy_adx = IntParameter(20, 30, default=30, optimize=True)
# ...
```
!!! Warning
Parameters created this way will not show up in the `list-strategies` parameter count.
### Overriding Base estimator
You can define your own estimator for Hyperopt by implementing `generate_estimator()` in the Hyperopt subclass.
```python
class MyAwesomeStrategy(IStrategy):
class HyperOpt:
def generate_estimator(dimensions: List['Dimension'], **kwargs):
return "RF"
```
Possible values are either one of "GP", "RF", "ET", "GBRT" (Details can be found in the [scikit-optimize documentation](https://scikit-optimize.github.io/)), or "an instance of a class that inherits from `RegressorMixin` (from sklearn) and where the `predict` method has an optional `return_std` argument, which returns `std(Y | x)` along with `E[Y | x]`".
Some research will be necessary to find additional Regressors.
Example for `ExtraTreesRegressor` ("ET") with additional parameters:
```python
class MyAwesomeStrategy(IStrategy):
class HyperOpt:
def generate_estimator(dimensions: List['Dimension'], **kwargs):
from skopt.learning import ExtraTreesRegressor
# Corresponds to "ET" - but allows additional parameters.
return ExtraTreesRegressor(n_estimators=100)
```
The `dimensions` parameter is the list of `skopt.space.Dimension` objects corresponding to the parameters to be optimized. It can be used to create isotropic kernels for the `skopt.learning.GaussianProcessRegressor` estimator. Here's an example:
```python
class MyAwesomeStrategy(IStrategy):
class HyperOpt:
def generate_estimator(dimensions: List['Dimension'], **kwargs):
import numpy as np

from skopt.utils import cook_estimator
from skopt.learning.gaussian_process.kernels import (Matern, ConstantKernel)
kernel_bounds = (0.0001, 10000)
kernel = (
ConstantKernel(1.0, kernel_bounds) *
Matern(length_scale=np.ones(len(dimensions)), length_scale_bounds=[kernel_bounds for d in dimensions], nu=2.5)
)
kernel += (
ConstantKernel(1.0, kernel_bounds) *
Matern(length_scale=np.ones(len(dimensions)), length_scale_bounds=[kernel_bounds for d in dimensions], nu=1.5)
)
return cook_estimator("GP", space=dimensions, kernel=kernel, n_restarts_optimizer=2)
```
!!! Note
While custom estimators can be provided, it's up to you as the user to research possible parameters and to analyze / understand which ones should be used.
If you're unsure about this, it is best to use one of the defaults (`"ET"` has proven to be the most versatile) without further parameters.
## Space options
For the additional spaces, scikit-optimize (in combination with Freqtrade) provides the following space types:
* `Categorical` - Pick from a list of categories (e.g. `Categorical(['a', 'b', 'c'], name="cat")`)
* `Integer` - Pick from a range of whole numbers (e.g. `Integer(1, 10, name='rsi')`)
* `SKDecimal` - Pick from a range of decimal numbers with limited precision (e.g. `SKDecimal(0.1, 0.5, decimals=3, name='adx')`). *Available only with freqtrade*.
* `Real` - Pick from a range of decimal numbers with full precision (e.g. `Real(0.1, 0.5, name='adx')`)
You can import all of these from `freqtrade.optimize.space`, although `Categorical`, `Integer` and `Real` are only aliases for their corresponding scikit-optimize Spaces. `SKDecimal` is provided by freqtrade for faster optimizations.
``` python
from freqtrade.optimize.space import Categorical, Dimension, Integer, SKDecimal, Real # noqa
```
!!! Hint "SKDecimal vs. Real"
We recommend using `SKDecimal` instead of the `Real` space in almost all cases. While the Real space provides full accuracy (up to ~16 decimal places), this precision is rarely needed and leads to unnecessarily long hyperopt times.
Assuming the definition of a rather small space (`SKDecimal(0.10, 0.15, decimals=2, name='xxx')`), SKDecimal will have 6 possibilities (`[0.10, 0.11, 0.12, 0.13, 0.14, 0.15]`).
A corresponding real space `Real(0.10, 0.15, name='xxx')` on the other hand has an almost unlimited number of possibilities (`[0.10, 0.100000000001, 0.100000000002, ... 0.149999999999, 0.15]`).

153
docs/advanced-orderflow.md Normal file

@ -0,0 +1,153 @@
# Orderflow data
This guide walks you through utilizing public trade data for advanced orderflow analysis in Freqtrade.
!!! Warning "Experimental Feature"
The orderflow feature is currently in beta and may be subject to changes in future releases. Please report any issues or feedback on the [Freqtrade GitHub repository](https://github.com/freqtrade/freqtrade/issues).
It's also currently not been tested with freqAI - and combining these two features is considered out of scope at this point.
!!! Warning "Performance"
Orderflow requires raw trades data. This data is rather large, and can cause a slow initial startup, when freqtrade needs to download the trades data for the last X candles. Additionally, enabling this feature will cause increased memory usage. Please ensure to have sufficient resources available.
## Getting Started
### Enable Public Trades
In your `config.json` file, set the `use_public_trades` option to true under the `exchange` section.
```json
"exchange": {
...
"use_public_trades": true,
}
```
### Configure Orderflow Processing
Define your desired settings for orderflow processing within the orderflow section of config.json. Here, you can adjust factors like:
- `cache_size`: How many previous orderflow candles are kept in cache instead of being recalculated for every new candle.
- `max_candles`: How many candles to fetch trades data for.
- `scale`: This controls the price bin size for the footprint chart.
- `stacked_imbalance_range`: Defines the minimum number of consecutive imbalanced price levels required for consideration.
- `imbalance_volume`: Filters out imbalances with volume below this threshold.
- `imbalance_ratio`: Filters out imbalances with a ratio (difference between ask and bid volume) lower than this value.
```json
"orderflow": {
"cache_size": 1000,
"max_candles": 1500,
"scale": 0.5,
"stacked_imbalance_range": 3, // needs at least this amount of imbalance next to each other
"imbalance_volume": 1, // filters out below
"imbalance_ratio": 3 // filters out ratio lower than
},
```
## Downloading Trade Data for Backtesting
To download historical trade data for backtesting, use the `--dl-trades` flag with the `freqtrade download-data` command.
```bash
freqtrade download-data -p BTC/USDT:USDT --timerange 20230101- --trading-mode futures --timeframes 5m --dl-trades
```
!!! Warning "Data availability"
Not all exchanges provide public trade data. For supported exchanges, freqtrade will warn you when you start downloading data with the `--dl-trades` flag if public trade data is not available.
## Accessing Orderflow Data
Once activated, several new columns become available in your dataframe:
``` python
dataframe["trades"] # Contains information about each individual trade.
dataframe["orderflow"] # Represents a footprint chart dict (see below)
dataframe["imbalances"] # Contains information about imbalances in the order flow.
dataframe["bid"] # Total bid volume
dataframe["ask"] # Total ask volume
dataframe["delta"] # Difference between ask and bid volume.
dataframe["min_delta"] # Minimum delta within the candle
dataframe["max_delta"] # Maximum delta within the candle
dataframe["total_trades"] # Total number of trades
dataframe["stacked_imbalances_bid"] # List of price levels of stacked bid imbalance range beginnings
dataframe["stacked_imbalances_ask"] # List of price levels of stacked ask imbalance range beginnings
```
You can access these columns in your strategy code for further analysis. Here's an example:
``` python
def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
# Calculating cumulative delta
dataframe["cum_delta"] = cumulative_delta(dataframe["delta"])
# Accessing total trades
total_trades = dataframe["total_trades"]
...
def cumulative_delta(delta: Series):
cumdelta = delta.cumsum()
return cumdelta
```
### Footprint chart (`dataframe["orderflow"]`)
This column provides a detailed breakdown of buy and sell orders at different price levels, offering valuable insights into order flow dynamics. The `scale` parameter in your configuration determines the price bin size for this representation.
The `orderflow` column contains a dict with the following structure:
``` output
{
"price": {
"bid_amount": 0.0,
"ask_amount": 0.0,
"bid": 0,
"ask": 0,
"delta": 0.0,
"total_volume": 0.0,
"total_trades": 0
}
}
```
#### Orderflow column explanation
- key: Price bin - binned at `scale` intervals
- `bid_amount`: Total volume bought at each price level.
- `ask_amount`: Total volume sold at each price level.
- `bid`: Number of buy orders at each price level.
- `ask`: Number of sell orders at each price level.
- `delta`: Difference between ask and bid volume at each price level.
- `total_volume`: Total volume (ask amount + bid amount) at each price level.
- `total_trades`: Total number of trades (ask + bid) at each price level.
By leveraging these features, you can gain valuable insights into market sentiment and potential trading opportunities based on order flow analysis.
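As one possible application, the sketch below derives a simple "point of control" (the price bin with the highest traded volume) from each candle's footprint dict. This is illustration only, based on the structure described above; the `poc` column name is arbitrary.
``` python
from pandas import DataFrame


def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
    def point_of_control(footprint):
        # `footprint` is one candle's orderflow dict: price bin -> aggregates.
        if not isinstance(footprint, dict) or not footprint:
            return None
        # Return the price bin with the highest total traded volume.
        return max(footprint, key=lambda price: footprint[price]["total_volume"])

    # Arbitrary column name, for illustration only.
    dataframe["poc"] = dataframe["orderflow"].apply(point_of_control)
    return dataframe
```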
### Raw trades data (`dataframe["trades"]`)
A list of the individual trades that occurred during the candle. This data can be used for more granular analysis of order flow dynamics (see the sketch after the key list below).
Each individual entry contains a dict with the following keys:
- `timestamp`: Timestamp of the trade.
- `date`: Date of the trade.
- `price`: Price of the trade.
- `amount`: Volume of the trade.
- `side`: Buy or sell.
- `id`: Unique identifier for the trade.
- `cost`: Total cost of the trade (price * amount).
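As a small, hedged sketch of what can be done with this raw data, the snippet below recomputes per-candle buy and sell volume from the individual trades, using the keys listed above (the exact `side` values should be verified against your data):
``` python
import pandas as pd


def buy_sell_volume(trades) -> pd.Series:
    # Sums trade amounts by side for one candle's raw trades list.
    # Assumes `side` values are "buy" / "sell" (verify against your data).
    if not isinstance(trades, list):
        return pd.Series({"buy_vol": 0.0, "sell_vol": 0.0})
    buys = sum(t["amount"] for t in trades if t["side"] == "buy")
    sells = sum(t["amount"] for t in trades if t["side"] == "sell")
    return pd.Series({"buy_vol": buys, "sell_vol": sells})


# Inside populate_indicators (column names are arbitrary):
# dataframe[["buy_vol", "sell_vol"]] = dataframe["trades"].apply(buy_sell_volume)
```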
### Imbalances (`dataframe["imbalances"]`)
This column provides a dict with information about imbalances in the order flow. An imbalance occurs when there is a significant difference between the ask and bid volume at a given price level.
Each row looks as follows - with price as index, and the corresponding bid and ask imbalance values as columns
``` output
{
"price": {
"bid_imbalance": False,
"ask_imbalance": False
}
}
```
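A minimal sketch for turning this structure into a per-candle feature - here, counting how many price levels show a bid-side imbalance. The dict form shown above is assumed; adapt it if your version exposes a per-candle DataFrame instead.
``` python
def count_bid_imbalances(imbalances) -> int:
    # Counts price levels flagged with a bid-side imbalance in one candle.
    # Assumes the dict form shown above; if the cell is a per-candle DataFrame
    # instead, use imbalances["bid_imbalance"].sum().
    if not isinstance(imbalances, dict):
        return 0
    return sum(bool(level.get("bid_imbalance")) for level in imbalances.values())


# dataframe["bid_imbalance_count"] = dataframe["imbalances"].apply(count_bid_imbalances)
```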

431
docs/advanced-setup.md Normal file

@ -0,0 +1,431 @@
# Advanced Post-installation Tasks
This page explains some advanced tasks and configuration options that can be performed after the bot installation and may be useful in some environments.
If you do not know what the things mentioned here mean, you probably do not need them.
## Running multiple instances of Freqtrade
This section will show you how to run multiple bots at the same time, on the same machine.
### Things to consider
* Use different database files.
* Use different Telegram bots (requires multiple different configuration files; applies only when Telegram is enabled).
* Use different ports (applies only when Freqtrade REST API webserver is enabled).
### Different database files
In order to keep track of your trades, profits, etc., freqtrade uses a SQLite database where it stores various types of information such as the trades you performed in the past and the current position(s) you are holding at any time. This allows you to keep track of your profits, but most importantly, to keep track of ongoing activity if the bot process is restarted or terminated unexpectedly.
Freqtrade will, by default, use separate database files for dry-run and live bots (this assumes no database URL is given in the configuration or via the command line argument).
For live trading mode, the default database will be `tradesv3.sqlite` and for dry-run it will be `tradesv3.dryrun.sqlite`.
The optional argument to the trade command used to specify the path of these files is `--db-url`, which requires a valid SQLAlchemy url.
So when you are starting a bot with only the config and strategy arguments in dry-run mode, the following 2 commands would have the same outcome.
``` bash
freqtrade trade -c MyConfig.json -s MyStrategy
# is equivalent to
freqtrade trade -c MyConfig.json -s MyStrategy --db-url sqlite:///tradesv3.dryrun.sqlite
```
This means that if you are running the trade command in two different terminals, for example to test your strategy with trades in USDT in one instance and trades in BTC in another, you will have to run them with different databases.
If you specify the URL of a database which does not exist, freqtrade will create one with the name you specified. So to test your custom strategy with BTC and USDT stake currencies, you could use the following commands (in 2 separate terminals):
``` bash
# Terminal 1:
freqtrade trade -c MyConfigBTC.json -s MyCustomStrategy --db-url sqlite:///user_data/tradesBTC.dryrun.sqlite
# Terminal 2:
freqtrade trade -c MyConfigUSDT.json -s MyCustomStrategy --db-url sqlite:///user_data/tradesUSDT.dryrun.sqlite
```
Conversely, if you wish to do the same thing in production mode, you will also have to create at least one new database (in addition to the default one) and specify the path to the "live" databases, for example:
``` bash
# Terminal 1:
freqtrade trade -c MyConfigBTC.json -s MyCustomStrategy --db-url sqlite:///user_data/tradesBTC.live.sqlite
# Terminal 2:
freqtrade trade -c MyConfigUSDT.json -s MyCustomStrategy --db-url sqlite:///user_data/tradesUSDT.live.sqlite
```
For more information regarding usage of the sqlite databases, for example to manually enter or remove trades, please refer to the [SQL Cheatsheet](sql_cheatsheet.md).
### Multiple instances using docker
To run multiple instances of freqtrade using docker, you will need to edit the docker-compose.yml file and add all the instances you want as separate services. Remember, you can separate your configuration into multiple files, so it's a good idea to think about making them modular; then, if you need to edit something common to all bots, you can do that in a single config file.
``` yml
---
version: '3'
services:
freqtrade1:
image: freqtradeorg/freqtrade:stable
# image: freqtradeorg/freqtrade:develop
# Use plotting image
# image: freqtradeorg/freqtrade:develop_plot
# Build step - only needed when additional dependencies are needed
# build:
# context: .
# dockerfile: "./docker/Dockerfile.custom"
restart: always
container_name: freqtrade1
volumes:
- "./user_data:/freqtrade/user_data"
# Expose api on port 8080 (localhost only)
# Please read the https://www.freqtrade.io/en/latest/rest-api/ documentation
# before enabling this.
ports:
- "127.0.0.1:8080:8080"
# Default command used when running `docker compose up`
command: >
trade
--logfile /freqtrade/user_data/logs/freqtrade1.log
--db-url sqlite:////freqtrade/user_data/tradesv3_freqtrade1.sqlite
--config /freqtrade/user_data/config.json
--config /freqtrade/user_data/config.freqtrade1.json
--strategy SampleStrategy
freqtrade2:
image: freqtradeorg/freqtrade:stable
# image: freqtradeorg/freqtrade:develop
# Use plotting image
# image: freqtradeorg/freqtrade:develop_plot
# Build step - only needed when additional dependencies are needed
# build:
# context: .
# dockerfile: "./docker/Dockerfile.custom"
restart: always
container_name: freqtrade2
volumes:
- "./user_data:/freqtrade/user_data"
# Expose api on port 8080 (localhost only)
# Please read the https://www.freqtrade.io/en/latest/rest-api/ documentation
# before enabling this.
ports:
- "127.0.0.1:8081:8080"
# Default command used when running `docker compose up`
command: >
trade
--logfile /freqtrade/user_data/logs/freqtrade2.log
--db-url sqlite:////freqtrade/user_data/tradesv3_freqtrade2.sqlite
--config /freqtrade/user_data/config.json
--config /freqtrade/user_data/config.freqtrade2.json
--strategy SampleStrategy
```
You can use whatever naming convention you want; freqtrade1 and freqtrade2 are arbitrary. Note that you will need to use different database files, port mappings and telegram configurations for each instance, as mentioned above.
## Use a different database system
Freqtrade uses SQLAlchemy, which supports multiple database systems, so a variety of database systems should work.
Freqtrade does not depend on or install any additional database drivers. Please refer to the [SQLAlchemy docs](https://docs.sqlalchemy.org/en/14/core/engines.html#database-urls) for installation instructions for the respective database systems.
The following systems have been tested and are known to work with freqtrade:
* sqlite (default)
* PostgreSQL
* MariaDB
!!! Warning
By using one of the below database systems, you acknowledge that you know how to manage such a system. The freqtrade team will not provide any support with setup or maintenance (or backups) of the below database systems.
### PostgreSQL
Installation:
`pip install psycopg2-binary`
Usage:
`... --db-url postgresql+psycopg2://<username>:<password>@localhost:5432/<database>`
Freqtrade will automatically create the tables necessary upon startup.
If you're running different instances of Freqtrade, you must either set up one database per instance or use different users / schemas for your connections.
### MariaDB / MySQL
Freqtrade also supports MariaDB via SQLAlchemy.
Installation:
`pip install pymysql`
Usage:
`... --db-url mysql+pymysql://<username>:<password>@localhost:3306/<database>`
## Configure the bot running as a systemd service
Copy the `freqtrade.service` file to your systemd user directory (usually `~/.config/systemd/user`) and update `WorkingDirectory` and `ExecStart` to match your setup.
!!! Note
Certain systems (like Raspbian) don't load service unit files from the user directory. In this case, copy `freqtrade.service` into `/etc/systemd/user/` (requires superuser permissions).
After that you can start the daemon with:
```bash
systemctl --user start freqtrade
```
For this to be persistent (run when user is logged out) you'll need to enable `linger` for your freqtrade user.
```bash
sudo loginctl enable-linger "$USER"
```
If you run the bot as a service, you can use systemd service manager as a software watchdog monitoring freqtrade bot
state and restarting it in the case of failures. If the `internals.sd_notify` parameter is set to true in the
configuration or the `--sd-notify` command line option is used, the bot will send keep-alive ping messages to systemd
using the sd_notify (systemd notifications) protocol and will also tell systemd its current state (Running, Paused or Stopped)
when it changes.
The `freqtrade.service.watchdog` file contains an example of the service unit configuration file which uses systemd
as the watchdog.
!!! Note
The sd_notify communication between the bot and the systemd service manager will not work if the bot runs in a Docker container.
## Advanced Logging
Freqtrade uses the default logging module provided by python.
Python allows for extensive [logging configuration](https://docs.python.org/3/library/logging.config.html#logging.config.dictConfig) in this regard - way more than what can be covered here.
Colored terminal output is set up by default if no `log_config` is provided.
Using `--logfile logfile.log` will enable the RotatingFileHandler.
If you're not content with the log format - or with the default settings provided for the RotatingFileHandler, you can customize logging to your liking.
The default configuration looks roughly like the below - with the file handler being provided - but not enabled.
``` json hl_lines="5-7 13-16 27"
{
"log_config": {
"version": 1,
"formatters": {
"basic": {
"format": "%(message)s"
},
"standard": {
"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
}
},
"handlers": {
"console": {
"class": "freqtrade.loggers.ft_rich_handler.FtRichHandler",
"formatter": "basic"
},
"file": {
"class": "logging.handlers.RotatingFileHandler",
"formatter": "standard",
// "filename": "someRandomLogFile.log",
"maxBytes": 10485760,
"backupCount": 10
}
},
"root": {
"handlers": [
"console",
// "file"
],
"level": "INFO",
}
}
}
```
!!! Note "highlighted lines"
Highlighted lines in the above code-block define the Rich handler and belong together.
The "standard" formatter and the "file" handler belong to the FileHandler.
Each handler must use one of the defined formatters (by name) - and its class must be available and a valid logging class.
To actually use a handler, it must be listed in the "handlers" section inside the "root" segment.
If this section is left out, freqtrade will produce no output via that handler.
!!! Tip "Explicit log configuration"
We recommend extracting the logging configuration from your main configuration and providing it to your bot via the [multiple configuration files](configuration.md#multiple-configuration-files) functionality. This avoids unnecessary code duplication.
---
On many Linux systems the bot can be configured to send its log messages to `syslog` or `journald` system services. Logging to a remote `syslog` server is also available on Windows. The special values for the `--logfile` command line option can be used for this.
### Logging to syslog
To send Freqtrade log messages to a local or remote `syslog` service use the `"log_config"` setup option to configure logging.
``` json
{
// ...
"log_config": {
"version": 1,
"formatters": {
"syslog_fmt": {
"format": "%(name)s - %(levelname)s - %(message)s"
}
},
"handlers": {
// Other handlers?
"syslog": {
"class": "logging.handlers.SysLogHandler",
"formatter": "syslog_fmt",
// Use one of the other options above as address instead?
"address": "/dev/log"
}
},
"root": {
"handlers": [
// other handlers
"syslog",
]
}
}
}
```
[Additional log-handlers](#advanced-logging) may need to be configured to, for example, also have log output in the console.
#### Syslog usage
Log messages are sent to `syslog` with the `user` facility. You can see them with the following commands:
* `tail -f /var/log/user`, or
* install a comprehensive graphical viewer (for instance, 'Log File Viewer' for Ubuntu).
On many systems `syslog` (`rsyslog`) fetches data from `journald` (and vice versa), so either syslog or journald can be used and the messages can be viewed with both `journalctl` and a syslog viewer utility. You can combine these in any way which suits you best.
For `rsyslog` the messages from the bot can be redirected into a separate dedicated log file. To achieve this, add
```
if $programname startswith "freqtrade" then -/var/log/freqtrade.log
```
to one of the rsyslog configuration files, for example at the end of the `/etc/rsyslog.d/50-default.conf`.
For `syslog` (`rsyslog`), the reduction mode can be switched on. This will reduce the number of repeating messages. For instance, multiple bot Heartbeat messages will be reduced to a single message when nothing else happens with the bot. To achieve this, set in `/etc/rsyslog.conf`:
```
# Filter duplicated messages
$RepeatedMsgReduction on
```
#### Syslog addressing
The syslog address can be either a Unix domain socket (socket filename) or a UDP socket specification, consisting of IP address and UDP port, separated by the `:` character.
So, the following are the examples of possible addresses:
* `"address": "/dev/log"` -- log to syslog (rsyslog) using the `/dev/log` socket, suitable for most systems.
* `"address": "/var/run/syslog"` -- log to syslog (rsyslog) using the `/var/run/syslog` socket. Use this on MacOS.
* `"address": "localhost:514"` -- log to local syslog using UDP socket, if it listens on port 514.
* `"address": "<ip>:514"` -- log to remote syslog at IP address and port 514. This may be used on Windows for remote logging to an external syslog server.
??? Info "Deprecated - configure syslog via command line"
`--logfile syslog:<syslog_address>` -- send log messages to `syslog` service using the `<syslog_address>` as the syslog address.
The syslog address can be either a Unix domain socket (socket filename) or a UDP socket specification, consisting of IP address and UDP port, separated by the `:` character.
So, the following are the examples of possible usages:
* `--logfile syslog:/dev/log` -- log to syslog (rsyslog) using the `/dev/log` socket, suitable for most systems.
* `--logfile syslog` -- same as above, the shortcut for `/dev/log`.
* `--logfile syslog:/var/run/syslog` -- log to syslog (rsyslog) using the `/var/run/syslog` socket. Use this on MacOS.
* `--logfile syslog:localhost:514` -- log to local syslog using UDP socket, if it listens on port 514.
* `--logfile syslog:<ip>:514` -- log to remote syslog at IP address and port 514. This may be used on Windows for remote logging to an external syslog server.
### Logging to journald
This needs the `cysystemd` python package installed as a dependency (`pip install cysystemd`), which is not available on Windows. Hence, the whole journald logging functionality is not available for a bot running on Windows.
To send Freqtrade log messages to `journald` system service, add the following configuration snippet to your configuration.
``` json
{
// ...
"log_config": {
"version": 1,
"formatters": {
"journald_fmt": {
"format": "%(name)s - %(levelname)s - %(message)s"
}
},
"handlers": {
// Other handlers?
"journald": {
"class": "cysystemd.journal.JournaldLogHandler",
"formatter": "journald_fmt",
}
},
"root": {
"handlers": [
// ..
"journald",
]
}
}
}
```
[Additional log-handlers](#advanced-logging) may need to be configured to, for example, also have log output in the console.
Log messages are sent to `journald` with the `user` facility. You can see them with the following commands:
* `journalctl -f` -- shows Freqtrade log messages sent to `journald` along with other log messages fetched by `journald`.
* `journalctl -f -u freqtrade.service` -- this command can be used when the bot is run as a `systemd` service.
There are many other options in the `journalctl` utility to filter the messages, see manual pages for this utility.
On many systems `syslog` (`rsyslog`) fetches data from `journald` (and vice versa), so either `--logfile syslog` or `--logfile journald` can be used and the messages can be viewed with both `journalctl` and a syslog viewer utility. You can combine these in any way which suits you best.
??? Info "Deprecated - configure journald via command line"
To send Freqtrade log messages to `journald` system service use the `--logfile` command line option with the value in the following format:
`--logfile journald` -- send log messages to `journald`.
### Log format as JSON
You can also configure the default output stream to use JSON format instead.
The "fmt_dict" attribute maps the keys of the JSON output to [python logging LogRecord attributes](https://docs.python.org/3/library/logging.html#logrecord-attributes).
The below configuration will change the default output to JSON. The same formatter could however also be used in combination with the `RotatingFileHandler`.
We recommend keeping at least one format in human-readable form.
``` json
{
// ...
"log_config": {
"version": 1,
"formatters": {
"json": {
"()": "freqtrade.loggers.json_formatter.JsonFormatter",
"fmt_dict": {
"timestamp": "asctime",
"level": "levelname",
"logger": "name",
"message": "message"
}
}
},
"handlers": {
// Other handlers?
"jsonStream": {
"class": "logging.StreamHandler",
"formatter": "json"
}
},
"root": {
"handlers": [
// ..
"jsonStream",
]
}
}
}
```

(Binary image assets added under docs/assets/ - those named in this view include freqai_DI.jpg, freqai_algo.jpg, frequi_url.png, plot-profit.png and tensorboard.jpg, plus further screenshots and diagrams whose previews are not shown; two additional file diffs were suppressed as too long.)

An SVG icon is also added (filename not shown in this view):
@ -0,0 +1,3 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid meet" viewBox="0 0 90 90" width="100" height="100"><defs><path d="M0 90L0 0L90 0L90 90L0 90ZM50 60L60 60L60 80L70 80L70 60L80 60L80 50L50 50L50 60ZM30 80L40 80L40 70L30 70L30 80ZM30 60L20 60L20 70L10 70L10 80L20 80L20 70L30 70L30 60L40 60L40 50L30 50L30 60ZM10 60L20 60L20 50L10 50L10 60ZM10 40L40 40L40 30L20 30L20 20L40 20L40 10L10 10L10 40ZM50 40L80 40L80 30L60 30L60 20L80 20L80 10L50 10L50 40Z" id="c6g67PWSoP"></path></defs><g><g><g><use xlink:href="#c6g67PWSoP" opacity="1" fill="#000000" fill-opacity="1"></use></g></g></g></svg>
594
docs/backtesting.md Normal file

@ -0,0 +1,594 @@
# Backtesting
This page explains how to validate your strategy performance by using Backtesting.
Backtesting requires historic data to be available.
To learn how to get data for the pairs and exchange you're interested in, head over to the [Data Downloading](data-download.md) section of the documentation.
## Backtesting command reference
--8<-- "commands/backtesting.md"
## Test your strategy with Backtesting
Now that you have good entry and exit strategies and some historic data, you want to test them against
real data. This is what we call [backtesting](https://en.wikipedia.org/wiki/Backtesting).
Backtesting will use the crypto-currencies (pairs) from your config file and load historical candle (OHLCV) data from `user_data/data/<exchange>` by default.
If no data is available for the exchange / pair / timeframe combination, backtesting will ask you to download them first using `freqtrade download-data`.
For details on downloading, please refer to the [Data Downloading](data-download.md) section in the documentation.
The result of backtesting will confirm if your bot has better odds of making a profit than a loss.
All profit calculations include fees, and freqtrade will use the exchange's default fees for the calculation.
!!! Warning "Using dynamic pairlists for backtesting"
Using dynamic pairlists is possible (not all of the handlers are allowed to be used in backtest mode), however it relies on the current market conditions - which will not reflect the historic status of the pairlist.
Also, when using pairlists other than StaticPairlist, reproducibility of backtesting-results cannot be guaranteed.
Please read the [pairlists documentation](plugins.md#pairlists) for more information.
To achieve reproducible results, best generate a pairlist via the [`test-pairlist`](utils.md#test-pairlist) command and use that as static pairlist.
!!! Note
By default, Freqtrade will export backtesting results to `user_data/backtest_results`.
The exported trades can be used for [further analysis](#further-backtest-result-analysis) or can be used by the [plotting sub-command](plotting.md#plot-price-and-indicators) (`freqtrade plot-dataframe`) in the scripts directory.
### Starting balance
Backtesting will require a starting balance, which can be provided as `--dry-run-wallet <balance>` or `--starting-balance <balance>` command line argument, or via `dry_run_wallet` configuration setting.
This amount must be higher than `stake_amount`, otherwise the bot will not be able to simulate any trade.
### Dynamic stake amount
Backtesting supports [dynamic stake amount](configuration.md#dynamic-stake-amount) by configuring `stake_amount` as `"unlimited"`, which will split the starting balance into `max_open_trades` pieces.
Profits from early trades will result in subsequent higher stake amounts, resulting in compounding of profits over the backtesting period.
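To make the compounding effect concrete, here is a small illustrative calculation (a simplified model, not freqtrade's exact internal stake computation): with a starting balance of 1000 and `max_open_trades = 5`, each slot initially receives 200; once profits raise the balance to 1100, newly opened trades stake 220.
``` python
def unlimited_stake(available_balance: float, max_open_trades: int) -> float:
    # Simplified model of "unlimited" stake_amount: split the balance evenly.
    return available_balance / max_open_trades


print(unlimited_stake(1000, 5))  # 200.0 at the start of the backtest
print(unlimited_stake(1100, 5))  # 220.0 once profits raised the balance by 10%
```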
### Example backtesting commands
With 5 min candle (OHLCV) data (per default)
```bash
freqtrade backtesting --strategy AwesomeStrategy
```
Where `--strategy AwesomeStrategy` / `-s AwesomeStrategy` refers to the class name of the strategy, which is within a python file in the `user_data/strategies` directory.
---
With 1 min candle (OHLCV) data
```bash
freqtrade backtesting --strategy AwesomeStrategy --timeframe 1m
```
---
Providing a custom starting balance of 1000 (in stake currency)
```bash
freqtrade backtesting --strategy AwesomeStrategy --dry-run-wallet 1000
```
---
Using a different on-disk historical candle (OHLCV) data source
Assume you downloaded the history data from the Binance exchange and kept it in the `user_data/data/binance-20180101` directory.
You can then use this data for backtesting as follows:
```bash
freqtrade backtesting --strategy AwesomeStrategy --datadir user_data/data/binance-20180101
```
---
Comparing multiple Strategies
```bash
freqtrade backtesting --strategy-list SampleStrategy1 AwesomeStrategy --timeframe 5m
```
Where `SampleStrategy1` and `AwesomeStrategy` refer to class names of strategies.
---
Prevent exporting trades to file
```bash
freqtrade backtesting --strategy AwesomeStrategy --export none --config config.json
```
Only use this if you're sure you'll not want to plot or analyze your results further.
---
Exporting trades to file specifying a custom filename
```bash
freqtrade backtesting --strategy AwesomeStrategy --export trades --export-filename=backtest_samplestrategy.json
```
Please also read about the [strategy startup period](strategy-customization.md#strategy-startup-period).
---
Supplying custom fee value
Sometimes your account has certain fee rebates (fee reductions starting with a certain account size or monthly volume), which are not visible to ccxt.
To account for this in backtesting, you can use the `--fee` command line option to supply this value to backtesting.
This fee must be a ratio, and will be applied twice (once for trade entry, and once for trade exit).
For example, if the commission fee per order is 0.1% (i.e., 0.001 written as ratio), then you would run backtesting as the following:
```bash
freqtrade backtesting --fee 0.001
```
!!! Note
Only supply this option (or the corresponding configuration parameter) if you want to experiment with different fee values. By default, Backtesting fetches the default fee from the exchange pair/market info.
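To see what "applied twice" means in practice, here is a rough, illustrative calculation (a sketch that ignores slippage and compounding, not the exact internal fee handling):
``` python
fee = 0.001          # --fee 0.001 -> 0.1% per order
stake = 100.0        # entry cost in stake currency
exit_value = 101.0   # position value at exit (+1% price move)

entry_fee = stake * fee        # charged when the position is opened
exit_fee = exit_value * fee    # charged again when it is closed
net_profit = exit_value - stake - entry_fee - exit_fee
print(round(net_profit, 3))    # 0.799 instead of the gross 1.0
```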
---
Running backtest with smaller test-set by using timerange
Use the `--timerange` argument to change how much of the test-set you want to use.
For example, running backtesting with the `--timerange=20190501-` option will use all available data starting with May 1st, 2019 from your input data.
```bash
freqtrade backtesting --timerange=20190501-
```
You can also specify particular date ranges.
The full timerange specification:
- Use data until 2018/01/31: `--timerange=-20180131`
- Use data since 2018/01/31: `--timerange=20180131-`
- Use data since 2018/01/31 till 2018/03/01 : `--timerange=20180131-20180301`
- Use data between POSIX / epoch timestamps 1527595200 1527618600: `--timerange=1527595200-1527618600`
## Understand the backtesting result
The most important part of backtesting is to understand the result.
A backtesting result will look like this:
```
================================================ BACKTESTING REPORT =================================================
| Pair | Trades | Avg Profit % | Tot Profit BTC | Tot Profit % | Avg Duration | Wins Draws Loss Win% |
|----------+--------+----------------+------------------+----------------+--------------+--------------------------|
| ADA/BTC | 35 | -0.11 | -0.00019428 | -1.94 | 4:35:00 | 14 0 21 40.0 |
| ARK/BTC | 11 | -0.41 | -0.00022647 | -2.26 | 2:03:00 | 3 0 8 27.3 |
| BTS/BTC | 32 | 0.31 | 0.00048938 | 4.89 | 5:05:00 | 18 0 14 56.2 |
| DASH/BTC | 13 | -0.08 | -0.00005343 | -0.53 | 4:39:00 | 6 0 7 46.2 |
| ENG/BTC | 18 | 1.36 | 0.00122807 | 12.27 | 2:50:00 | 8 0 10 44.4 |
| EOS/BTC | 36 | 0.08 | 0.00015304 | 1.53 | 3:34:00 | 16 0 20 44.4 |
| ETC/BTC | 26 | 0.37 | 0.00047576 | 4.75 | 6:14:00 | 11 0 15 42.3 |
| ETH/BTC | 33 | 0.30 | 0.00049856 | 4.98 | 7:31:00 | 16 0 17 48.5 |
| IOTA/BTC | 32 | 0.03 | 0.00005444 | 0.54 | 3:12:00 | 14 0 18 43.8 |
| LSK/BTC | 15 | 1.75 | 0.00131413 | 13.13 | 2:58:00 | 6 0 9 40.0 |
| LTC/BTC | 32 | -0.04 | -0.00006886 | -0.69 | 4:49:00 | 11 0 21 34.4 |
| NANO/BTC | 17 | 1.26 | 0.00107058 | 10.70 | 1:55:00 | 10 0 7 58.5 |
| NEO/BTC | 23 | 0.82 | 0.00094936 | 9.48 | 2:59:00 | 10 0 13 43.5 |
| REQ/BTC | 9 | 1.17 | 0.00052734 | 5.27 | 3:47:00 | 4 0 5 44.4 |
| XLM/BTC | 16 | 1.22 | 0.00097800 | 9.77 | 3:15:00 | 7 0 9 43.8 |
| XMR/BTC | 23 | -0.18 | -0.00020696 | -2.07 | 5:30:00 | 12 0 11 52.2 |
| XRP/BTC | 35 | 0.66 | 0.00114897 | 11.48 | 3:49:00 | 12 0 23 34.3 |
| ZEC/BTC | 22 | -0.46 | -0.00050971 | -5.09 | 2:22:00 | 7 0 15 31.8 |
| TOTAL | 429 | 0.36 | 0.00762792 | 76.20 | 4:12:00 | 186 0 243 43.4 |
============================================= LEFT OPEN TRADES REPORT =============================================
| Pair | Trades | Avg Profit % | Tot Profit BTC | Tot Profit % | Avg Duration | Win Draw Loss Win% |
|----------+---------+----------------+------------------+----------------+----------------+---------------------|
| ADA/BTC | 1 | 0.89 | 0.00004434 | 0.44 | 6:00:00 | 1 0 0 100 |
| LTC/BTC | 1 | 0.68 | 0.00003421 | 0.34 | 2:00:00 | 1 0 0 100 |
| TOTAL | 2 | 0.78 | 0.00007855 | 0.78 | 4:00:00 | 2 0 0 100 |
==================== EXIT REASON STATS ====================
| Exit Reason | Exits | Wins | Draws | Losses |
|--------------------+---------+-------+--------+---------|
| trailing_stop_loss | 205 | 150 | 0 | 55 |
| stop_loss | 166 | 0 | 0 | 166 |
| exit_signal | 56 | 36 | 0 | 20 |
| force_exit | 2 | 0 | 0 | 2 |
================== SUMMARY METRICS ==================
| Metric | Value |
|-----------------------------+---------------------|
| Backtesting from | 2019-01-01 00:00:00 |
| Backtesting to | 2019-05-01 00:00:00 |
| Trading Mode | Spot |
| Max open trades | 3 |
| | |
| Total/Daily Avg Trades | 429 / 3.575 |
| Starting balance | 0.01000000 BTC |
| Final balance | 0.01762792 BTC |
| Absolute profit | 0.00762792 BTC |
| Total profit % | 76.2% |
| CAGR % | 460.87% |
| Sortino | 1.88 |
| Sharpe | 2.97 |
| Calmar | 6.29 |
| SQN | 2.45 |
| Profit factor | 1.11 |
| Expectancy (Ratio) | -0.15 (-0.05) |
| Avg. stake amount | 0.001 BTC |
| Total trade volume | 0.429 BTC |
| | |
| Long / Short | 352 / 77 |
| Total profit Long % | 1250.58% |
| Total profit Short % | -15.02% |
| Absolute profit Long | 0.00838792 BTC |
| Absolute profit Short | -0.00076 BTC |
| | |
| Best Pair | LSK/BTC 26.26% |
| Worst Pair | ZEC/BTC -10.18% |
| Best Trade | LSK/BTC 4.25% |
| Worst Trade | ZEC/BTC -10.25% |
| Best day | 0.00076 BTC |
| Worst day | -0.00036 BTC |
| Days win/draw/lose | 12 / 82 / 25 |
| Avg. Duration Winners | 4:23:00 |
| Avg. Duration Loser | 6:55:00 |
| Max Consecutive Wins / Loss | 3 / 4 |
| Rejected Entry signals | 3089 |
| Entry/Exit Timeouts | 0 / 0 |
| Canceled Trade Entries | 34 |
| Canceled Entry Orders | 123 |
| Replaced Entry Orders | 89 |
| | |
| Min balance | 0.00945123 BTC |
| Max balance | 0.01846651 BTC |
| Max % of account underwater | 25.19% |
| Absolute Drawdown (Account) | 13.33% |
| Drawdown | 0.0015 BTC |
| Drawdown high | 0.0013 BTC |
| Drawdown low | -0.0002 BTC |
| Drawdown Start | 2019-02-15 14:10:00 |
| Drawdown End | 2019-04-11 18:15:00 |
| Market change | -5.88% |
=====================================================
```
### Backtesting report table
The 1st table contains all trades the bot made, including "left open trades".
The last line will give you the overall performance of your strategy,
here:
```
| TOTAL | 429 | 0.36 | 0.00762792 | 76.20 | 4:12:00 | 186 0 243 43.4 |
```
The bot has made `429` trades for an average duration of `4:12:00`, with a performance of `76.20%` (profit), meaning it has
earned a total of `0.00762792 BTC` starting with a capital of 0.01 BTC.
The column `Avg Profit %` shows the average profit for all trades made.
The column `Tot Profit %` shows instead the total profit % in relation to the starting balance.
In the above results, we have a starting balance of 0.01 BTC and an absolute profit of 0.00762792 BTC - so the `Tot Profit %` will be `(0.00762792 / 0.01) * 100 ~= 76.2%`.
Your strategy performance is influenced by your entry strategy, your exit strategy, and also by the `minimal_roi` and `stop_loss` you have set.
For example, if your `minimal_roi` is only `"0": 0.01` you cannot expect the bot to make more profit than 1% (because it will exit every time a trade reaches 1%).
```json
"minimal_roi": {
"0": 0.01
},
```
On the other hand, if you set `minimal_roi` too high, e.g. `"0": 0.55`
(55%), there is almost no chance that the bot will ever reach this profit.
Hence, keep in mind that your performance is an integral mix of all different elements of the strategy, your configuration, and the crypto-currency pairs you have set up.
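To make the effect of a low `minimal_roi` concrete, here is a small, purely illustrative calculation - it ignores fees, losing trades and stake sizing, and the number of trades is made up:

```python
# Purely illustrative: with minimal_roi = {"0": 0.01} every trade is closed
# as soon as it reaches +1%, so each trade can contribute at most ~1%,
# no matter how far the price would have kept running.
n_trades = 50
roi_cap_per_trade = 0.01  # corresponds to "0": 0.01

best_case_growth = (1 + roi_cap_per_trade) ** n_trades - 1
print(f"Best case over {n_trades} capped trades: {best_case_growth:.1%}")
# -> about +64% at best, assuming the full balance compounds on every trade
```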
### Exit reasons table
The 2nd table contains a recap of exit reasons.
This table can tell you which area needs some additional work (e.g. all or many of the `exit_signal` trades are losses, so you should work on improving the exit signal, or consider disabling it).
### Left open trades table
The 3rd table contains all trades the bot had to `force_exit` at the end of the backtesting period to present you the full picture.
This is necessary to simulate realistic behavior, since the backtest period has to end at some point, while realistically, you could leave the bot running forever.
These trades are also included in the first table, but are also shown separately in this table for clarity.
### Summary metrics
The last element of the backtest report is the summary metrics table.
It contains some useful key metrics about performance of your strategy on backtesting data.
```
================== SUMMARY METRICS ==================
| Metric | Value |
|-----------------------------+---------------------|
| Backtesting from | 2019-01-01 00:00:00 |
| Backtesting to | 2019-05-01 00:00:00 |
| Trading Mode | Spot |
| Max open trades | 3 |
| | |
| Total/Daily Avg Trades | 429 / 3.575 |
| Starting balance | 0.01000000 BTC |
| Final balance | 0.01762792 BTC |
| Absolute profit | 0.00762792 BTC |
| Total profit % | 76.2% |
| CAGR % | 460.87% |
| Sortino | 1.88 |
| Sharpe | 2.97 |
| Calmar | 6.29 |
| SQN | 2.45 |
| Profit factor | 1.11 |
| Expectancy (Ratio) | -0.15 (-0.05) |
| Avg. stake amount | 0.001 BTC |
| Total trade volume | 0.429 BTC |
| | |
| Long / Short | 352 / 77 |
| Total profit Long % | 1250.58% |
| Total profit Short % | -15.02% |
| Absolute profit Long | 0.00838792 BTC |
| Absolute profit Short | -0.00076 BTC |
| | |
| Best Pair | LSK/BTC 26.26% |
| Worst Pair | ZEC/BTC -10.18% |
| Best Trade | LSK/BTC 4.25% |
| Worst Trade | ZEC/BTC -10.25% |
| Best day | 0.00076 BTC |
| Worst day | -0.00036 BTC |
| Days win/draw/lose | 12 / 82 / 25 |
| Avg. Duration Winners | 4:23:00 |
| Avg. Duration Loser | 6:55:00 |
| Max Consecutive Wins / Loss | 3 / 4 |
| Rejected Entry signals | 3089 |
| Entry/Exit Timeouts | 0 / 0 |
| Canceled Trade Entries | 34 |
| Canceled Entry Orders | 123 |
| Replaced Entry Orders | 89 |
| | |
| Min balance | 0.00945123 BTC |
| Max balance | 0.01846651 BTC |
| Max % of account underwater | 25.19% |
| Absolute Drawdown (Account) | 13.33% |
| Drawdown | 0.0015 BTC |
| Drawdown high | 0.0013 BTC |
| Drawdown low | -0.0002 BTC |
| Drawdown Start | 2019-02-15 14:10:00 |
| Drawdown End | 2019-04-11 18:15:00 |
| Market change | -5.88% |
=====================================================
```
- `Backtesting from` / `Backtesting to`: Backtesting range (usually defined with the `--timerange` option).
- `Max open trades`: Setting of `max_open_trades` (or `--max-open-trades`) - or number of pairs in the pairlist (whatever is lower).
- `Trading Mode`: Spot or Futures trading.
- `Total/Daily Avg Trades`: Identical to the total trades of the backtest output table / Total trades divided by the backtesting duration in days (this will give you information about how many trades to expect from the strategy).
- `Starting balance`: Start balance - as given by dry-run-wallet (config or command line).
- `Final balance`: Final balance - starting balance + absolute profit.
- `Absolute profit`: Profit made in stake currency.
- `Total profit %`: Total profit. Aligned to the `TOTAL` row's `Tot Profit %` from the first table. Calculated as `(End capital - Starting capital) / Starting capital`.
- `CAGR %`: Compound annual growth rate.
- `Sortino`: Annualized Sortino ratio.
- `Sharpe`: Annualized Sharpe ratio.
- `Calmar`: Annualized Calmar ratio.
- `SQN`: System Quality Number (SQN) - by Van Tharp.
- `Profit factor`: Gross profit divided by gross loss (illustrated in the sketch after this list).
- `Avg. stake amount`: Average stake amount, either `stake_amount` or the average when using dynamic stake amount.
- `Total trade volume`: Volume generated on the exchange to reach the above profit.
- `Best Pair` / `Worst Pair`: Best and worst performing pair, and its corresponding `Tot Profit %`.
- `Best Trade` / `Worst Trade`: Biggest single winning trade and biggest single losing trade.
- `Best day` / `Worst day`: Best and worst day based on daily profit.
- `Days win/draw/lose`: Winning / Losing days (draws are usually days without a closed trade).
- `Avg. Duration Winners` / `Avg. Duration Loser`: Average durations for winning and losing trades.
- `Max Consecutive Wins / Loss`: Maximum consecutive wins/losses in a row.
- `Rejected Entry signals`: Trade entry signals that could not be acted upon due to `max_open_trades` being reached.
- `Entry/Exit Timeouts`: Entry/exit orders which did not fill (only applicable if custom pricing is used).
- `Canceled Trade Entries`: Number of trades that have been canceled by user request via `adjust_entry_price`.
- `Canceled Entry Orders`: Number of entry orders that have been canceled by user request via `adjust_entry_price`.
- `Replaced Entry Orders`: Number of entry orders that have been replaced by user request via `adjust_entry_price`.
- `Min balance` / `Max balance`: Lowest and Highest Wallet balance during the backtest period.
- `Max % of account underwater`: Maximum percentage your account has decreased from the top since the simulation started.
Calculated as the maximum of `(Max Balance - Current Balance) / (Max Balance)`.
- `Absolute Drawdown (Account)`: Maximum Account Drawdown experienced. Calculated as `(Absolute Drawdown) / (DrawdownHigh + startingBalance)`.
- `Drawdown`: Maximum, absolute drawdown experienced. Difference between Drawdown High and Subsequent Low point.
- `Drawdown high` / `Drawdown low`: Profit at the beginning and end of the largest drawdown period. A negative low value means initial capital lost.
- `Drawdown Start` / `Drawdown End`: Start and end datetime for this largest drawdown (can also be visualized via the `plot-dataframe` sub-command).
- `Market change`: Change of the market during the backtest period. Calculated as average of all pairs changes from the first to the last candle using the "close" column.
- `Long / Short`: Split long/short values (Only shown when short trades were made).
- `Total profit Long %` / `Absolute profit Long`: Profit long trades only (Only shown when short trades were made).
- `Total profit Short %` / `Absolute profit Short`: Profit short trades only (Only shown when short trades were made).
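To make a few of these definitions concrete, the sketch below recomputes the profit factor, expectancy and "Max % of account underwater" from a made-up list of closed-trade profits. Freqtrade computes these metrics internally; this is only an illustration of the formulas described above:

```python
# Illustrative only - made-up per-trade profits in stake currency (BTC here).
starting_balance = 0.01
trade_profits = [0.0004, -0.0002, 0.0007, -0.0005, 0.0003]

wins = [p for p in trade_profits if p > 0]
losses = [p for p in trade_profits if p < 0]

# Profit factor: gross profit divided by gross loss.
profit_factor = sum(wins) / abs(sum(losses))

# Expectancy: average win weighted by win-rate, minus average loss weighted by loss-rate.
win_rate = len(wins) / len(trade_profits)
expectancy = (sum(wins) / len(wins)) * win_rate - abs(sum(losses) / len(losses)) * (1 - win_rate)

# Max % of account underwater: largest relative distance below the balance peak.
balance = peak = starting_balance
max_underwater = 0.0
for profit in trade_profits:
    balance += profit
    peak = max(peak, balance)
    max_underwater = max(max_underwater, (peak - balance) / peak)

print(f"Profit factor:               {profit_factor:.2f}")
print(f"Expectancy:                  {expectancy:.6f} BTC per trade")
print(f"Max % of account underwater: {max_underwater:.2%}")
```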
### Daily / Weekly / Monthly breakdown
You can get an overview of daily, weekly, or monthly results by using the `--breakdown <>` switch.
To visualize daily and weekly breakdowns, you can use the following:
``` bash
freqtrade backtesting --strategy MyAwesomeStrategy --breakdown day week
```
``` output
======================== DAY BREAKDOWN =========================
| Day | Tot Profit USDT | Wins | Draws | Losses |
|------------+-------------------+--------+---------+----------|
| 03/07/2021 | 200.0 | 2 | 0 | 0 |
| 04/07/2021 | -50.31 | 0 | 0 | 2 |
| 05/07/2021 | 220.611 | 3 | 2 | 0 |
| 06/07/2021 | 150.974 | 3 | 0 | 2 |
| 07/07/2021 | -70.193 | 1 | 0 | 2 |
| 08/07/2021 | 212.413 | 2 | 0 | 3 |
```
The output will show a table containing the realized absolute profit (in stake currency) for the given time period, as well as the wins, draws and losses that materialized (closed) on that day. Below that, a second table summarizes the values per week, indicated by the date of the closing Sunday. A monthly breakdown is likewise indicated by the last day of the month.
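Conceptually, these breakdown tables are just an aggregation of closed trades by their close date. The following sketch is illustrative only - the numbers are made up and it does not reproduce freqtrade's own formatting - and assumes a trades DataFrame with `close_date` and `profit_abs` columns:

```python
import pandas as pd

# Made-up trades for illustration; real trades come from the exported backtest results.
trades = pd.DataFrame(
    {
        "close_date": pd.to_datetime(
            ["2021-07-03 10:00", "2021-07-03 14:00", "2021-07-04 09:00"]
        ),
        "profit_abs": [120.0, 80.0, -50.31],
    }
)

daily = (
    trades.set_index("close_date")["profit_abs"]
    .resample("1D")
    .agg(["sum", "count"])
    .rename(columns={"sum": "Tot Profit", "count": "Trades"})
)
print(daily)
```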
### Backtest result caching
To save time, backtesting will by default reuse a cached result from within the last day when the backtested strategy and config match those of a previous backtest. To force a new backtest despite an existing result for an identical run, specify the `--cache none` parameter.
!!! Warning
Caching is automatically disabled for open-ended timeranges (`--timerange 20210101-`), as freqtrade cannot reliably ensure that the underlying data didn't change. Cached results can also be reused where they shouldn't be if the original backtest had missing data at the end which has since been fixed by downloading more data.
In this instance, please use `--cache none` once to force a fresh backtest.
### Further backtest-result analysis
To further analyze your backtest results, freqtrade will export the trades to file by default.
You can then load the trades to perform further analysis as shown in the [data analysis](strategy_analysis_example.md#load-backtest-results-to-pandas-dataframe) backtesting section.
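As a starting point, something like the following sketch loads the exported results into pandas. The path is a placeholder, and column and key names may differ slightly between freqtrade versions - see the linked analysis documentation for the full walkthrough:

```python
from freqtrade.data.btanalysis import load_backtest_data, load_backtest_stats

# Points at the backtest results directory - the latest result is used.
backtest_dir = "user_data/backtest_results/"

# Summary metrics per strategy (same numbers as the summary metrics table).
stats = load_backtest_stats(backtest_dir)
print(stats["strategy"].keys())

# Individual trades as a pandas DataFrame.
# If the result contains multiple strategies, pass strategy="YourStrategy".
trades = load_backtest_data(backtest_dir)
print(trades.groupby("pair")["profit_abs"].sum().sort_values())
```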
### Backtest output file
The output file freqtrade produces is a zip file containing the following files:
- The backtest report in json format
- the market change data in feather format
- a copy of the strategy file
- a copy of the strategy parameters (if a parameter file was used)
- a sanitized copy of the config file
This will ensure results are reproducible - under the assumption that the same data is available.
Only the strategy file and the config file are included in the zip file; any additional dependencies (other files imported by the strategy) are not included.
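If you only want to see what ended up in the archive, the Python standard library is enough. The filename below is a placeholder for the zip produced by your run:

```python
import zipfile

# Placeholder name - use the actual file from user_data/backtest_results/
result_zip = "user_data/backtest_results/backtest-result-2024-01-01_12-00-00.zip"

with zipfile.ZipFile(result_zip) as archive:
    for name in archive.namelist():
        print(name)
```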
## Assumptions made by backtesting
Since backtesting lacks some detailed information about what happens within a candle, it needs to take a few assumptions:
- Exchange [trading limits](#trading-limits-in-backtesting) are respected
- Entries happen at open-price unless a custom price logic has been specified
- All orders are filled at the requested price (no slippage) as long as the price is within the candle's high/low range
- Exit-signal exits happen at open-price of the consecutive candle
- Exits free their trade slot for a new trade with a different pair
- Exit-signal is favored over Stoploss, because exit-signals are assumed to trigger on the candle's open
- ROI
- Exits are compared to high - but the ROI value is used (e.g. ROI = 2%, high=5% - so the exit will be at 2%)
- Exits are never "below the candle", so a ROI of 2% may result in an exit at 2.4% if low was at 2.4% profit
- ROI entries which came into effect on the triggering candle (e.g. `120: 0.02` for 1h candles, from `60: 0.05`) will use the candle's open as exit rate
- Force-exits caused by `<N>=-1` ROI entries use low as exit value, unless N falls on the candle open (e.g. `120: -1` for 1h candles)
- Stoploss exits happen exactly at stoploss price, even if low was lower, but the loss will be `2 * fees` higher than the stoploss price
- Stoploss is evaluated before ROI within one candle. So you can often see more trades with the `stoploss` exit reason compared to the results obtained with the same strategy in the Dry Run/Live Trade modes
- Low happens before high for stoploss, protecting capital first
- Trailing stoploss
- Trailing Stoploss is only adjusted if it's below the candle's low (otherwise it would be triggered)
- On trade entry candles that trigger trailing stoploss, the "minimum offset" (`stop_positive_offset`) is assumed (instead of high) - and the stop is calculated from this point. This rule is NOT applicable to custom-stoploss scenarios, since there's no information about the stoploss logic available.
- High happens first - adjusting stoploss
- Low uses the adjusted stoploss (so exits with large high-low difference are backtested correctly)
- ROI applies before trailing-stop, ensuring profits are "top-capped" at ROI if both ROI and trailing stop apply
- Exit-reason does not explain if a trade was positive or negative, just what triggered the exit (this can look odd if negative ROI values are used)
- Evaluation sequence (if multiple signals happen on the same candle)
- Exit-signal
- Stoploss
- ROI
- Trailing stoploss
- Position reversals (futures only) happen if an entry signal in the other direction than the closing trade triggers at the candle the existing trade closes.
Taking these assumptions, backtesting tries to mirror real trading as closely as possible. However, backtesting will **never** replace running a strategy in dry-run mode.
Also, keep in mind that past results don't guarantee future success.
In addition to the above assumptions, strategy authors should carefully read the [Common Mistakes](strategy-customization.md#common-mistakes-when-developing-strategies) section, to avoid using data in backtesting which is not available in real market conditions.
### Trading limits in backtesting
Exchanges have certain trading limits, like minimum (and maximum) base currency, or minimum/maximum stake (quote) currency.
These limits are usually listed in the exchange documentation as "trading rules" or similar and can be quite different between different pairs.
Backtesting (as well as live and dry-run) does honor these limits, and will ensure that a stoploss can be placed below this value - so the value will be slightly higher than what the exchange specifies.
However, Freqtrade has no information about historic limits.
This can lead to situations where trading limits are inflated by using a historic price, resulting in minimum amounts > 50\$.
For example:
BTC's minimum tradable amount is 0.001.
BTC trades at 22,000\$ today (so the 0.001 BTC minimum corresponds to 22\$) - but the backtesting period includes prices as high as 50,000\$.
Today's minimum would be `0.001 * 22_000` - or 22\$.
However, the limit could also have been 50\$ - based on `0.001 * 50_000` - at some point during the backtesting period.
#### Trading precision limits
Most exchanges impose precision limits on both price and amounts, so you cannot buy 1.0020401 of a pair, or at a price of 1.24567123123.
Instead, these prices and amounts will be rounded or truncated (based on the exchange definition) to the defined trading precision.
The above values may for example be rounded to an amount of 1.002, and a price of 1.24567.
These precision values are based on current exchange limits (as described in the [above section](#trading-limits-in-backtesting)), as historic precision limits are not available.
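As a rough illustration of such truncation, the sketch below truncates the example values to assumed precision steps. Freqtrade itself takes the real precision from the exchange metadata; the step sizes here are made up:

```python
from decimal import Decimal


def truncate(value: str, step: str) -> Decimal:
    """Truncate `value` down to the nearest multiple of `step` (no rounding up)."""
    value_d, step_d = Decimal(value), Decimal(step)
    return (value_d // step_d) * step_d


# Assumed precision steps, for illustration only.
print(truncate("1.0020401", "0.001"))        # -> 1.002
print(truncate("1.24567123123", "0.00001"))  # -> 1.24567
```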
## Improved backtest accuracy
One big limitation of backtesting is its inability to know how prices moved intra-candle (was the high reached before the close, or vice versa?).
So assuming you run backtesting with a 1h timeframe, there will be 4 prices for that candle (Open, High, Low, Close).
While backtesting makes some assumptions about this (see above), it can never be perfect, and will always be biased in one way or the other.
To mitigate this, freqtrade can use a lower (faster) timeframe to simulate intra-candle movements.
To utilize this, you can append `--timeframe-detail 5m` to your regular backtesting command.
``` bash
freqtrade backtesting --strategy AwesomeStrategy --timeframe 1h --timeframe-detail 5m
```
This will load 1h data (the main timeframe) as well as 5m data (detail timeframe) for the selected timerange.
The strategy will be analyzed with the 1h timeframe.
Candles where activity may take place (there's an active signal, the pair is in a trade) are evaluated at the 5m timeframe.
This will allow for a more accurate simulation of intra-candle movements - and can lead to different results, especially on higher timeframes.
Entries will generally still happen at the main candle's open; however, trade slots may be freed earlier (if the exit signal triggers on a 5m candle), and can then be used for a new trade on a different pair.
All callback functions (`custom_exit()`, `custom_stoploss()`, ...) will be run for each 5m candle once the trade is opened (so 12 times in the above example of a 1h timeframe with a 5m detail timeframe).
`--timeframe-detail` must be smaller than the original timeframe, otherwise backtesting will fail to start.
Obviously this will require more memory (5m data is bigger than 1h data), and will also impact runtime (depending on the number of trades and trade durations).
Also, data must be available / downloaded already.
!!! Tip
You can use this function as the last part of strategy development, to ensure your strategy is not exploiting one of the [backtesting assumptions](#assumptions-made-by-backtesting). Strategies that perform similarly well with this mode have a good chance to perform well in dry/live modes too (although only forward-testing (dry-mode) can really confirm a strategy).
??? Sample "Extreme Difference Example"
Using `--timeframe-detail` on an extreme example (all below pairs have the 10:00 candle with an entry signal) may lead to the following backtesting Trade sequence with 1 max_open_trades:
| Pair | Entry Time | Exit Time | Duration |
|------|------------|-----------| -------- |
| BTC/USDT | 2024-01-01 10:00:00 | 2024-01-01 10:05:00 | 5m |
| ETH/USDT | 2024-01-01 10:05:00 | 2024-01-01 10:15:00 | 10m |
| XRP/USDT | 2024-01-01 10:15:00 | 2024-01-01 10:30:00 | 15m |
| SOL/USDT | 2024-01-01 10:15:00 | 2024-01-01 11:05:00 | 50m |
| BTC/USDT | 2024-01-01 11:05:00 | 2024-01-01 12:00:00 | 55m |
Without timeframe-detail, this would look like:
| Pair | Entry Time | Exit Time | Duration |
|------|------------|-----------| -------- |
| BTC/USDT | 2024-01-01 10:00:00 | 2024-01-01 11:00:00 | 1h |
| BTC/USDT | 2024-01-01 11:00:00 | 2024-01-01 12:00:00 | 1h |
The difference is significant, as without detail data, only the first `max_open_trades` signals per candle are evaluated, and the trade slots are only freed at the end of the candle, allowing for a new trade to be opened at the next candle.
## Backtesting multiple strategies
To compare multiple strategies, a list of Strategies can be provided to backtesting.
This is limited to 1 timeframe value per run. However, data is only loaded once from disk so if you have multiple
strategies you'd like to compare, this will give a nice runtime boost.
All listed Strategies need to be in the same directory, unless also `--recursive-strategy-search` is specified, where sub-directories within the strategy directory are also considered.
``` bash
freqtrade backtesting --timerange 20180401-20180410 --timeframe 5m --strategy-list Strategy001 Strategy002 --export trades
```
This will save the results to `user_data/backtest_results/backtest-result-<datetime>.json`, including results for both `Strategy001` and `Strategy002`.
There will be an additional table comparing the wins/losses of the different strategies (identical to the "Total" row in the first table).
Detailed output for all strategies one after the other will be available, so make sure to scroll up to see the details per strategy.
```
================================================== STRATEGY SUMMARY ===================================================================
| Strategy | Trades | Avg Profit % | Tot Profit BTC | Tot Profit % | Avg Duration | Wins | Draws | Losses | Drawdown % |
|-------------+---------+----------------+------------------+----------------+----------------+-------+--------+--------+------------|
| Strategy1 | 429 | 0.36 | 0.00762792 | 76.20 | 4:12:00 | 186 | 0 | 243 | 45.2 |
| Strategy2 | 1487 | -0.13 | -0.00988917 | -98.79 | 4:43:00 | 662 | 0 | 825 | 241.68 |
```
## Next step
Great, your strategy is profitable. What if the bot could give you the optimal parameters to use for your strategy?
Your next step is to learn [how to find optimal parameters with Hyperopt](hyperopt.md).
105
docs/bot-basics.md Normal file
View File
@ -0,0 +1,105 @@
# Freqtrade basics
This page provides you with some basic concepts on how Freqtrade works and operates.
## Freqtrade terminology
* **Strategy**: Your trading strategy, telling the bot what to do.
* **Trade**: Open position.
* **Open Order**: Order which is currently placed on the exchange, and is not yet complete.
* **Pair**: Tradable pair, usually in the format of Base/Quote (e.g. `XRP/USDT` for spot, `XRP/USDT:USDT` for futures).
* **Timeframe**: Candle length to use (e.g. `"5m"`, `"1h"`, ...).
* **Indicators**: Technical indicators (SMA, EMA, RSI, ...).
* **Limit order**: Limit orders which execute at the defined limit price or better.
* **Market order**: Guaranteed to fill, may move price depending on the order size.
* **Current Profit**: Currently pending (unrealized) profit for this trade. This is mainly used throughout the bot and UI.
* **Realized Profit**: Already realized profit. Only relevant in combination with [partial exits](strategy-callbacks.md#adjust-trade-position) - which also explains the calculation logic for this.
* **Total Profit**: Combined realized and unrealized profit. The relative number (%) is calculated against the total investment in this trade.
## Fee handling
All profit calculations of Freqtrade include fees. For Backtesting / Hyperopt / Dry-run modes, the exchange default fee is used (lowest tier on the exchange). For live operations, fees are used as applied by the exchange (this includes BNB rebates etc.).
## Pair naming
Freqtrade follows the [ccxt naming convention](https://docs.ccxt.com/#/README?id=consistency-of-base-and-quote-currencies) for currencies.
Using the wrong naming convention in the wrong market will usually result in the bot not recognizing the pair, typically producing errors like "this pair is not available".
### Spot pair naming
For spot pairs, naming will be `base/quote` (e.g. `ETH/USDT`).
### Futures pair naming
For futures pairs, naming will be `base/quote:settle` (e.g. `ETH/USDT:USDT`).
## Bot execution logic
Starting freqtrade in dry-run or live mode (using `freqtrade trade`) will start the bot and start the bot iteration loop.
This will also run the `bot_start()` callback.
By default, the bot loop runs every few seconds (`internals.process_throttle_secs`) and performs the following actions:
* Fetch open trades from persistence.
* Calculate current list of tradable pairs.
* Download OHLCV data for the pairlist including all [informative pairs](strategy-customization.md#get-data-for-non-tradeable-pairs)
This step is only executed once per candle to avoid unnecessary network traffic.
* Call `bot_loop_start()` strategy callback.
* Analyze strategy per pair.
* Call `populate_indicators()`
* Call `populate_entry_trend()`
* Call `populate_exit_trend()`
* Update trades open order state from exchange.
* Call `order_filled()` strategy callback for filled orders.
* Check timeouts for open orders.
* Calls `check_entry_timeout()` strategy callback for open entry orders.
* Calls `check_exit_timeout()` strategy callback for open exit orders.
* Calls `adjust_order_price()` strategy callback for open orders.
* Calls `adjust_entry_price()` strategy callback for open entry orders. *only called when `adjust_order_price()` is not implemented*
* Calls `adjust_exit_price()` strategy callback for open exit orders. *only called when `adjust_order_price()` is not implemented*
* Verifies existing positions and places exit orders if needed.
* Considers stoploss, ROI and exit-signal, `custom_exit()` and `custom_stoploss()`.
* Determine exit-price based on `exit_pricing` configuration setting or by using the `custom_exit_price()` callback.
* Before an exit order is placed, `confirm_trade_exit()` strategy callback is called.
* Check position adjustments for open trades if enabled by calling `adjust_trade_position()` and place additional order if required.
* Check if trade-slots are still available (i.e. whether `max_open_trades` has been reached).
* Verifies entry signal trying to enter new positions.
* Determine entry-price based on `entry_pricing` configuration setting, or by using the `custom_entry_price()` callback.
* In Margin and Futures mode, `leverage()` strategy callback is called to determine the desired leverage.
* Determine stake size by calling the `custom_stake_amount()` callback.
* Before an entry order is placed, `confirm_trade_entry()` strategy callback is called.
This loop will be repeated again and again until the bot is stopped.
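As a rough sketch of how some of these callbacks surface in a strategy: the class name and logic below are placeholders only, and the full signatures and further callbacks are documented in the strategy callbacks documentation.

```python
from datetime import datetime

from pandas import DataFrame

from freqtrade.persistence import Trade
from freqtrade.strategy import IStrategy


class CallbackSketch(IStrategy):
    timeframe = "5m"
    minimal_roi = {"0": 0.05}
    stoploss = -0.10

    def bot_loop_start(self, **kwargs) -> None:
        # Called once at the start of every bot iteration (once per candle in
        # backtesting), before the per-pair analysis.
        pass

    def custom_exit(self, pair: str, trade: Trade, current_time: datetime,
                    current_rate: float, current_profit: float, **kwargs):
        # Placeholder logic: request an exit once unrealized profit exceeds 2%.
        if current_profit > 0.02:
            return "illustrative_take_profit"
        return None

    # The usual populate_* methods are still required; trivial versions shown here.
    def populate_indicators(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        return dataframe

    def populate_entry_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        dataframe.loc[:, "enter_long"] = 0
        return dataframe

    def populate_exit_trend(self, dataframe: DataFrame, metadata: dict) -> DataFrame:
        dataframe.loc[:, "exit_long"] = 0
        return dataframe
```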
## Backtesting / Hyperopt execution logic
[Backtesting](backtesting.md) and [hyperopt](hyperopt.md) only perform part of the above logic, since most of the trading operations are fully simulated.
* Load historic data for configured pairlist.
* Calls `bot_start()` once.
* Calculate indicators (calls `populate_indicators()` once per pair).
* Calculate entry / exit signals (calls `populate_entry_trend()` and `populate_exit_trend()` once per pair).
* Loops per candle simulating entry and exit points.
* Calls `bot_loop_start()` strategy callback.
* Check for Order timeouts, either via the `unfilledtimeout` configuration, or via `check_entry_timeout()` / `check_exit_timeout()` strategy callbacks.
* Calls `adjust_order_price()` strategy callback for open orders.
* Calls `adjust_entry_price()` strategy callback for open entry orders. *only called when `adjust_order_price()` is not implemented!*
* Calls `adjust_exit_price()` strategy callback for open exit orders. *only called when `adjust_order_price()` is not implemented!*
* Check for trade entry signals (`enter_long` / `enter_short` columns).
* Confirm trade entry / exits (calls `confirm_trade_entry()` and `confirm_trade_exit()` if implemented in the strategy).
* Call `custom_entry_price()` (if implemented in the strategy) to determine entry price (Prices are moved to be within the opening candle).
* In Margin and Futures mode, `leverage()` strategy callback is called to determine the desired leverage.
* Determine stake size by calling the `custom_stake_amount()` callback.
* Check position adjustments for open trades if enabled and call `adjust_trade_position()` to determine if an additional order is requested.
* Call `order_filled()` strategy callback for filled entry orders.
* Call `custom_stoploss()` and `custom_exit()` to find custom exit points.
* For exits based on exit-signal, custom-exit and partial exits: Call `custom_exit_price()` to determine exit price (Prices are moved to be within the closing candle).
* Call `order_filled()` strategy callback for filled exit orders.
* Generate backtest report output
!!! Note
Both Backtesting and Hyperopt include exchange default Fees in the calculation. Custom fees can be passed to backtesting / hyperopt by specifying the `--fee` argument.
!!! Warning "Callback call frequency"
Backtesting will call each callback at max. once per candle (`--timeframe-detail` modifies this behavior to once per detailed candle).
Most callbacks will be called once per iteration in live (usually every ~5s) - which can cause backtesting mismatches.
136
docs/bot-usage.md Normal file
View File
@ -0,0 +1,136 @@
# Start the bot
This page explains the different parameters of the bot and how to run it.
!!! Note
If you've used `setup.sh`, don't forget to activate your virtual environment (`source .venv/bin/activate`) before running freqtrade commands.
!!! Warning "Up-to-date clock"
The clock on the system running the bot must be accurate, synchronized to an NTP server frequently enough to avoid problems with communication to the exchanges.
## Bot commands
--8<-- "commands/main.md"
### Bot trading commands
--8<-- "commands/trade.md"
### How to specify which configuration file should be used?
The bot allows you to select which configuration file you want to use by means of
the `-c/--config` command line option:
```bash
freqtrade trade -c path/far/far/away/config.json
```
Per default, the bot loads the `config.json` configuration file from the current
working directory.
### How to use multiple configuration files?
The bot allows you to use multiple configuration files by specifying multiple
`-c/--config` options on the command line. Configuration parameters
defined in later configuration files override parameters with the same name
defined in configuration files specified earlier on the command line.
For example, you can make a separate configuration file with your key and secret
for the exchange you use for trading, and specify the default configuration file with
empty key and secret values while running in Dry-run mode (which does not actually
require them):
```bash
freqtrade trade -c ./config.json
```
and specify both configuration files when running in the normal Live Trade Mode:
```bash
freqtrade trade -c ./config.json -c path/to/secrets/keys.config.json
```
This could help you hide your private exchange key and secret on your local machine
by setting appropriate file permissions for the file which contains the actual secrets, and additionally
prevent unintended disclosure of sensitive private data when you publish examples
of your configuration in the project issues or on the Internet.
See more details on this technique with examples in the documentation page on
[configuration](configuration.md).
### Where to store custom data
Freqtrade allows the creation of a user-data directory using `freqtrade create-userdir --userdir someDirectory`.
This directory will look as follows:
```
user_data/
├── backtest_results
├── data
├── hyperopts
├── hyperopt_results
├── plot
└── strategies
```
You can add the entry "user_data_dir" setting to your configuration, to always point your bot to this directory.
Alternatively, pass in `--userdir` to every command.
The bot will fail to start if the directory does not exist, but will create necessary subdirectories.
This directory should contain your custom strategies, custom hyperopts and hyperopt loss functions, backtesting historical data (downloaded using either backtesting command or the download script) and plot outputs.
It is recommended to use version control to keep track of changes to your strategies.
### How to use **--strategy**?
This parameter will allow you to load your custom strategy class.
To test the bot installation, you can use the `SampleStrategy` installed by the `create-userdir` subcommand (usually `user_data/strategies/sample_strategy.py`).
The bot will search your strategy file within `user_data/strategies`.
To use other directories, please read the next section about `--strategy-path`.
To load a strategy, simply pass the class name (e.g.: `CustomStrategy`) in this parameter.
**Example:**
Say you have a file `my_awesome_strategy.py` in `user_data/strategies`, containing
a strategy class called `AwesomeStrategy`. To load it:
```bash
freqtrade trade --strategy AwesomeStrategy
```
If the bot does not find your strategy file, it will display an error
message with the reason (file not found, or errors in your code).
Learn more about strategy files in
[Strategy Customization](strategy-customization.md).
### How to use **--strategy-path**?
This parameter allows you to add an additional strategy lookup path, which gets
checked before the default locations (The passed path must be a directory!):
```bash
freqtrade trade --strategy AwesomeStrategy --strategy-path /some/directory
```
#### How to install a strategy?
This is very simple. Copy your strategy file into the directory
`user_data/strategies` or use `--strategy-path`. And voilà, the bot is ready to use it.
### How to use **--db-url**?
When you run the bot in Dry-run mode, no transactions are
stored in a database by default. If you want to store your bot's actions in a database,
use `--db-url`. This can also be used to specify a custom database
in production mode. Example command:
```bash
freqtrade trade -c config.json --db-url sqlite:///tradesv3.dry_run.sqlite
```
## Next step
The optimal strategy for the bot will change with time depending on market trends. The next step is
[Strategy Customization](strategy-customization.md).
View File
@ -0,0 +1,66 @@
```
usage: freqtrade backtesting-analysis [-h] [-v] [--no-color] [--logfile FILE]
[-V] [-c PATH] [-d PATH]
[--userdir PATH]
[--export-filename PATH]
[--analysis-groups {0,1,2,3,4,5} [{0,1,2,3,4,5} ...]]
[--enter-reason-list ENTER_REASON_LIST [ENTER_REASON_LIST ...]]
[--exit-reason-list EXIT_REASON_LIST [EXIT_REASON_LIST ...]]
[--indicator-list INDICATOR_LIST [INDICATOR_LIST ...]]
[--entry-only] [--exit-only]
[--timerange TIMERANGE]
[--rejected-signals] [--analysis-to-csv]
[--analysis-csv-path ANALYSIS_CSV_PATH]
options:
-h, --help show this help message and exit
--export-filename PATH, --backtest-filename PATH
Use this filename for backtest results.Requires
`--export` to be set as well. Example: `--export-filen
ame=user_data/backtest_results/backtest_today.json`
--analysis-groups {0,1,2,3,4,5} [{0,1,2,3,4,5} ...]
grouping output - 0: simple wins/losses by enter tag,
1: by enter_tag, 2: by enter_tag and exit_tag, 3: by
pair and enter_tag, 4: by pair, enter_ and exit_tag
(this can get quite large), 5: by exit_tag
--enter-reason-list ENTER_REASON_LIST [ENTER_REASON_LIST ...]
Space separated list of entry signals to analyse.
Default: all. e.g. 'entry_tag_a entry_tag_b'
--exit-reason-list EXIT_REASON_LIST [EXIT_REASON_LIST ...]
Space separated list of exit signals to analyse.
Default: all. e.g. 'exit_tag_a roi stop_loss
trailing_stop_loss'
--indicator-list INDICATOR_LIST [INDICATOR_LIST ...]
Space separated list of indicators to analyse. e.g.
'close rsi bb_lowerband profit_abs'
--entry-only Only analyze entry signals.
--exit-only Only analyze exit signals.
--timerange TIMERANGE
Specify what timerange of data to use.
--rejected-signals Analyse rejected signals
--analysis-to-csv Save selected analysis tables to individual CSVs
--analysis-csv-path ANALYSIS_CSV_PATH
Specify a path to save the analysis CSVs if
--analysis-to-csv is enabled. Default:
user_data/basktesting_results/
Common arguments:
-v, --verbose Verbose mode (-vv for more, -vvv to get all messages).
--no-color Disable colorization of hyperopt results. May be
useful if you are redirecting output to a file.
--logfile FILE, --log-file FILE
Log to the file specified. Special values are:
'syslog', 'journald'. See the documentation for more
details.
-V, --version show program's version number and exit
-c PATH, --config PATH
Specify configuration file (default:
`userdir/config.json` or `config.json` whichever
exists). Multiple --config options may be used. Can be
set to `-` to read config from stdin.
-d PATH, --datadir PATH, --data-dir PATH
Path to directory with historical backtesting data.
--userdir PATH, --user-data-dir PATH
Path to userdata directory.
```
Some files were not shown because too many files have changed in this diff.