Merge pull request #46 from fortesenselabs/feature/environments
Feature: Environments*
seekersoftec authored Nov 28, 2024
2 parents b0a4370 + c257ff3 commit 4e3f9f7
Showing 900 changed files with 491,218 additions and 10,171 deletions.
20 changes: 10 additions & 10 deletions .docker/docker-compose.yml
@@ -2,23 +2,23 @@ version: '3.5'

 services:
   postgres:
-    container_name: nautilus-database
+    container_name: tradeflow-database
     image: postgres
     environment:
       POSTGRES_USER: ${POSTGRES_USER:-postgres}
       POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-pass}
-      POSTGRES_DATABASE: nautilus
+      POSTGRES_DATABASE: tradeflow
       PGDATA: /data/postgres
     volumes:
-      - nautilus-database:/data/postgres
+      - tradeflow-database:/data/postgres
     ports:
       - "5432:5432"
     networks:
-      - nautilus-network
+      - tradeflow-network
     restart: unless-stopped

   pgadmin:
-    container_name: nautilus-pgadmin
+    container_name: tradeflow-pgadmin
     image: dpage/pgadmin4
     environment:
       PGADMIN_DEFAULT_EMAIL: ${PGADMIN_DEFAULT_EMAIL:[email protected]}
@@ -28,21 +28,21 @@ services:
     ports:
       - "${PGADMIN_PORT:-5051}:80"
     networks:
-      - nautilus-network
+      - tradeflow-network
     restart: unless-stopped

   redis:
-    container_name: nautilus-redis
+    container_name: tradeflow-redis
     image: redis
     ports:
       - 6379:6379
     restart: unless-stopped
     networks:
-      - nautilus-network
+      - tradeflow-network

 networks:
-  nautilus-network:
+  tradeflow-network:

 volumes:
-  nautilus-database:
+  tradeflow-database:
   pgadmin:
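
The renamed services keep the stock image defaults, so a quick connectivity check from the host could look like the sketch below. This is illustrative only: it assumes the compose defaults shown above (`postgres`/`pass` on port 5432, Redis on 6379) and that the `psycopg2` and `redis` Python packages are installed locally.

```python
# Hypothetical smoke test against the tradeflow compose stack above.
import psycopg2
import redis

# Note: the official postgres image reads POSTGRES_DB, not POSTGRES_DATABASE,
# so with this compose file the default "postgres" database is what exists.
pg = psycopg2.connect(
    host="localhost",
    port=5432,
    user="postgres",
    password="pass",
    dbname="postgres",
)
with pg, pg.cursor() as cur:
    cur.execute("SELECT version();")
    print("postgres:", cur.fetchone()[0])

r = redis.Redis(host="localhost", port=6379)
print("redis ping:", r.ping())
```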
7 changes: 0 additions & 7 deletions .docker/jupyterlab.dockerfile

This file was deleted.

51 changes: 0 additions & 51 deletions .docker/nautilus_trader.dockerfile

This file was deleted.

3 changes: 2 additions & 1 deletion .dockerignore
@@ -385,4 +385,5 @@ tests/

 # micell.
 research/*
-add_stage.sh
+add_stage.sh
+config.json
25 changes: 23 additions & 2 deletions .github/workflows/docker.yml
@@ -60,20 +60,41 @@ jobs:
         id: branch-name
         uses: tj-actions/[email protected]

-      - name: Build MetaTrader 5 Terminal Image (latest)
+      - name: Build and Push MetaTrader 5 Terminal image (latest) to Github Packages
         if: ${{ steps.branch-name.outputs.current_branch == 'main' }}
         id: mt5_build_latest
         uses: docker/build-push-action@v3
         with:
           file: "infrastructure/MetaTrader5/Dockerfile"
           context: "infrastructure/MetaTrader5/"
           push: true
-          tags: ghcr.io/${{ github.repository_owner }}/metatrader5-terminal:latest
+          tags: ${{ github.repository_owner }}/metatrader5-terminal:latest,${{ github.repository_owner }}/metatrader5-terminal:${{ github.sha }},${{ github.repository_owner }}/metatrader5-terminal:${{ github.run_id }}
           cache-from: type=gha
           cache-to: type=gha
       - name: Digest metatrader5-terminal image
         run: echo ${{ steps.mt5_build_latest.outputs.digest }}
+
+      - name: Login to DockerHub
+        uses: docker/[email protected]
+        with:
+          username: ${{ secrets.DOCKERHUB_USER }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Build and Push MetaTrader 5 Terminal image (latest) to DockerHub
+        if: ${{ steps.branch-name.outputs.current_branch == 'main' }}
+        id: mt5_build_docker_latest
+        uses: docker/build-push-action@v3
+        with:
+          file: "infrastructure/MetaTrader5/Dockerfile"
+          context: "infrastructure/MetaTrader5/"
+          platforms: linux/amd64
+          push: true
+          tags: ${{ github.repository_owner }}/metatrader5-terminal:latest,${{ github.repository_owner }}/metatrader5-terminal:${{ github.sha }},${{ github.repository_owner }}/metatrader5-terminal:${{ github.run_id }}
+          cache-from: type=gha
+          cache-to: type=gha
+      - name: Digest metatrader5-terminal image
+        run: echo ${{ steps.mt5_build_docker_latest.outputs.digest }}

       # - name: Build Trade Flow Environment image (latest)
       # if: ${{ steps.branch-name.outputs.current_branch == 'main' }}
       # id: trade_flow_env_build_latest
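
Once this workflow runs on `main`, the image should be pullable by name. A minimal sketch with the Docker SDK for Python, assuming the `docker` package is installed and that the owner namespace resolves to `fortesenselabs` as in the tags above:

```python
# Hypothetical: pull the image published by the workflow above.
import docker

client = docker.from_env()
image = client.images.pull("fortesenselabs/metatrader5-terminal", tag="latest")
print(image.id, image.tags)
```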
4 changes: 3 additions & 1 deletion .gitignore
@@ -280,4 +280,6 @@ add_stage.sh
 catalog/
 *.session*
 *trades.db
-current_open_positions.csv
+current_open_positions.csv
+config.json
+omar.conf
3 changes: 2 additions & 1 deletion README.md
@@ -81,11 +81,12 @@ All names, logos, and brands of third parties that may be referenced in our site

## References

-**Note:** This project is built on top of the following papers, libraries, and frameworks, so most of the code and concepts are heavily borrowed from them.
+This project is built on top of the following papers, libraries, and frameworks, so most of the code and concepts are heavily borrowed from them.

- [Core paper](https://github.com/fortesenselabs/trade_flow/blob/main/docs/books/1911.10107v1.pdf)
- https://github.com/tensortrade-org/tensortrade
- https://github.com/nautechsystems/nautilus_trader/
- https://github.com/AI4Finance-Foundation
- https://github.com/OpenBB-finance
- https://github.com/amor71/LiuAlgoTrader
- https://github.com/crflynn/stochastic
Binary file added docs/images/itb_fig_1.png
10 changes: 5 additions & 5 deletions examples/create_environment_example.py
@@ -5,11 +5,11 @@
 from trade_flow.environments.generic.environment import TradingEnvironment
 from trade_flow.environments.utils import create_env_from_dataframe
 from trade_flow.feed import Stream, Coinbase_BTCUSD_1h, Coinbase_BTCUSD_d
-from trade_flow.environments.default.oms.exchanges import Exchange
-from trade_flow.environments.default.oms.execution.simulated import execute_order
-from trade_flow.environments.default.oms.instruments import USD, BTC
-from trade_flow.environments.default.oms.wallet import Wallet
-from trade_flow.environments.default.oms.portfolio import Portfolio
+from trade_flow.environments.default.engine.exchanges import Exchange
+from trade_flow.environments.default.engine.execution.simulated import execute_order
+from trade_flow.environments.default.engine.instruments import USD, BTC
+from trade_flow.environments.default.engine.wallet import Wallet
+from trade_flow.environments.default.engine.portfolio import Portfolio
 import trade_flow.environments.default as default
 from trade_flow.agents import SB3Agent

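
The change above only renames the `oms` package to `engine`; the components compose the same way. A rough sketch of the wiring, following the TensorTrade-style API this project borrows from (exact `trade_flow` signatures may differ, and `Coinbase_BTCUSD_1h` is assumed to be an OHLCV dataframe):

```python
# Rough sketch, not taken from the example file itself.
from trade_flow.feed import Stream, Coinbase_BTCUSD_1h
from trade_flow.environments.default.engine.exchanges import Exchange
from trade_flow.environments.default.engine.execution.simulated import execute_order
from trade_flow.environments.default.engine.instruments import USD, BTC
from trade_flow.environments.default.engine.wallet import Wallet
from trade_flow.environments.default.engine.portfolio import Portfolio

# Price stream drives the simulated exchange used for order execution.
price = Stream.source(list(Coinbase_BTCUSD_1h["close"]), dtype="float").rename("USD-BTC")
coinbase = Exchange("coinbase", service=execute_order)(price)

# Portfolio of wallets held on that exchange, quoted in USD.
portfolio = Portfolio(
    USD,
    [
        Wallet(coinbase, 10_000 * USD),
        Wallet(coinbase, 1 * BTC),
    ],
)
```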
2 changes: 1 addition & 1 deletion examples/indicator_example.py
@@ -10,7 +10,7 @@
print("Dataset:", df, "\n")

# Create an instance of SupportResistanceIndicator or OptimizedSupportResistanceIndicator
indicator = SupportResistanceIndicator(df)
indicator = OptimizedSupportResistanceIndicator(df)

# Calculate Pivot Points
pivot_points_df = indicator.calculate_pivot_points()
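
`calculate_pivot_points` itself is not shown in this diff, but classic floor-trader pivot points are easy to reproduce for comparison: pivot P = (H + L + C) / 3, first resistance R1 = 2P - L, first support S1 = 2P - H. A self-contained pandas sketch (not the project's implementation):

```python
# Standalone pivot-point sketch for comparison; not the project's indicator class.
import pandas as pd

def pivot_points(ohlc: pd.DataFrame) -> pd.DataFrame:
    """Classic floor-trader pivot, first resistance and first support."""
    pivot = (ohlc["high"] + ohlc["low"] + ohlc["close"]) / 3.0
    return pd.DataFrame(
        {
            "pivot": pivot,
            "r1": 2.0 * pivot - ohlc["low"],
            "s1": 2.0 * pivot - ohlc["high"],
        },
        index=ohlc.index,
    )

df = pd.DataFrame({"high": [105.0, 110.0], "low": [95.0, 100.0], "close": [100.0, 108.0]})
print(pivot_points(df))
```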
137 changes: 137 additions & 0 deletions examples/nt_example/model.py
@@ -0,0 +1,137 @@
# Define Linear Regression Model
import pandas as pd
from typing import Optional
from sklearn.linear_model import LinearRegression
from sklearn.metrics import r2_score

from nautilus_trader.config import ActorConfig
from nautilus_trader.common.actor import Actor
from nautilus_trader.model.data import DataType
from nautilus_trader.model.data import Bar, BarSpecification
from nautilus_trader.model.identifiers import InstrumentId
from nautilus_trader.common.enums import LogColor
from nautilus_trader.core.datetime import secs_to_nanos, unix_nanos_to_dt
from utils import bars_to_dataframe, make_bar_type, ModelUpdate, Prediction


class PredictedPriceConfig(ActorConfig):
source_symbol: str
target_symbol: str
bar_spec: str = "10-DAY-LAST" # "10-SECOND-LAST"
min_model_timedelta: str = "14D"


class PredictedPriceActor(Actor):
def __init__(self, config: PredictedPriceConfig):
super().__init__(config=config)

self.source_id = InstrumentId.from_str(config.source_symbol)
self.target_id = InstrumentId.from_str(config.target_symbol)
self.bar_spec = BarSpecification.from_str(self.config.bar_spec)
self.model: Optional[LinearRegression] = None
self.hedge_ratio: Optional[float] = None
self._min_model_timedelta = secs_to_nanos(
pd.Timedelta(self.config.min_model_timedelta).total_seconds()
)
self._last_model = pd.Timestamp(0)

def on_start(self):
# Set instruments
self.left = self.cache.instrument(self.source_id)
self.right = self.cache.instrument(self.target_id)

# Subscribe to bars
self.subscribe_bars(make_bar_type(instrument_id=self.source_id, bar_spec=self.bar_spec))
self.subscribe_bars(make_bar_type(instrument_id=self.target_id, bar_spec=self.bar_spec))

def on_bar(self, bar: Bar):
self._check_model_fit(bar)
self._predict(bar)
        self.log.info(repr(bar))

def on_stop(self):
# Unsubscribe from bars
self.unsubscribe_bars(make_bar_type(instrument_id=self.source_id, bar_spec=self.bar_spec))
self.unsubscribe_bars(make_bar_type(instrument_id=self.target_id, bar_spec=self.bar_spec))

@property
def data_length_valid(self) -> bool:
return self._check_first_tick(self.source_id) and self._check_first_tick(self.target_id)

@property
def has_fit_model_today(self):
return unix_nanos_to_dt(self.clock.timestamp_ns()).date() == self._last_model.date()

def _check_first_tick(self, instrument_id) -> bool:
"""Check we have enough bar data for this `instrument_id`, according to `min_model_timedelta`"""
bars = self.cache.bars(bar_type=make_bar_type(instrument_id, bar_spec=self.bar_spec))
if not bars:
return False
delta = self.clock.timestamp_ns() - bars[-1].ts_init
return delta > self._min_model_timedelta

def _check_model_fit(self, bar: Bar):
# Check we have the minimum required data
if not self.data_length_valid:
return

# Check we haven't fit a model yet today
if self.has_fit_model_today:
return

# Generate a dataframe from cached bar data
df = bars_to_dataframe(
{
self.source_id.value: self.cache.bars(
bar_type=make_bar_type(self.source_id, bar_spec=self.bar_spec)
),
self.target_id.value: self.cache.bars(
bar_type=make_bar_type(self.target_id, bar_spec=self.bar_spec)
),
}
)

# Format the arrays for scikit-learn
X = df["close"].loc[:, self.source_id.value].astype(float).values.reshape(-1, 1)
Y = df["close"].loc[:, self.target_id.value].astype(float).values.reshape(-1, 1)

# Fit a model
self.model = LinearRegression(fit_intercept=False)
self.model.fit(X, Y)
self.log.info(
f"Fit model @ {unix_nanos_to_dt(bar.ts_init)}, r2: {r2_score(Y, self.model.predict(X))}",
color=LogColor.BLUE,
)
self._last_model = unix_nanos_to_dt(bar.ts_init)

# Record std dev of predictions (used for scaling our order price)
pred = self.model.predict(X)
errors = pred - Y
std_pred = errors.std()

        # The model slope is our hedge ratio (the ratio of source to target)
self.hedge_ratio = float(self.model.coef_[0][0])
self.log.info(f"Computed hedge_ratio={self.hedge_ratio:0.4f}", color=LogColor.BLUE)

# Publish model
model_update = ModelUpdate(
model=self.model,
hedge_ratio=self.hedge_ratio,
std_prediction=std_pred,
ts_init=bar.ts_init,
)
self.publish_data(
data_type=DataType(ModelUpdate, metadata={"instrument_id": self.target_id.value}),
data=model_update,
)

def _predict(self, bar: Bar):
if self.model is not None and bar.bar_type.instrument_id == self.source_id:
pred = self.model.predict([[bar.close]])[0][0]
prediction = Prediction(
instrument_id=self.target_id, prediction=pred, ts_init=bar.ts_init
)
self.publish_data(
data_type=DataType(Prediction, metadata={"instrument_id": self.target_id.value}),
data=prediction,
)
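
The core of `_check_model_fit` above is a no-intercept regression of target closes on source closes: the slope is the hedge ratio, and the residual standard deviation is what gets published for scaling order prices. A standalone sketch of that step on synthetic data (values made up purely for illustration):

```python
# Synthetic demonstration of the hedge-ratio fit used in PredictedPriceActor.
import numpy as np
from sklearn.linear_model import LinearRegression

rng = np.random.default_rng(42)
source_close = rng.uniform(90.0, 110.0, size=(500, 1))
target_close = 1.8 * source_close + rng.normal(0.0, 0.5, size=(500, 1))

model = LinearRegression(fit_intercept=False)
model.fit(source_close, target_close)

hedge_ratio = float(model.coef_[0][0])                      # ~1.8 for this data
std_prediction = (model.predict(source_close) - target_close).std()
print(f"hedge_ratio={hedge_ratio:.4f}, std_prediction={std_prediction:.4f}")
```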
File renamed without changes.
