From 7afb64634c1c17d38ddceb301c5020b1bf7bf58f Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 24 May 2025 22:35:10 -0400 Subject: [PATCH 001/244] (feat) improve fs utils --- utils/file_system.py | 57 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 57 insertions(+) diff --git a/utils/file_system.py b/utils/file_system.py index 040126f8..da108d51 100644 --- a/utils/file_system.py +++ b/utils/file_system.py @@ -229,7 +229,59 @@ def save_model_to_yml(yml_path: Path, cm: ClientConfigAdapter): except Exception as e: logging.error("Error writing configs: %s" % (str(e),), exc_info=True) + def get_base_path(self): + """ + Returns the base path for file operations + :return: The base path string + """ + return self.base_path + + def get_directory_creation_time(self, path): + """ + Get the creation time of a directory + :param path: The path to the directory + :return: ISO formatted creation time string or None if directory doesn't exist + """ + import os + import datetime + + full_path = os.path.join(self.base_path, path) + if not os.path.exists(full_path): + return None + + # Get creation time (platform dependent) + try: + # For Unix systems, use stat + creation_time = os.stat(full_path).st_ctime + # Convert to datetime + return datetime.datetime.fromtimestamp(creation_time).isoformat() + except Exception: + # Fallback + return "unknown" + + def list_directories(self, path): + """ + List all directories within a given path + :param path: The path to list directories from + :return: List of directory names + """ + import os + + full_path = os.path.join(self.base_path, path) + if not os.path.exists(full_path): + return [] + + try: + # Return only directories + return [d for d in os.listdir(full_path) if os.path.isdir(os.path.join(full_path, d))] + except Exception: + return [] + def list_databases(self): + """ + Lists all database files in archived instances + :return: List of database file paths + """ archived_path = os.path.join(self.base_path, "archived") archived_instances = self.list_folders("archived") archived_databases = [] @@ -240,6 +292,11 @@ def list_databases(self): return archived_databases def list_checkpoints(self, full_path: bool): + """ + Lists all checkpoint database files + :param full_path: If True, return full paths, otherwise just filenames + :return: List of checkpoint database files + """ dir_path = os.path.join(self.base_path, "data") if full_path: checkpoints = [os.path.join(dir_path, f) for f in os.listdir(dir_path) if From b52b91cc16e2c60295e5cf6576529ed01e372653 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 24 May 2025 22:35:35 -0400 Subject: [PATCH 002/244] (feat) add error handling for mqtt and middleware --- main.py | 44 +++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 41 insertions(+), 3 deletions(-) diff --git a/main.py b/main.py index f3bf1947..f05687c4 100644 --- a/main.py +++ b/main.py @@ -1,10 +1,12 @@ import os import secrets +from contextlib import asynccontextmanager from typing import Annotated from dotenv import load_dotenv from fastapi import Depends, FastAPI, HTTPException, status from fastapi.security import HTTPBasic, HTTPBasicCredentials +from fastapi.middleware.cors import CORSMiddleware from routers import ( manage_accounts, @@ -16,20 +18,54 @@ manage_market_data, manage_performance, ) +from utils.mqtt_exception_handler import setup_global_mqtt_exception_handler +# Load environment variables early load_dotenv() -security = HTTPBasic() +# Environment variables username = os.getenv("USERNAME", "admin") password = 
os.getenv("PASSWORD", "admin") -debug_mode = os.getenv("DEBUG_MODE", False) +debug_mode = os.getenv("DEBUG_MODE", "False").lower() in ("true", "1", "t") + +# Security setup +security = HTTPBasic() + + +@asynccontextmanager +async def lifespan(app: FastAPI): + """ + Lifespan context manager for the FastAPI application. + Handles startup and shutdown events. + """ + # Startup logic + setup_global_mqtt_exception_handler() + yield + # Shutdown logic (add cleanup code here if needed) -app = FastAPI() + +# Initialize FastAPI with metadata and lifespan +app = FastAPI( + title="Hummingbot Backend API", + description="API for managing Hummingbot trading instances", + version="0.1.0", + lifespan=lifespan, +) + +# Add CORS middleware +app.add_middleware( + CORSMiddleware, + allow_origins=["*"], # Modify in production to specific origins + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) def auth_user( credentials: Annotated[HTTPBasicCredentials, Depends(security)], ): + """Authenticate user using HTTP Basic Auth""" current_username_bytes = credentials.username.encode("utf8") correct_username_bytes = f"{username}".encode("utf8") is_correct_username = secrets.compare_digest( @@ -46,8 +82,10 @@ def auth_user( detail="Incorrect username or password", headers={"WWW-Authenticate": "Basic"}, ) + return credentials.username +# Include all routers with authentication app.include_router(manage_docker.router, dependencies=[Depends(auth_user)]) app.include_router(manage_broker_messages.router, dependencies=[Depends(auth_user)]) app.include_router(manage_files.router, dependencies=[Depends(auth_user)]) From 4a4e32d5a879346bba20ad53c1ca37581c10526e Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 24 May 2025 22:36:05 -0400 Subject: [PATCH 003/244] (feat) add logic to listen to bots and gracefully shutdown them --- services/bots_orchestrator.py | 84 ++++++++++++++++++++++++----------- 1 file changed, 59 insertions(+), 25 deletions(-) diff --git a/services/bots_orchestrator.py b/services/bots_orchestrator.py index b65293a5..94968a0a 100644 --- a/services/bots_orchestrator.py +++ b/services/bots_orchestrator.py @@ -1,4 +1,5 @@ import asyncio +import logging from collections import deque from typing import Optional @@ -7,6 +8,8 @@ from hbotrc.listener import BotListener from hbotrc.spec import TopicSpecs +logger = logging.getLogger(__name__) + class HummingbotPerformanceListener(BotListener): def __init__(self, *args, **kwargs): @@ -20,6 +23,7 @@ def __init__(self, *args, **kwargs): self._bot_error_logs = deque(maxlen=100) self._bot_general_logs = deque(maxlen=100) self.performance_report_sub = None + self._is_stopping = False def get_bot_performance(self): return self._bot_performance @@ -46,8 +50,17 @@ def _on_log(self, log): self._bot_general_logs.append(log) def stop(self): - super().stop() - self._bot_performance = {} + self._is_stopping = True + try: + super().stop() + except ConnectionError: + # Expected when bot disconnects + logger.debug(f"Bot {self._bot_id} disconnected as expected") + except Exception as e: + logger.error(f"Error stopping listener for bot {self._bot_id}: {e}") + finally: + self._bot_performance = {} + self._is_stopping = False class BotsManager: @@ -66,6 +79,7 @@ def hummingbot_containers_fiter(container): return "hummingbot" in container.name and "broker" not in container.name except Exception: return False + async def get_active_containers(self): loop = asyncio.get_event_loop() return await loop.run_in_executor(None, self._sync_get_active_containers) @@ -87,27 
+101,38 @@ def stop_update_active_bots_loop(self): async def update_active_bots(self, sleep_time=1): while True: - active_hbot_containers = await self.get_active_containers() - # Remove bots that are no longer active - for bot in list(self.active_bots): - if bot not in active_hbot_containers: - del self.active_bots[bot] - - # Add new bots or update existing ones - for bot in active_hbot_containers: - if bot not in self.active_bots: - hbot_listener = HummingbotPerformanceListener(host=self.broker_host, port=self.broker_port, - username=self.broker_username, - password=self.broker_password, - bot_id=bot) - hbot_listener.start() - self.active_bots[bot] = { - "bot_name": bot, - "broker_client": BotCommands(host=self.broker_host, port=self.broker_port, - username=self.broker_username, password=self.broker_password, - bot_id=bot), - "broker_listener": hbot_listener, - } + try: + active_hbot_containers = await self.get_active_containers() + # Remove bots that are no longer active + for bot in list(self.active_bots): + if bot not in active_hbot_containers: + # Properly stop the listener before removing + try: + self.active_bots[bot]["broker_listener"].stop() + except Exception as e: + logger.warning(f"Error stopping listener for {bot}: {e}") + del self.active_bots[bot] + + # Add new bots or update existing ones + for bot in active_hbot_containers: + if bot not in self.active_bots: + try: + hbot_listener = HummingbotPerformanceListener(host=self.broker_host, port=self.broker_port, + username=self.broker_username, + password=self.broker_password, + bot_id=bot) + hbot_listener.start() + self.active_bots[bot] = { + "bot_name": bot, + "broker_client": BotCommands(host=self.broker_host, port=self.broker_port, + username=self.broker_username, password=self.broker_password, + bot_id=bot), + "broker_listener": hbot_listener, + } + except Exception as e: + logger.error(f"Error creating listener for {bot}: {e}") + except Exception as e: + logger.error(f"Error in update_active_bots: {e}") await asyncio.sleep(sleep_time) # Interact with a specific bot @@ -118,8 +143,17 @@ def start_bot(self, bot_name, **kwargs): def stop_bot(self, bot_name, **kwargs): if bot_name in self.active_bots: - self.active_bots[bot_name]["broker_listener"].stop() - return self.active_bots[bot_name]["broker_client"].stop(**kwargs) + # First stop the bot command + result = self.active_bots[bot_name]["broker_client"].stop(**kwargs) + + # Then stop the listener, catching any connection errors + try: + self.active_bots[bot_name]["broker_listener"].stop() + except Exception as e: + logger.warning(f"Error stopping listener for {bot_name}: {e}") + # Don't re-raise, as this is expected when bot disconnects + + return result def import_strategy_for_bot(self, bot_name, strategy, **kwargs): if bot_name in self.active_bots: From f425547bfafbdc7618a197e370b416365bd194e0 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 24 May 2025 22:36:52 -0400 Subject: [PATCH 004/244] (feat) change description names --- routers/manage_accounts.py | 2 +- routers/manage_backtesting.py | 2 +- routers/manage_broker_messages.py | 2 +- routers/manage_databases.py | 2 +- routers/manage_docker.py | 2 +- routers/manage_market_data.py | 2 +- routers/manage_performance.py | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/routers/manage_accounts.py b/routers/manage_accounts.py index 7751f7ce..76c92a29 100644 --- a/routers/manage_accounts.py +++ b/routers/manage_accounts.py @@ -7,7 +7,7 @@ from services.accounts_service import AccountsService from 
utils.file_system import FileSystemUtil -router = APIRouter(tags=["Manage Credentials"]) +router = APIRouter(tags=["Accounts"]) file_system = FileSystemUtil(base_path="bots/credentials") accounts_service = AccountsService() diff --git a/routers/manage_backtesting.py b/routers/manage_backtesting.py index 812f1fa4..27457431 100644 --- a/routers/manage_backtesting.py +++ b/routers/manage_backtesting.py @@ -7,7 +7,7 @@ from config import CONTROLLERS_MODULE, CONTROLLERS_PATH -router = APIRouter(tags=["Market Backtesting"]) +router = APIRouter(tags=["Backtesting"]) candles_factory = CandlesFactory() backtesting_engine = BacktestingEngineBase() diff --git a/routers/manage_broker_messages.py b/routers/manage_broker_messages.py index e31d1f3f..11b90ed7 100644 --- a/routers/manage_broker_messages.py +++ b/routers/manage_broker_messages.py @@ -5,7 +5,7 @@ from services.bots_orchestrator import BotsManager # Initialize the scheduler -router = APIRouter(tags=["Manage Broker Messages"]) +router = APIRouter(tags=["Broker"]) bots_manager = BotsManager(broker_host=BROKER_HOST, broker_port=BROKER_PORT, broker_username=BROKER_USERNAME, broker_password=BROKER_PASSWORD) diff --git a/routers/manage_databases.py b/routers/manage_databases.py index ae90dce8..9ec1a01f 100644 --- a/routers/manage_databases.py +++ b/routers/manage_databases.py @@ -10,7 +10,7 @@ from utils.file_system import FileSystemUtil -router = APIRouter(tags=["Database Management"]) +router = APIRouter(tags=["Databases"]) file_system = FileSystemUtil() diff --git a/routers/manage_docker.py b/routers/manage_docker.py index 9769cff3..3e1f3423 100644 --- a/routers/manage_docker.py +++ b/routers/manage_docker.py @@ -7,7 +7,7 @@ from services.bot_archiver import BotArchiver from services.docker_service import DockerManager -router = APIRouter(tags=["Docker Management"]) +router = APIRouter(tags=["Docker"]) docker_manager = DockerManager() bot_archiver = BotArchiver(os.environ.get("AWS_API_KEY"), os.environ.get("AWS_SECRET_KEY"), os.environ.get("S3_DEFAULT_BUCKET_NAME")) diff --git a/routers/manage_market_data.py b/routers/manage_market_data.py index a5e2bcd7..933cf093 100644 --- a/routers/manage_market_data.py +++ b/routers/manage_market_data.py @@ -4,7 +4,7 @@ from hummingbot.data_feed.candles_feed.candles_factory import CandlesFactory from hummingbot.data_feed.candles_feed.data_types import CandlesConfig, HistoricalCandlesConfig -router = APIRouter(tags=["Market Data"]) +router = APIRouter(tags=["Market"]) candles_factory = CandlesFactory() diff --git a/routers/manage_performance.py b/routers/manage_performance.py index 01bc316b..81a51bf1 100644 --- a/routers/manage_performance.py +++ b/routers/manage_performance.py @@ -5,7 +5,7 @@ from utils.etl_databases import PerformanceDataSource -router = APIRouter(tags=["Market Performance"]) +router = APIRouter(tags=["Performance"]) @router.post("/get-performance-results") From f9ba0f070ede904f4bfd5a25b0000034a8305a4f Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 24 May 2025 22:42:04 -0400 Subject: [PATCH 005/244] (feat) reorder api routers --- main.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/main.py b/main.py index f05687c4..e0d8bc06 100644 --- a/main.py +++ b/main.py @@ -87,10 +87,12 @@ def auth_user( # Include all routers with authentication app.include_router(manage_docker.router, dependencies=[Depends(auth_user)]) +app.include_router(manage_accounts.router, dependencies=[Depends(auth_user)]) app.include_router(manage_broker_messages.router, 
dependencies=[Depends(auth_user)]) -app.include_router(manage_files.router, dependencies=[Depends(auth_user)]) +app.include_router(manage_files.configs_router, dependencies=[Depends(auth_user)]) +app.include_router(manage_files.controllers_router, dependencies=[Depends(auth_user)]) +app.include_router(manage_files.scripts_router, dependencies=[Depends(auth_user)]) app.include_router(manage_market_data.router, dependencies=[Depends(auth_user)]) app.include_router(manage_backtesting.router, dependencies=[Depends(auth_user)]) app.include_router(manage_databases.router, dependencies=[Depends(auth_user)]) app.include_router(manage_performance.router, dependencies=[Depends(auth_user)]) -app.include_router(manage_accounts.router, dependencies=[Depends(auth_user)]) From d78371645518cddd606b1039d15af53126199752 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 28 May 2025 17:24:04 +0200 Subject: [PATCH 006/244] (feat) add logfire environment variable --- config.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/config.py b/config.py index f37dae8f..40fbd9f6 100644 --- a/config.py +++ b/config.py @@ -12,4 +12,5 @@ BROKER_USERNAME = os.getenv("BROKER_USERNAME", "admin") BROKER_PASSWORD = os.getenv("BROKER_PASSWORD", "password") PASSWORD_VERIFICATION_PATH = "bots/credentials/master_account/.password_verification" -BANNED_TOKENS = os.getenv("BANNED_TOKENS", "NAV,ARS,ETHW,ETHF").split(",") \ No newline at end of file +BANNED_TOKENS = os.getenv("BANNED_TOKENS", "NAV,ARS,ETHW,ETHF").split(",") +LOGFIRE_ENVIRONMENT = os.getenv("LOGFIRE_ENVIRONMENT", "dev") From 175a659064f5145b0bd68ee86d4b9b8714a1875d Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 28 May 2025 17:24:19 +0200 Subject: [PATCH 007/244] (feat) configure logfire with env and service name --- main.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/main.py b/main.py index e0d8bc06..e3fe11cf 100644 --- a/main.py +++ b/main.py @@ -3,11 +3,13 @@ from contextlib import asynccontextmanager from typing import Annotated +import logfire from dotenv import load_dotenv from fastapi import Depends, FastAPI, HTTPException, status from fastapi.security import HTTPBasic, HTTPBasicCredentials from fastapi.middleware.cors import CORSMiddleware +from config import LOGFIRE_ENVIRONMENT from routers import ( manage_accounts, manage_backtesting, @@ -61,6 +63,8 @@ async def lifespan(app: FastAPI): allow_headers=["*"], ) +logfire.configure(send_to_logfire="if-token-present", environment=LOGFIRE_ENVIRONMENT, service_name="backend-api") +logfire.instrument_fastapi(app) def auth_user( credentials: Annotated[HTTPBasicCredentials, Depends(security)], From 5e776d661442877a86e293b5940990d39e2359d1 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 28 May 2025 17:25:05 +0200 Subject: [PATCH 008/244] (feat) add to setup.sh --- set_environment.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/set_environment.sh b/set_environment.sh index 18501980..10b41336 100644 --- a/set_environment.sh +++ b/set_environment.sh @@ -15,3 +15,4 @@ echo "CONFIG_PASSWORD=$CONFIG_PASSWORD" > .env echo "BOTS_PATH=$BOTS_PATH" >> .env echo "USERNAME=$USERNAME" >> .env echo "PASSWORD=$PASSWORD" >> .env +echo "LOGFIRE_ENVIRONMENT=dev" >> .env From 779ecaea1353f2ed79b9bfd8b19a486c89d6ec67 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 28 May 2025 17:25:15 +0200 Subject: [PATCH 009/244] (feat) add logfire dependencies --- environment.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/environment.yml b/environment.yml index d41035c1..e7be063c 100644 --- 
a/environment.yml +++ b/environment.yml @@ -17,3 +17,6 @@ dependencies: - flake8 - isort - pre-commit + - logfire + - logfire[fastapi] + - logfire[system-metrics] From 5e304bc1128b87c587a446243df03f1fb662f34e Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 28 May 2025 17:25:24 +0200 Subject: [PATCH 010/244] (feat) fix errors getting prices --- services/accounts_service.py | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/services/accounts_service.py b/services/accounts_service.py index 9c824fa9..c63320d8 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -25,6 +25,11 @@ class AccountsService: to initialize all the connectors that are connected to each account, keep track of the balances of each account and update the balances of each account. """ + default_quotes = { + "hyperliquid": "USD", + "hyperliquid_perpetual": "USDC", + "xrpl": "RLUSD" + } def __init__(self, update_account_state_interval_minutes: int = 5, @@ -44,11 +49,12 @@ def __init__(self, def get_accounts_state(self): return self.accounts_state - def get_default_market(self, token: str): + def get_default_market(self, token: str, connector_name: str) -> str: if token.startswith("LD") and token != "LDO": # These tokens are staked in binance earn token = token[2:] - return f"{token}-{self.default_quote}" + quote = self.default_quotes.get(connector_name, self.default_quote) + return f"{token}-{quote}" def start_update_account_state_loop(self): """ @@ -203,14 +209,14 @@ async def update_account_state(self): balances = [{"token": key, "units": value} for key, value in connector.get_all_balances().items() if value != Decimal("0") and key not in BANNED_TOKENS] unique_tokens = [balance["token"] for balance in balances] - trading_pairs = [self.get_default_market(token) for token in unique_tokens if "USD" not in token] + trading_pairs = [self.get_default_market(token, connector_name) for token in unique_tokens if "USD" not in token] last_traded_prices = await self._safe_get_last_traded_prices(connector, trading_pairs) for balance in balances: token = balance["token"] if "USD" in token: price = Decimal("1") else: - market = self.get_default_market(balance["token"]) + market = self.get_default_market(balance["token"], connector_name) price = Decimal(last_traded_prices.get(market, 0)) tokens_info.append({ "token": balance["token"], @@ -225,9 +231,8 @@ async def update_account_state(self): f"Error updating balances for connector {connector_name} in account {account_name}: {e}") self.accounts_state[account_name][connector_name] = tokens_info - async def _safe_get_last_traded_prices(self, connector, trading_pairs, timeout=5): + async def _safe_get_last_traded_prices(self, connector, trading_pairs, timeout=10): try: - # TODO: Fix OKX connector to return the markets in Hummingbot format. 
last_traded = await asyncio.wait_for(connector.get_last_traded_prices(trading_pairs=trading_pairs), timeout=timeout) return last_traded except asyncio.TimeoutError: From 5444bbc3cc0197b76659933c24d9951e97edac83 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 29 May 2025 12:37:38 +0200 Subject: [PATCH 011/244] (feat) abstract connectors in accounts service --- services/accounts_service.py | 50 +++----------- services/connector_manager.py | 124 ++++++++++++++++++++++++++++++++++ 2 files changed, 135 insertions(+), 39 deletions(-) create mode 100644 services/connector_manager.py diff --git a/services/accounts_service.py b/services/accounts_service.py index c63320d8..4f7e3d63 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -6,15 +6,11 @@ from typing import Optional from fastapi import HTTPException -from hummingbot.client.config.client_config_map import ClientConfigMap from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger -from hummingbot.client.config.config_helpers import ClientConfigAdapter, ReadOnlyClientConfigAdapter, get_connector_class -from hummingbot.client.settings import AllConnectorSettings from config import BANNED_TOKENS, CONFIG_PASSWORD +from services.connector_manager import ConnectorManager from utils.file_system import FileSystemUtil -from utils.models import BackendAPIConfigAdapter -from utils.security import BackendAPISecurity file_system = FileSystemUtil() @@ -37,6 +33,7 @@ def __init__(self, account_history_file: str = "account_state_history.json"): # TODO: Add database to store the balances of each account each time it is updated. self.secrets_manager = ETHKeyFileSecretManger(CONFIG_PASSWORD) + self.connector_manager = ConnectorManager(self.secrets_manager) self.accounts = {} self.accounts_state = {} self.account_state_update_event = asyncio.Event() @@ -140,7 +137,7 @@ def initialize_accounts(self): for connector_name in self.list_credentials(account_name): try: connector_name = connector_name.split(".")[0] - connector = self.get_connector(account_name, connector_name) + connector = self.connector_manager.get_connector(account_name, connector_name) self.accounts[account_name][connector_name] = connector except Exception as e: logging.error(f"Error initializing connector {connector_name}: {e}") @@ -168,7 +165,7 @@ def initialize_connector(self, account_name: str, connector_name: str): if account_name not in self.accounts: self.accounts[account_name] = {} try: - connector = self.get_connector(account_name, connector_name) + connector = self.connector_manager.get_connector(account_name, connector_name) self.accounts[account_name][connector_name] = connector except Exception as e: logging.error(f"Error initializing connector {connector_name}: {e}") @@ -242,49 +239,20 @@ async def _safe_get_last_traded_prices(self, connector, trading_pairs, timeout=1 logging.error(f"Error getting last traded prices in connector {connector} for trading pairs {trading_pairs}: {e}") return {pair: Decimal("0") for pair in trading_pairs} - @staticmethod - def get_connector_config_map(connector_name: str): + def get_connector_config_map(self, connector_name: str): """ Get the connector config map for the specified connector. :param connector_name: The name of the connector. :return: The connector config map. 
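 
         Example (illustrative; actual key names depend on the connector):
             >>> accounts_service.get_connector_config_map("binance")
             ['binance_api_key', 'binance_api_secret']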
""" - connector_config = BackendAPIConfigAdapter(AllConnectorSettings.get_connector_config_keys(connector_name)) - return [key for key in connector_config.hb_config.__fields__.keys() if key != "connector"] + return self.connector_manager.get_connector_config_map(connector_name) async def add_connector_keys(self, account_name: str, connector_name: str, keys: dict): - BackendAPISecurity.login_account(account_name=account_name, secrets_manager=self.secrets_manager) - connector_config = BackendAPIConfigAdapter(AllConnectorSettings.get_connector_config_keys(connector_name)) - for key, value in keys.items(): - setattr(connector_config, key, value) - BackendAPISecurity.update_connector_keys(account_name, connector_config) - new_connector = self.get_connector(account_name, connector_name) - await new_connector._update_balances() + new_connector = await self.connector_manager.update_connector_keys(account_name, connector_name, keys) self.accounts[account_name][connector_name] = new_connector await self.update_account_state() await self.dump_account_state() - def get_connector(self, account_name: str, connector_name: str): - """ - Get the connector object for the specified account and connector. - :param account_name: The name of the account. - :param connector_name: The name of the connector. - :return: The connector object. - """ - BackendAPISecurity.login_account(account_name=account_name, secrets_manager=self.secrets_manager) - client_config_map = ClientConfigAdapter(ClientConfigMap()) - conn_setting = AllConnectorSettings.get_connector_settings()[connector_name] - keys = BackendAPISecurity.api_keys(connector_name) - read_only_config = ReadOnlyClientConfigAdapter.lock_config(client_config_map) - init_params = conn_setting.conn_init_parameters( - trading_pairs=[], - trading_required=True, - api_keys=keys, - client_config_map=read_only_config, - ) - connector_class = get_connector_class(connector_name) - connector = connector_class(**init_params) - return connector @staticmethod def list_accounts(): @@ -319,6 +287,8 @@ def delete_credentials(self, account_name: str, connector_name: str): self.accounts[account_name].pop(connector_name) if connector_name in self.accounts_state[account_name]: self.accounts_state[account_name].pop(connector_name) + # Clear the connector from cache + self.connector_manager.clear_cache(account_name, connector_name) def add_account(self, account_name: str): """ @@ -345,3 +315,5 @@ def delete_account(self, account_name: str): file_system.delete_folder('credentials', account_name) self.accounts.pop(account_name) self.accounts_state.pop(account_name) + # Clear all connectors for this account from cache + self.connector_manager.clear_cache(account_name) diff --git a/services/connector_manager.py b/services/connector_manager.py new file mode 100644 index 00000000..b6412391 --- /dev/null +++ b/services/connector_manager.py @@ -0,0 +1,124 @@ +import logging +from typing import Dict, Optional + +from hummingbot.client.config.client_config_map import ClientConfigMap +from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger +from hummingbot.client.config.config_helpers import ClientConfigAdapter, ReadOnlyClientConfigAdapter, get_connector_class +from hummingbot.client.settings import AllConnectorSettings + +from utils.models import BackendAPIConfigAdapter +from utils.security import BackendAPISecurity + + +class ConnectorManager: + """ + Manages the creation and caching of exchange connectors. + Handles connector configuration and initialization. 
+ """ + + def __init__(self, secrets_manager: ETHKeyFileSecretManger): + self.secrets_manager = secrets_manager + self._connector_cache: Dict[str, Dict[str, any]] = {} + + def get_connector(self, account_name: str, connector_name: str): + """ + Get the connector object for the specified account and connector. + Uses caching to avoid recreating connectors unnecessarily. + + :param account_name: The name of the account. + :param connector_name: The name of the connector. + :return: The connector object. + """ + cache_key = f"{account_name}:{connector_name}" + + if cache_key in self._connector_cache: + return self._connector_cache[cache_key] + + try: + connector = self._create_connector(account_name, connector_name) + self._connector_cache[cache_key] = connector + return connector + except Exception as e: + logging.error(f"Error creating connector {connector_name} for account {account_name}: {e}") + raise + + def _create_connector(self, account_name: str, connector_name: str): + """ + Create a new connector instance. + + :param account_name: The name of the account. + :param connector_name: The name of the connector. + :return: The connector object. + """ + BackendAPISecurity.login_account(account_name=account_name, secrets_manager=self.secrets_manager) + client_config_map = ClientConfigAdapter(ClientConfigMap()) + conn_setting = AllConnectorSettings.get_connector_settings()[connector_name] + keys = BackendAPISecurity.api_keys(connector_name) + read_only_config = ReadOnlyClientConfigAdapter.lock_config(client_config_map) + + init_params = conn_setting.conn_init_parameters( + trading_pairs=[], + trading_required=True, + api_keys=keys, + client_config_map=read_only_config, + ) + + connector_class = get_connector_class(connector_name) + connector = connector_class(**init_params) + return connector + + def clear_cache(self, account_name: Optional[str] = None, connector_name: Optional[str] = None): + """ + Clear the connector cache. + + :param account_name: If provided, only clear cache for this account. + :param connector_name: If provided with account_name, only clear this specific connector. + """ + if account_name and connector_name: + cache_key = f"{account_name}:{connector_name}" + self._connector_cache.pop(cache_key, None) + elif account_name: + # Clear all connectors for this account + keys_to_remove = [k for k in self._connector_cache.keys() if k.startswith(f"{account_name}:")] + for key in keys_to_remove: + self._connector_cache.pop(key) + else: + # Clear entire cache + self._connector_cache.clear() + + @staticmethod + def get_connector_config_map(connector_name: str): + """ + Get the connector config map for the specified connector. + + :param connector_name: The name of the connector. + :return: The connector config map. + """ + connector_config = BackendAPIConfigAdapter(AllConnectorSettings.get_connector_config_keys(connector_name)) + return [key for key in connector_config.hb_config.__fields__.keys() if key != "connector"] + + async def update_connector_keys(self, account_name: str, connector_name: str, keys: dict): + """ + Update the API keys for a connector and refresh the connector instance. + + :param account_name: The name of the account. + :param connector_name: The name of the connector. + :param keys: Dictionary of API keys to update. + :return: The updated connector instance. 
+ """ + BackendAPISecurity.login_account(account_name=account_name, secrets_manager=self.secrets_manager) + connector_config = BackendAPIConfigAdapter(AllConnectorSettings.get_connector_config_keys(connector_name)) + + for key, value in keys.items(): + setattr(connector_config, key, value) + + BackendAPISecurity.update_connector_keys(account_name, connector_config) + + # Clear the cache for this connector to force recreation with new keys + self.clear_cache(account_name, connector_name) + + # Create and return new connector instance + new_connector = self.get_connector(account_name, connector_name) + await new_connector._update_balances() + + return new_connector \ No newline at end of file From 2a752c3a92da59955f1fabec41f0e81889b04cdd Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 29 May 2025 12:37:51 +0200 Subject: [PATCH 012/244] (feat) rename service for convention --- services/docker_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/docker_service.py b/services/docker_service.py index 2232893c..de0da63a 100644 --- a/services/docker_service.py +++ b/services/docker_service.py @@ -12,7 +12,7 @@ file_system = FileSystemUtil() -class DockerManager: +class DockerService: def __init__(self): self.SOURCE_PATH = os.getcwd() try: From 6edb97943380431dc4bd6b0ef88d9b3e689ff5d6 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 29 May 2025 15:57:48 +0200 Subject: [PATCH 013/244] (feat) remove hbotrc in favor of direct usage of paho and asyncio mqtt --- environment.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/environment.yml b/environment.yml index e7be063c..0d356462 100644 --- a/environment.yml +++ b/environment.yml @@ -13,10 +13,10 @@ dependencies: - pip - pip: - hummingbot - - git+https://github.com/hummingbot/hbot-remote-client-py.git - flake8 - isort - pre-commit - logfire - logfire[fastapi] - logfire[system-metrics] + - aiomqtt>=2.0.0 From 1f964868c2a49677aaeafc92cf58eaa90bc2f1a4 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 6 Jun 2025 14:00:15 +0700 Subject: [PATCH 014/244] (feat) move to new mqtt manager --- services/bots_orchestrator.py | 335 ++++++++++++++++++++-------------- 1 file changed, 198 insertions(+), 137 deletions(-) diff --git a/services/bots_orchestrator.py b/services/bots_orchestrator.py index 94968a0a..a5aa1215 100644 --- a/services/bots_orchestrator.py +++ b/services/bots_orchestrator.py @@ -1,80 +1,42 @@ import asyncio import logging -from collections import deque from typing import Optional import docker -from hbotrc import BotCommands -from hbotrc.listener import BotListener -from hbotrc.spec import TopicSpecs + +from utils.mqtt_manager import MQTTManager logger = logging.getLogger(__name__) -class HummingbotPerformanceListener(BotListener): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - topic_prefix = TopicSpecs.PREFIX.format( - namespace=self._ns, - instance_id=self._bot_id - ) - self._performance_topic = f'{topic_prefix}/performance' - self._bot_performance = {} - self._bot_error_logs = deque(maxlen=100) - self._bot_general_logs = deque(maxlen=100) - self.performance_report_sub = None - self._is_stopping = False - - def get_bot_performance(self): - return self._bot_performance - - def get_bot_error_logs(self): - return list(self._bot_error_logs) - - def get_bot_general_logs(self): - return list(self._bot_general_logs) - - def _init_endpoints(self): - super()._init_endpoints() - self.performance_report_sub = self.create_subscriber(topic=self._performance_topic, 
- on_message=self._update_bot_performance) - - def _update_bot_performance(self, msg): - for controller_id, performance_report in msg.items(): - self._bot_performance[controller_id] = performance_report - - def _on_log(self, log): - if log.level_name == "ERROR": - self._bot_error_logs.append(log) - else: - self._bot_general_logs.append(log) - - def stop(self): - self._is_stopping = True - try: - super().stop() - except ConnectionError: - # Expected when bot disconnects - logger.debug(f"Bot {self._bot_id} disconnected as expected") - except Exception as e: - logger.error(f"Error stopping listener for bot {self._bot_id}: {e}") - finally: - self._bot_performance = {} - self._is_stopping = False +# HummingbotPerformanceListener class is no longer needed +# All functionality is now handled by MQTTManager -class BotsManager: +class BotsOrchestrator: + """Orchestrates Hummingbot instances using Docker and MQTT communication.""" + def __init__(self, broker_host, broker_port, broker_username, broker_password): self.broker_host = broker_host self.broker_port = broker_port self.broker_username = broker_username self.broker_password = broker_password + + # Initialize Docker client self.docker_client = docker.from_env() + + # Initialize MQTT manager + self.mqtt_manager = MQTTManager(host=broker_host, port=broker_port, username=broker_username, password=broker_password) + + # Active bots tracking self.active_bots = {} self._update_bots_task: Optional[asyncio.Task] = None + # MQTT manager will be started asynchronously later + @staticmethod def hummingbot_containers_fiter(container): + """Filter for Hummingbot containers.""" try: return "hummingbot" in container.name and "broker" not in container.name except Exception: @@ -88,84 +50,168 @@ def _sync_get_active_containers(self): return [ container.name for container in self.docker_client.containers.list() - if container.status == 'running' and self.hummingbot_containers_fiter(container) + if container.status == "running" and self.hummingbot_containers_fiter(container) ] def start_update_active_bots_loop(self): - self._update_bots_task = asyncio.create_task(self.update_active_bots()) + """Start the loop that monitors active bots.""" + # Start MQTT manager and update loop in async context + self._update_bots_task = asyncio.create_task(self._start_async()) + + async def _start_async(self): + """Start MQTT manager and update loop asynchronously.""" + logger.info("Starting MQTT manager...") + await self.mqtt_manager.start() + + # Then start the update loop + await self.update_active_bots() def stop_update_active_bots_loop(self): + """Stop the active bots monitoring loop.""" if self._update_bots_task: self._update_bots_task.cancel() self._update_bots_task = None + # Stop MQTT manager asynchronously + asyncio.create_task(self.mqtt_manager.stop()) + async def update_active_bots(self, sleep_time=1): + """Monitor and update active bots list using both Docker and MQTT discovery.""" while True: try: - active_hbot_containers = await self.get_active_containers() + # Get bots from Docker containers + docker_bots = await self.get_active_containers() + + # Get bots from MQTT messages (auto-discovered) + mqtt_bots = self.mqtt_manager.get_discovered_bots(timeout_seconds=300) # 5 minute timeout + + # Combine both sources + all_active_bots = set(docker_bots + mqtt_bots) + # Remove bots that are no longer active - for bot in list(self.active_bots): - if bot not in active_hbot_containers: - # Properly stop the listener before removing - try: - 
self.active_bots[bot]["broker_listener"].stop() - except Exception as e: - logger.warning(f"Error stopping listener for {bot}: {e}") - del self.active_bots[bot] - - # Add new bots or update existing ones - for bot in active_hbot_containers: - if bot not in self.active_bots: - try: - hbot_listener = HummingbotPerformanceListener(host=self.broker_host, port=self.broker_port, - username=self.broker_username, - password=self.broker_password, - bot_id=bot) - hbot_listener.start() - self.active_bots[bot] = { - "bot_name": bot, - "broker_client": BotCommands(host=self.broker_host, port=self.broker_port, - username=self.broker_username, password=self.broker_password, - bot_id=bot), - "broker_listener": hbot_listener, - } - except Exception as e: - logger.error(f"Error creating listener for {bot}: {e}") + for bot_name in list(self.active_bots): + if bot_name not in all_active_bots: + self.mqtt_manager.clear_bot_data(bot_name) + del self.active_bots[bot_name] + + # Add new bots + for bot_name in all_active_bots: + if bot_name not in self.active_bots: + self.active_bots[bot_name] = { + "bot_name": bot_name, + "status": "connected", + "source": "docker" if bot_name in docker_bots else "mqtt", + } + # Subscribe to this specific bot's topics + await self.mqtt_manager.subscribe_to_bot(bot_name) + except Exception as e: - logger.error(f"Error in update_active_bots: {e}") + logger.error(f"Error in update_active_bots: {e}", exc_info=True) + await asyncio.sleep(sleep_time) # Interact with a specific bot - def start_bot(self, bot_name, **kwargs): - if bot_name in self.active_bots: - self.active_bots[bot_name]["broker_listener"].start() - return self.active_bots[bot_name]["broker_client"].start(**kwargs) - - def stop_bot(self, bot_name, **kwargs): - if bot_name in self.active_bots: - # First stop the bot command - result = self.active_bots[bot_name]["broker_client"].stop(**kwargs) - - # Then stop the listener, catching any connection errors - try: - self.active_bots[bot_name]["broker_listener"].stop() - except Exception as e: - logger.warning(f"Error stopping listener for {bot_name}: {e}") - # Don't re-raise, as this is expected when bot disconnects - - return result + async def start_bot(self, bot_name, **kwargs): + """ + Start a bot with optional script. + Maintains backward compatibility with kwargs. + """ + if bot_name not in self.active_bots: + logger.warning(f"Bot {bot_name} not found in active bots") + return {"success": False, "message": f"Bot {bot_name} not found"} + + # Create StartCommandMessage.Request format + data = { + "log_level": kwargs.get("log_level"), + "script": kwargs.get("script"), + "conf": kwargs.get("conf"), + "is_quickstart": kwargs.get("is_quickstart", False), + "async_backend": kwargs.get("async_backend", True), + } + + success = await self.mqtt_manager.publish_command(bot_name, "start", data) + return {"success": success} + + async def stop_bot(self, bot_name, **kwargs): + """ + Stop a bot. + Maintains backward compatibility with kwargs. 
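+
+        Example (illustrative; bot names follow the hummingbot-* container naming):
+            result = await orchestrator.stop_bot(
+                "hummingbot-bot_1", skip_order_cancellation=False)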
+ """ + if bot_name not in self.active_bots: + logger.warning(f"Bot {bot_name} not found in active bots") + return {"success": False, "message": f"Bot {bot_name} not found"} + + # Create StopCommandMessage.Request format + data = { + "skip_order_cancellation": kwargs.get("skip_order_cancellation", False), + "async_backend": kwargs.get("async_backend", True), + } - def import_strategy_for_bot(self, bot_name, strategy, **kwargs): - if bot_name in self.active_bots: - return self.active_bots[bot_name]["broker_client"].import_strategy(strategy, **kwargs) + success = await self.mqtt_manager.publish_command(bot_name, "stop", data) - def configure_bot(self, bot_name, params, **kwargs): - if bot_name in self.active_bots: - return self.active_bots[bot_name]["broker_client"].config(params, **kwargs) + # Clear performance data after stop command to immediately reflect stopped status + if success: + self.mqtt_manager.clear_bot_performance(bot_name) - def get_bot_history(self, bot_name, **kwargs): - if bot_name in self.active_bots: - return self.active_bots[bot_name]["broker_client"].history(**kwargs) + return {"success": success} + + async def import_strategy_for_bot(self, bot_name, strategy, **kwargs): + """ + Import a strategy configuration for a bot. + Maintains backward compatibility. + """ + if bot_name not in self.active_bots: + logger.warning(f"Bot {bot_name} not found in active bots") + return {"success": False, "message": f"Bot {bot_name} not found"} + + # Create ImportCommandMessage.Request format + data = {"strategy": strategy} + success = await self.mqtt_manager.publish_command(bot_name, "import_strategy", data) + return {"success": success} + + async def configure_bot(self, bot_name, params, **kwargs): + """ + Configure bot parameters. + Maintains backward compatibility. + """ + if bot_name not in self.active_bots: + logger.warning(f"Bot {bot_name} not found in active bots") + return {"success": False, "message": f"Bot {bot_name} not found"} + + # Create ConfigCommandMessage.Request format + data = {"params": params} + success = await self.mqtt_manager.publish_command(bot_name, "config", data) + return {"success": success} + + async def get_bot_history(self, bot_name, **kwargs): + """ + Request bot trading history and wait for the response. + Maintains backward compatibility. 
+ """ + if bot_name not in self.active_bots: + logger.warning(f"Bot {bot_name} not found in active bots") + return {"success": False, "message": f"Bot {bot_name} not found"} + + # Create HistoryCommandMessage.Request format + data = { + "days": kwargs.get("days", 0), + "verbose": kwargs.get("verbose", False), + "precision": kwargs.get("precision"), + "async_backend": kwargs.get("async_backend", False), + } + + # Use the new RPC method to wait for response + timeout = kwargs.get("timeout", 30.0) # Default 30 second timeout + response = await self.mqtt_manager.publish_command_and_wait(bot_name, "history", data, timeout=timeout) + + if response is None: + return { + "success": False, + "message": f"No response received from {bot_name} within {timeout} seconds", + "timeout": True, + } + + return {"success": True, "data": response} @staticmethod def determine_controller_performance(controllers_performance): @@ -174,10 +220,7 @@ def determine_controller_performance(controllers_performance): try: # Check if all the metrics are numeric _ = sum(metric for key, metric in performance.items() if key not in ("positions_summary", "close_type_counts")) - cleaned_performance[controller] = { - "status": "running", - "performance": performance - } + cleaned_performance[controller] = {"status": "running", "performance": performance} except Exception as e: cleaned_performance[controller] = { "status": "error", @@ -186,28 +229,46 @@ def determine_controller_performance(controllers_performance): return cleaned_performance def get_all_bots_status(self): + """Get status information for all active bots.""" all_bots_status = {} for bot in self.active_bots: - all_bots_status[bot] = self.get_bot_status(bot) + status = self.get_bot_status(bot) + status["source"] = self.active_bots[bot].get("source", "unknown") + all_bots_status[bot] = status return all_bots_status def get_bot_status(self, bot_name): - if bot_name in self.active_bots: - try: - broker_listener = self.active_bots[bot_name]["broker_listener"] - controllers_performance = broker_listener.get_bot_performance() - performance = self.determine_controller_performance(controllers_performance) - error_logs = broker_listener.get_bot_error_logs() - general_logs = broker_listener.get_bot_general_logs() - status = "running" if len(performance) > 0 else "stopped" - return { - "status": status, - "performance": performance, - "error_logs": error_logs, - "general_logs": general_logs - } - except Exception as e: - return { - "status": "error", - "error": str(e) - } + """ + Get status information for a specific bot. 
+ """ + if bot_name not in self.active_bots: + return {"status": "not_found", "error": f"Bot {bot_name} not found"} + + try: + # Get data from MQTT manager + controllers_performance = self.mqtt_manager.get_bot_performance(bot_name) + performance = self.determine_controller_performance(controllers_performance) + error_logs = self.mqtt_manager.get_bot_error_logs(bot_name) + general_logs = self.mqtt_manager.get_bot_logs(bot_name) + + # Check if bot has sent recent messages (within last 30 seconds) + discovered_bots = self.mqtt_manager.get_discovered_bots(timeout_seconds=30) + recently_active = bot_name in discovered_bots + + # Determine status based on performance data and recent activity + if len(performance) > 0 and recently_active: + status = "running" + elif len(performance) > 0 and not recently_active: + status = "idle" # Has performance data but no recent activity + else: + status = "stopped" + + return { + "status": status, + "performance": performance, + "error_logs": error_logs, + "general_logs": general_logs, + "recently_active": recently_active, + } + except Exception as e: + return {"status": "error", "error": str(e)} From 734ec89acd77fa4cc245aa52c50189e8bec53f6c Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 6 Jun 2025 14:00:33 +0700 Subject: [PATCH 015/244] (feat) add RLUSD for xrpl --- services/__init__.py | 9 +++++++++ services/accounts_service.py | 6 +++--- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/services/__init__.py b/services/__init__.py index e69de29b..2bd037e2 100644 --- a/services/__init__.py +++ b/services/__init__.py @@ -0,0 +1,9 @@ +from .accounts_service import AccountsService +from .bots_orchestrator import BotsOrchestrator +from .docker_service import DockerService + +__all__ = [ + "AccountsService", + "BotsOrchestrator", + "DockerService", +] \ No newline at end of file diff --git a/services/accounts_service.py b/services/accounts_service.py index 4f7e3d63..3a458512 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -1,7 +1,7 @@ import asyncio import json import logging -from datetime import datetime, timedelta +from datetime import datetime from decimal import Decimal from typing import Optional @@ -9,7 +9,7 @@ from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger from config import BANNED_TOKENS, CONFIG_PASSWORD -from services.connector_manager import ConnectorManager +from utils.connector_manager import ConnectorManager from utils.file_system import FileSystemUtil file_system = FileSystemUtil() @@ -24,7 +24,7 @@ class AccountsService: default_quotes = { "hyperliquid": "USD", "hyperliquid_perpetual": "USDC", - "xrpl": "RLUSD" + "xrpl": "RLUSD", } def __init__(self, From 8bc308d2f0a79989f9308f41345a4fe9d486faa7 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 6 Jun 2025 14:00:53 +0700 Subject: [PATCH 016/244] (feat) move to utils --- utils/{models.py => backend_api_config_adapter.py} | 0 {services => utils}/bot_archiver.py | 0 {services => utils}/connector_manager.py | 2 +- 3 files changed, 1 insertion(+), 1 deletion(-) rename utils/{models.py => backend_api_config_adapter.py} (100%) rename {services => utils}/bot_archiver.py (100%) rename {services => utils}/connector_manager.py (98%) diff --git a/utils/models.py b/utils/backend_api_config_adapter.py similarity index 100% rename from utils/models.py rename to utils/backend_api_config_adapter.py diff --git a/services/bot_archiver.py b/utils/bot_archiver.py similarity index 100% rename from services/bot_archiver.py rename to 
utils/bot_archiver.py diff --git a/services/connector_manager.py b/utils/connector_manager.py similarity index 98% rename from services/connector_manager.py rename to utils/connector_manager.py index b6412391..6a9729ab 100644 --- a/services/connector_manager.py +++ b/utils/connector_manager.py @@ -6,7 +6,7 @@ from hummingbot.client.config.config_helpers import ClientConfigAdapter, ReadOnlyClientConfigAdapter, get_connector_class from hummingbot.client.settings import AllConnectorSettings -from utils.models import BackendAPIConfigAdapter +from utils.backend_api_config_adapter import BackendAPIConfigAdapter from utils.security import BackendAPISecurity From ee650c6b904bcc3ba213f9f0a2f533322d5e9796 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 6 Jun 2025 14:02:15 +0700 Subject: [PATCH 017/244] (feat) add mqtt manager to replace commonlib --- utils/mqtt_manager.py | 511 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 511 insertions(+) create mode 100644 utils/mqtt_manager.py diff --git a/utils/mqtt_manager.py b/utils/mqtt_manager.py new file mode 100644 index 00000000..f6b0c479 --- /dev/null +++ b/utils/mqtt_manager.py @@ -0,0 +1,511 @@ +import asyncio +import json +import logging +import time +from collections import defaultdict, deque +from contextlib import asynccontextmanager +from typing import Any, Callable, Dict, Optional, Set + +import aiomqtt + +logger = logging.getLogger(__name__) + + +class MQTTManager: + """ + Manages MQTT connections and message handling for Hummingbot bot communication. + Uses asyncio-mqtt (aiomqtt) for asynchronous MQTT operations. + """ + + def __init__(self, host: str, port: int, username: str, password: str): + self.host = host + self.port = port + self.username = username + self.password = password + + # Message handlers by topic pattern + self._handlers: Dict[str, Callable] = {} + + # Bot data storage + self._bot_performance: Dict[str, Dict] = defaultdict(dict) + self._bot_logs: Dict[str, deque] = defaultdict(lambda: deque(maxlen=100)) + self._bot_error_logs: Dict[str, deque] = defaultdict(lambda: deque(maxlen=100)) + + # Auto-discovered bots + self._discovered_bots: Dict[str, float] = {} # bot_id: last_seen_timestamp + + # Connection state + self._connected = False + self._reconnect_interval = 5 # seconds + self._client: Optional[aiomqtt.Client] = None + self._tasks: Set[asyncio.Task] = set() + + # RPC response tracking + self._pending_responses: Dict[str, asyncio.Future] = {} # reply_to_topic: future + + # Subscriptions to restore on reconnect + self._subscriptions = [ + ("hbot/+/log", 1), # Log messages + ("hbot/+/notify", 1), # Notifications + ("hbot/+/status_updates", 1), # Status updates + ("hbot/+/events", 1), # Internal events + ("hbot/+/hb", 1), # Heartbeats + ("hbot/+/performance", 1), # Performance metrics + ("hbot/+/external/event/+", 1), # External events + ("backend-api/response/+", 1), # RPC responses to our reply_to topics + ] + + if username: + logger.info(f"MQTT client configured for user: {username}") + else: + logger.info("MQTT client configured without authentication") + + @asynccontextmanager + async def _get_client(self): + """Get MQTT client with automatic reconnection.""" + while True: + try: + client_id = f"backend-api-{int(time.time())}" + + # Create client with credentials if provided + if self.username and self.password: + client = aiomqtt.Client( + hostname=self.host, + port=self.port, + username=self.username, + password=self.password, + identifier=client_id, + keepalive=60, + ) + else: + client = 
aiomqtt.Client(hostname=self.host, port=self.port, identifier=client_id, keepalive=60) + + async with client: + self._connected = True + logger.info(f"✓ Connected to MQTT broker at {self.host}:{self.port}") + + # Subscribe to topics + for topic, qos in self._subscriptions: + await client.subscribe(topic, qos=qos) + yield client + + except aiomqtt.MqttError as error: + self._connected = False + logger.error(f'MQTT Error "{error}". Reconnecting in {self._reconnect_interval} seconds.') + await asyncio.sleep(self._reconnect_interval) + except Exception as e: + self._connected = False + logger.error(f"Unexpected error: {e}. Reconnecting in {self._reconnect_interval} seconds.") + await asyncio.sleep(self._reconnect_interval) + + async def _handle_messages(self): + """Main message handling loop.""" + async with self._get_client() as client: + self._client = client + async for message in client.messages: + await self._process_message(message) + + async def _process_message(self, message): + """Process incoming MQTT message.""" + try: + topic = str(message.topic) + + # Check if this is an RPC response to our backend-api + if topic.startswith("backend-api/response/"): + await self._handle_rpc_response(topic, message) + return + + topic_parts = topic.split("/") + + # Check if this matches namespace/instance_id/channel pattern + if len(topic_parts) >= 3: + namespace, bot_id, channel = topic_parts[0], topic_parts[1], "/".join(topic_parts[2:]) + # Only process if it's the expected namespace + if namespace == "hbot": + # Auto-discover bot + self._discovered_bots[bot_id] = time.time() + # Parse message + try: + data = json.loads(message.payload.decode("utf-8")) + except json.JSONDecodeError: + data = message.payload.decode("utf-8") + + # Route to appropriate handler based on Hummingbot's topics + if channel == "log": + await self._handle_log(bot_id, data) + elif channel == "notify": + await self._handle_notify(bot_id, data) + elif channel == "status_updates": + await self._handle_status(bot_id, data) + elif channel == "hb": # heartbeat + await self._handle_heartbeat(bot_id, data) + elif channel == "events": + await self._handle_events(bot_id, data) + elif channel == "performance": + await self._handle_performance(bot_id, data) + elif channel.startswith("response/"): + await self._handle_command_response(bot_id, channel, data) + elif channel.startswith("external/event/"): + await self._handle_external_event(bot_id, channel, data) + elif channel in ["history", "start", "stop", "config", "import_strategy"]: + # These are command channels - responses should come on response/* topics + logger.debug(f"Command channel '{channel}' for bot {bot_id} - waiting for response") + else: + logger.info(f"Unknown channel '{channel}' for bot {bot_id}") + + # Call custom handlers + for pattern, handler in self._handlers.items(): + if self._match_topic(pattern, topic): + if asyncio.iscoroutinefunction(handler): + await handler(bot_id, channel, data) + else: + # Run sync handler in executor + await asyncio.get_event_loop().run_in_executor(None, handler, bot_id, channel, data) + except Exception as e: + logger.error(f"Error processing message from {message.topic}: {e}", exc_info=True) + + def _match_topic(self, pattern: str, topic: str) -> bool: + """Check if topic matches pattern (supports + wildcard).""" + pattern_parts = pattern.split("/") + topic_parts = topic.split("/") + + if len(pattern_parts) != len(topic_parts): + return False + + for p, t in zip(pattern_parts, topic_parts): + if p != "+" and p != t: + return False + 
return True + + async def _handle_performance(self, bot_id: str, data: Any): + """Handle performance updates.""" + if isinstance(data, dict): + for controller_id, performance in data.items(): + if bot_id not in self._bot_performance: + self._bot_performance[bot_id] = {} + self._bot_performance[bot_id][controller_id] = performance + + async def _handle_log(self, bot_id: str, data: Any): + """Handle log messages.""" + if isinstance(data, dict): + # Check for different possible field names + level = data.get("level_name") or data.get("levelname") or data.get("level", "INFO") + message = data.get("msg") or data.get("message", "") + + # Normalize the log entry + log_entry = { + "level_name": level, + "msg": message, + "timestamp": data.get("timestamp") or data.get("time") or time.time(), + **data, # Include all original fields + } + + if level.upper() == "ERROR": + self._bot_error_logs[bot_id].append(log_entry) + else: + self._bot_logs[bot_id].append(log_entry) + elif isinstance(data, str): + # Handle plain string logs + log_entry = {"level_name": "INFO", "msg": data, "timestamp": time.time()} + self._bot_logs[bot_id].append(log_entry) + + async def _handle_notify(self, bot_id: str, data: Any): + """Handle notification messages.""" + # Store notifications if needed + + async def _handle_status(self, bot_id: str, data: Any): + """Handle status updates.""" + # Store latest status + + async def _handle_heartbeat(self, bot_id: str, data: Any): + """Handle heartbeat messages.""" + self._discovered_bots[bot_id] = time.time() # Update last seen + + async def _handle_events(self, bot_id: str, data: Any): + """Handle internal events.""" + # Process events as needed + + async def _handle_external_event(self, bot_id: str, channel: str, data: Any): + """Handle external events.""" + event_type = channel.split("/")[-1] + + async def _handle_rpc_response(self, topic: str, message): + """Handle RPC responses on backend-api/response/* topics.""" + try: + # Parse the response data + try: + data = json.loads(message.payload.decode("utf-8")) + except json.JSONDecodeError: + data = message.payload.decode("utf-8") + + # Check if we have a pending response for this topic + if topic in self._pending_responses: + future = self._pending_responses.pop(topic) + if not future.done(): + future.set_result(data) + else: + logger.warning(f"No pending RPC response found for topic: {topic}") + + except Exception as e: + logger.error(f"Error handling RPC response on {topic}: {e}", exc_info=True) + + async def _handle_command_response(self, bot_id: str, channel: str, data: Any): + """Handle command responses (legacy - keeping for backward compatibility).""" + # Extract command from response channel (e.g., response/start/1234567890 or response/history) + channel_parts = channel.split("/") + if len(channel_parts) >= 2: + command = channel_parts[1] + + async def start(self): + """Start the MQTT client.""" + try: + # Create and store the main message handling task + task = asyncio.create_task(self._handle_messages()) + self._tasks.add(task) + task.add_done_callback(self._tasks.discard) + + logger.info("MQTT client started") + + # Wait a bit for connection to establish + for i in range(10): + if self._connected: + logger.info("MQTT connection established successfully") + break + await asyncio.sleep(0.5) + else: + logger.warning("MQTT connection not established after 5 seconds") + + except Exception as e: + logger.error(f"Failed to start MQTT client: {e}", exc_info=True) + + async def stop(self): + """Stop the MQTT client.""" + 
self._connected = False + + # Cancel all running tasks + for task in self._tasks: + task.cancel() + + # Wait for all tasks to complete + await asyncio.gather(*self._tasks, return_exceptions=True) + + logger.info("MQTT client stopped") + + async def publish_command_and_wait( + self, bot_id: str, command: str, data: Dict[str, Any], timeout: float = 30.0, qos: int = 1 + ) -> Optional[Any]: + """ + Publish a command to a bot and wait for the response. + + :param bot_id: The bot instance ID + :param command: The command to send + :param data: Command data + :param timeout: Timeout in seconds to wait for response + :param qos: Quality of Service level + :return: Response data if received, None if timeout or error + """ + if not self._connected or not self._client: + logger.error("Not connected to MQTT broker") + return None + + # Generate unique reply_to topic + timestamp = int(time.time() * 1000) + reply_to_topic = f"backend-api/response/{timestamp}" + + # Create a future to track the response using the reply_to topic as key + future = asyncio.Future() + self._pending_responses[reply_to_topic] = future + + try: + # Send the command with custom reply_to + success = await self._publish_command_with_reply_to(bot_id, command, data, reply_to_topic, qos) + if not success: + self._pending_responses.pop(reply_to_topic, None) + return None + + # Wait for response with timeout + try: + response = await asyncio.wait_for(future, timeout=timeout) + return response + except asyncio.TimeoutError: + logger.warning(f"⏰ Timeout waiting for response from {bot_id} for command '{command}' on {reply_to_topic}") + self._pending_responses.pop(reply_to_topic, None) + return None + + except Exception as e: + logger.error(f"Error sending command and waiting for response: {e}") + self._pending_responses.pop(reply_to_topic, None) + return None + + async def _publish_command_with_reply_to( + self, bot_id: str, command: str, data: Dict[str, Any], reply_to: str, qos: int = 1 + ) -> bool: + """ + Publish a command to a bot with custom reply_to topic. + + :param bot_id: The bot instance ID + :param command: The command to send + :param data: Command data + :param reply_to: Custom reply_to topic + :param qos: Quality of Service level + :return: True if published successfully + """ + if not self._connected or not self._client: + logger.error("Not connected to MQTT broker") + return False + + # Convert dots to slashes for MQTT topic + mqtt_bot_id = bot_id.replace(".", "/") + + # Use the correct topic for each command + topic = f"hbot/{mqtt_bot_id}/{command}" + + # Create the full RPC message structure with custom reply_to + message = { + "header": { + "timestamp": int(time.time() * 1000), # Milliseconds + "reply_to": reply_to, # Custom reply_to topic + "msg_id": int(time.time() * 1000), + "node_id": "backend-api", + "agent": "backend-api", + "properties": {}, + }, + "data": data or {}, + } + + try: + await self._client.publish(topic, payload=json.dumps(message), qos=qos) + return True + except Exception as e: + logger.error(f"Failed to publish command to {bot_id}: {e}") + return False + + async def publish_command(self, bot_id: str, command: str, data: Dict[str, Any], qos: int = 1) -> bool: + """ + Publish a command to a bot using proper RPCMessage Request format. 
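This variant is fire-and-forget: a reply_to header is generated, but no response future is registered, so callers that need the bot's reply should use publish_command_and_wait instead.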
+ + :param bot_id: The bot instance ID + :param command: The command to send + :param data: Command data (should match the specific CommandMessage.Request structure) + :param qos: Quality of Service level + :return: True if published successfully + """ + if not self._connected or not self._client: + logger.error("Not connected to MQTT broker") + return False + + # Convert dots to slashes for MQTT topic + mqtt_bot_id = bot_id.replace(".", "/") + + # Use the correct topic for each command + topic = f"hbot/{mqtt_bot_id}/{command}" + + # Create the full RPC message structure as expected by commlib + # Based on RPCClient._prepare_request method + message = { + "header": { + "timestamp": int(time.time() * 1000), # Milliseconds + "reply_to": f"backend-api-response-{int(time.time() * 1000)}", # Unique response topic + "msg_id": int(time.time() * 1000), + "node_id": "backend-api", + "agent": "backend-api", + "properties": {}, + }, + "data": data or {}, + } + + try: + await self._client.publish(topic, payload=json.dumps(message), qos=qos) + return True + except Exception as e: + logger.error(f"Failed to publish command to {bot_id}: {e}") + return False + + def add_handler(self, topic_pattern: str, handler: Callable): + """ + Add a custom message handler for a topic pattern. + + :param topic_pattern: Topic pattern (supports + wildcard) + :param handler: Callback function(bot_id, channel, data) - can be sync or async + """ + self._handlers[topic_pattern] = handler + + def remove_handler(self, topic_pattern: str): + """Remove a message handler.""" + self._handlers.pop(topic_pattern, None) + + def get_bot_performance(self, bot_id: str) -> Dict[str, Any]: + """Get performance data for a bot.""" + return self._bot_performance.get(bot_id, {}) + + def get_bot_logs(self, bot_id: str) -> list: + """Get recent logs for a bot.""" + return list(self._bot_logs.get(bot_id, [])) + + def get_bot_error_logs(self, bot_id: str) -> list: + """Get recent error logs for a bot.""" + return list(self._bot_error_logs.get(bot_id, [])) + + def clear_bot_data(self, bot_id: str): + """Clear stored data for a bot.""" + self._bot_performance.pop(bot_id, None) + self._bot_logs.pop(bot_id, None) + self._bot_error_logs.pop(bot_id, None) + + def clear_bot_performance(self, bot_id: str): + """Clear only performance data for a bot (useful when bot is stopped).""" + self._bot_performance.pop(bot_id, None) + + @property + def is_connected(self) -> bool: + """Check if connected to MQTT broker.""" + return self._connected + + def get_discovered_bots(self, timeout_seconds: int = 300) -> list: + """Get list of auto-discovered bots. 
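A bot is considered active only if it has published a message within the timeout window.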
+ + :param timeout_seconds: Consider bots inactive after this many seconds without messages + :return: List of active bot IDs + """ + current_time = time.time() + active_bots = [ + bot_id for bot_id, last_seen in self._discovered_bots.items() if current_time - last_seen < timeout_seconds + ] + return active_bots + + async def subscribe_to_bot(self, bot_id: str): + """Subscribe to all topics for a specific bot.""" + if self._connected and self._client: + # Convert dots to slashes for MQTT topic + mqtt_bot_id = bot_id.replace(".", "/") + + # Subscribe to all topics for this specific bot + topic = f"hbot/{mqtt_bot_id}/#" + await self._client.subscribe(topic, qos=1) + else: + logger.warning(f"Cannot subscribe to bot {bot_id} - not connected to MQTT") + + +if __name__ == "__main__": + # Example usage + import sys + + # For Windows compatibility + if sys.platform == "win32": + asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) + + logging.basicConfig(level=logging.INFO) + + async def main(): + mqtt_manager = MQTTManager(host="localhost", port=1883, username="", password="") + + await mqtt_manager.start() + + try: + # Keep running to listen for messages + while True: + await asyncio.sleep(1) + except KeyboardInterrupt: + await mqtt_manager.stop() + + asyncio.run(main()) From 881f204d6d76a0236b938d5dacc586cd5d6dbb67 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 6 Jun 2025 14:02:33 +0700 Subject: [PATCH 018/244] (feat) adapt import --- utils/security.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/utils/security.py b/utils/security.py index 22c89aa4..c87e104d 100644 --- a/utils/security.py +++ b/utils/security.py @@ -12,7 +12,7 @@ from config import PASSWORD_VERIFICATION_PATH from utils.file_system import FileSystemUtil -from utils.models import BackendAPIConfigAdapter +from utils.backend_api_config_adapter import BackendAPIConfigAdapter class BackendAPISecurity(Security): From b866365b63c4b12687efd8f67f2df008ec087537 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 6 Jun 2025 14:05:08 +0700 Subject: [PATCH 019/244] (feat) adapt import --- routers/manage_docker.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/routers/manage_docker.py b/routers/manage_docker.py index 3e1f3423..19cfb99a 100644 --- a/routers/manage_docker.py +++ b/routers/manage_docker.py @@ -4,11 +4,11 @@ from fastapi import APIRouter, HTTPException from models import HummingbotInstanceConfig, ImageName -from services.bot_archiver import BotArchiver -from services.docker_service import DockerManager +from utils.bot_archiver import BotArchiver +from services.docker_service import DockerService router = APIRouter(tags=["Docker"]) -docker_manager = DockerManager() +docker_manager = DockerService() bot_archiver = BotArchiver(os.environ.get("AWS_API_KEY"), os.environ.get("AWS_SECRET_KEY"), os.environ.get("S3_DEFAULT_BUCKET_NAME")) From 0a7102ce946029ad645aeba04da353ce408c91be Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 6 Jun 2025 14:05:27 +0700 Subject: [PATCH 020/244] (feat) refactor to use new mqtt manager --- routers/manage_broker_messages.py | 77 +++++++++++++++++++++++++------ 1 file changed, 62 insertions(+), 15 deletions(-) diff --git a/routers/manage_broker_messages.py b/routers/manage_broker_messages.py index 11b90ed7..3c7ed06d 100644 --- a/routers/manage_broker_messages.py +++ b/routers/manage_broker_messages.py @@ -1,18 +1,30 @@ from fastapi import APIRouter, HTTPException from config import BROKER_HOST, BROKER_PASSWORD, BROKER_PORT, 
BROKER_USERNAME -from models import ImportStrategyAction, StartBotAction, StopBotAction -from services.bots_orchestrator import BotsManager +from models import StartBotAction, StopBotAction +from services.bots_orchestrator import BotsOrchestrator # Initialize the scheduler router = APIRouter(tags=["Broker"]) -bots_manager = BotsManager(broker_host=BROKER_HOST, broker_port=BROKER_PORT, broker_username=BROKER_USERNAME, - broker_password=BROKER_PASSWORD) +# Log the broker configuration +import logging +logger = logging.getLogger(__name__) +logger.info(f"Broker config - Host: {BROKER_HOST}, Port: {BROKER_PORT}, Username: '{BROKER_USERNAME}', Has Password: {bool(BROKER_PASSWORD)}") + +bots_manager = BotsOrchestrator(broker_host=BROKER_HOST, broker_port=BROKER_PORT, broker_username=BROKER_USERNAME, + broker_password=BROKER_PASSWORD) + + +# Startup and shutdown will be handled by lifespan context +_startup_task_created = False @router.on_event("startup") async def startup_event(): - bots_manager.start_update_active_bots_loop() + global _startup_task_created + if not _startup_task_created: + bots_manager.start_update_active_bots_loop() + _startup_task_created = True @router.on_event("shutdown") @@ -27,6 +39,29 @@ def get_active_bots_status(): return {"status": "success", "data": bots_manager.get_all_bots_status()} +@router.get("/mqtt-status") +def get_mqtt_status(): + """Get MQTT connection status and discovered bots.""" + mqtt_connected = bots_manager.mqtt_manager.is_connected + discovered_bots = bots_manager.mqtt_manager.get_discovered_bots() + active_bots = list(bots_manager.active_bots.keys()) + + # Check client state + client_state = "connected" if bots_manager.mqtt_manager.is_connected else "disconnected" + + return { + "status": "success", + "data": { + "mqtt_connected": mqtt_connected, + "discovered_bots": discovered_bots, + "active_bots": active_bots, + "broker_host": bots_manager.broker_host, + "broker_port": bots_manager.broker_port, + "broker_username": bots_manager.broker_username, + "client_state": client_state + } + } + @router.get("/get-bot-status/{bot_name}") def get_bot_status(bot_name: str): response = bots_manager.get_bot_status(bot_name) @@ -39,26 +74,38 @@ def get_bot_status(bot_name: str): @router.get("/get-bot-history/{bot_name}") -def get_bot_history(bot_name: str): - response = bots_manager.get_bot_history(bot_name) +async def get_bot_history( + bot_name: str, + days: int = 0, + verbose: bool = False, + precision: int = None, + timeout: float = 30.0 +): + """Get trading history for a bot with optional parameters.""" + response = await bots_manager.get_bot_history( + bot_name, + days=days, + verbose=verbose, + precision=precision, + timeout=timeout + ) return {"status": "success", "response": response} @router.post("/start-bot") -def start_bot(action: StartBotAction): - response = bots_manager.start_bot(action.bot_name, log_level=action.log_level, script=action.script, +async def start_bot(action: StartBotAction): + response = await bots_manager.start_bot(action.bot_name, log_level=action.log_level, script=action.script, conf=action.conf, async_backend=action.async_backend) return {"status": "success", "response": response} @router.post("/stop-bot") -def stop_bot(action: StopBotAction): - response = bots_manager.stop_bot(action.bot_name, skip_order_cancellation=action.skip_order_cancellation, +async def stop_bot(action: StopBotAction): + response = await bots_manager.stop_bot(action.bot_name, skip_order_cancellation=action.skip_order_cancellation, 
async_backend=action.async_backend) return {"status": "success", "response": response} -@router.post("/import-strategy") -def import_strategy(action: ImportStrategyAction): - response = bots_manager.import_strategy_for_bot(action.bot_name, action.strategy) - return {"status": "success", "response": response} + + + From 39bb34c9383523529ff5a6a534ed0c26c5f9df8c Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 6 Jun 2025 14:05:47 +0700 Subject: [PATCH 021/244] (feat) setup logging --- main.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/main.py b/main.py index e3fe11cf..4bd57bb5 100644 --- a/main.py +++ b/main.py @@ -20,7 +20,18 @@ manage_market_data, manage_performance, ) -from utils.mqtt_exception_handler import setup_global_mqtt_exception_handler + +# Configure logging +import logging + +# Set up logging configuration +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s' +) + +# Enable debug logging for MQTT manager +logging.getLogger('services.mqtt_manager').setLevel(logging.DEBUG) # Load environment variables early load_dotenv() @@ -41,7 +52,6 @@ async def lifespan(app: FastAPI): Handles startup and shutdown events. """ # Startup logic - setup_global_mqtt_exception_handler() yield # Shutdown logic (add cleanup code here if needed) @@ -93,9 +103,7 @@ def auth_user( app.include_router(manage_docker.router, dependencies=[Depends(auth_user)]) app.include_router(manage_accounts.router, dependencies=[Depends(auth_user)]) app.include_router(manage_broker_messages.router, dependencies=[Depends(auth_user)]) -app.include_router(manage_files.configs_router, dependencies=[Depends(auth_user)]) -app.include_router(manage_files.controllers_router, dependencies=[Depends(auth_user)]) -app.include_router(manage_files.scripts_router, dependencies=[Depends(auth_user)]) +app.include_router(manage_files.router, dependencies=[Depends(auth_user)]) app.include_router(manage_market_data.router, dependencies=[Depends(auth_user)]) app.include_router(manage_backtesting.router, dependencies=[Depends(auth_user)]) app.include_router(manage_databases.router, dependencies=[Depends(auth_user)]) From a3ea3326871332a3254ee4cc289f76ddeaa0c5d6 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sun, 8 Jun 2025 16:28:36 +0700 Subject: [PATCH 022/244] (feat) refactor routers into bot orchestration --- routers/manage_accounts.py | 34 ++-- routers/manage_bot_orchestration.py | 288 ++++++++++++++++++++++++++++ routers/manage_broker_messages.py | 111 ----------- routers/manage_docker.py | 33 ++-- 4 files changed, 312 insertions(+), 154 deletions(-) create mode 100644 routers/manage_bot_orchestration.py delete mode 100644 routers/manage_broker_messages.py diff --git a/routers/manage_accounts.py b/routers/manage_accounts.py index 76c92a29..4d45f023 100644 --- a/routers/manage_accounts.py +++ b/routers/manage_accounts.py @@ -1,34 +1,24 @@ from typing import Dict, List -from fastapi import APIRouter, HTTPException +from fastapi import APIRouter, HTTPException, Depends from hummingbot.client.settings import AllConnectorSettings from starlette import status from services.accounts_service import AccountsService from utils.file_system import FileSystemUtil +from deps import get_accounts_service router = APIRouter(tags=["Accounts"]) file_system = FileSystemUtil(base_path="bots/credentials") -accounts_service = AccountsService() - - -@router.on_event("startup") -async def startup_event(): - accounts_service.start_update_account_state_loop() - - 
-@router.on_event("shutdown") -async def shutdown_event(): - accounts_service.stop_update_account_state_loop() @router.get("/accounts-state", response_model=Dict[str, Dict[str, List[Dict]]]) -async def get_all_accounts_state(): +async def get_all_accounts_state(accounts_service: AccountsService = Depends(get_accounts_service)): return accounts_service.get_accounts_state() @router.get("/account-state-history", response_model=List[Dict]) -async def get_account_state_history(): +async def get_account_state_history(accounts_service: AccountsService = Depends(get_accounts_service)): """ Get the historical state of all accounts. """ @@ -45,12 +35,12 @@ async def available_connectors(): @router.get("/connector-config-map/{connector_name}", response_model=List[str]) -async def get_connector_config_map(connector_name: str): +async def get_connector_config_map(connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): return accounts_service.get_connector_config_map(connector_name) @router.get("/all-connectors-config-map", response_model=Dict[str, List[str]]) -async def get_all_connectors_config_map(): +async def get_all_connectors_config_map(accounts_service: AccountsService = Depends(get_accounts_service)): all_config_maps = {} for connector in list(AllConnectorSettings.get_connector_settings().keys()): all_config_maps[connector] = accounts_service.get_connector_config_map(connector) @@ -58,12 +48,12 @@ async def get_all_connectors_config_map(): @router.get("/list-accounts", response_model=List[str]) -async def list_accounts(): +async def list_accounts(accounts_service: AccountsService = Depends(get_accounts_service)): return accounts_service.list_accounts() @router.get("/list-credentials/{account_name}", response_model=List[str]) -async def list_credentials(account_name: str): +async def list_credentials(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): try: return accounts_service.list_credentials(account_name) except FileNotFoundError as e: @@ -71,7 +61,7 @@ async def list_credentials(account_name: str): @router.post("/add-account", status_code=status.HTTP_201_CREATED) -async def add_account(account_name: str): +async def add_account(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): try: accounts_service.add_account(account_name) return {"message": "Credential added successfully."} @@ -80,7 +70,7 @@ async def add_account(account_name: str): @router.post("/delete-account") -async def delete_account(account_name: str): +async def delete_account(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): try: if account_name == "master_account": raise HTTPException(status_code=400, detail="Cannot delete master account.") @@ -91,7 +81,7 @@ async def delete_account(account_name: str): @router.post("/delete-credential/{account_name}/{connector_name}") -async def delete_credential(account_name: str, connector_name: str): +async def delete_credential(account_name: str, connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): try: accounts_service.delete_credentials(account_name, connector_name) return {"message": "Credential deleted successfully."} @@ -100,7 +90,7 @@ async def delete_credential(account_name: str, connector_name: str): @router.post("/add-connector-keys/{account_name}/{connector_name}", status_code=status.HTTP_201_CREATED) -async def add_connector_keys(account_name: str, connector_name: str, keys: Dict): +async def 
add_connector_keys(account_name: str, connector_name: str, keys: Dict, accounts_service: AccountsService = Depends(get_accounts_service)): try: await accounts_service.add_connector_keys(account_name, connector_name, keys) return {"message": "Connector keys added successfully."} diff --git a/routers/manage_bot_orchestration.py b/routers/manage_bot_orchestration.py new file mode 100644 index 00000000..a9374d62 --- /dev/null +++ b/routers/manage_bot_orchestration.py @@ -0,0 +1,288 @@ +import logging +import os +import asyncio +from datetime import datetime +from fastapi import APIRouter, HTTPException, Depends + +from models import StartBotAction, StopBotAction, HummingbotInstanceConfig, V2ControllerDeployment +from services.bots_orchestrator import BotsOrchestrator +from services.docker_service import DockerService +from deps import get_bots_orchestrator, get_docker_service, get_bot_archiver +from utils.file_system import FileSystemUtil +from utils.bot_archiver import BotArchiver + +router = APIRouter(tags=["Bot Orchestration"]) + + +@router.get("/get-active-bots-status") +def get_active_bots_status(bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator)): + """Returns the cached status of all active bots.""" + return {"status": "success", "data": bots_manager.get_all_bots_status()} + + +@router.get("/mqtt-status") +def get_mqtt_status(bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator)): + """Get MQTT connection status and discovered bots.""" + mqtt_connected = bots_manager.mqtt_manager.is_connected + discovered_bots = bots_manager.mqtt_manager.get_discovered_bots() + active_bots = list(bots_manager.active_bots.keys()) + + # Check client state + client_state = "connected" if bots_manager.mqtt_manager.is_connected else "disconnected" + + return { + "status": "success", + "data": { + "mqtt_connected": mqtt_connected, + "discovered_bots": discovered_bots, + "active_bots": active_bots, + "broker_host": bots_manager.broker_host, + "broker_port": bots_manager.broker_port, + "broker_username": bots_manager.broker_username, + "client_state": client_state + } + } + + +@router.get("/get-bot-status/{bot_name}") +def get_bot_status(bot_name: str, bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator)): + response = bots_manager.get_bot_status(bot_name) + if not response: + raise HTTPException(status_code=404, detail="Bot not found") + return { + "status": "success", + "data": response + } + + +@router.get("/get-bot-history/{bot_name}") +async def get_bot_history( + bot_name: str, + days: int = 0, + verbose: bool = False, + precision: int = None, + timeout: float = 30.0, + bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator) +): + """Get trading history for a bot with optional parameters.""" + response = await bots_manager.get_bot_history( + bot_name, + days=days, + verbose=verbose, + precision=precision, + timeout=timeout + ) + return {"status": "success", "response": response} + + +@router.post("/start-bot") +async def start_bot(action: StartBotAction, bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator)): + response = await bots_manager.start_bot(action.bot_name, log_level=action.log_level, script=action.script, + conf=action.conf, async_backend=action.async_backend) + return {"status": "success", "response": response} + + +@router.post("/stop-bot") +async def stop_bot(action: StopBotAction, bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator)): + response = await bots_manager.stop_bot(action.bot_name, 
skip_order_cancellation=action.skip_order_cancellation, + async_backend=action.async_backend) + return {"status": "success", "response": response} + + +@router.post("/stop-and-archive-bot/{bot_name}") +async def stop_and_archive_bot( + bot_name: str, + skip_order_cancellation: bool = True, + async_backend: bool = True, + archive_locally: bool = True, + s3_bucket: str = None, + timeout: float = 30.0, + bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator), + docker_manager: DockerService = Depends(get_docker_service), + bot_archiver: BotArchiver = Depends(get_bot_archiver) +): + """ + Gracefully stop a bot and archive its data. + This combines the complete shutdown workflow: + 1. Stop the bot trading process via MQTT + 2. Wait for graceful shutdown + 3. Stop the Docker container + 4. Archive the bot data (locally or to S3) + 5. Remove the container + """ + try: + # Step 1: Normalize bot name and container name + # Handle both "process-king" and "hummingbot-process-king" input formats + if bot_name.startswith("hummingbot-"): + # If full container name is passed, extract the bot name + actual_bot_name = bot_name.replace("hummingbot-", "") + container_name = bot_name + else: + # If just bot name is passed, construct container name + actual_bot_name = bot_name + container_name = f"hummingbot-{bot_name}" + + logging.info(f"Normalized bot_name: {actual_bot_name}, container_name: {container_name}") + + # Step 2: Validate bot exists in active bots + active_bots = list(bots_manager.active_bots.keys()) + + # Check if bot exists in active bots (could be stored as either format) + bot_found = (actual_bot_name in active_bots) or (container_name in active_bots) + + if not bot_found: + return { + "status": "error", + "message": f"Bot '{actual_bot_name}' not found in active bots. Active bots: {active_bots}. 
Cannot perform graceful shutdown.", + "details": { + "input_name": bot_name, + "actual_bot_name": actual_bot_name, + "container_name": container_name, + "active_bots": active_bots, + "reason": "Bot must be actively managed via MQTT for graceful shutdown" + } + } + + # Step 3: Stop the bot trading process + # Use the format that's actually stored in active bots + bot_name_for_orchestrator = container_name if container_name in active_bots else actual_bot_name + logging.info(f"Stopping bot trading process for {bot_name_for_orchestrator}") + stop_response = await bots_manager.stop_bot( + bot_name_for_orchestrator, + skip_order_cancellation=skip_order_cancellation, + async_backend=async_backend + ) + + if not stop_response or not stop_response.get("success", False): + error_msg = stop_response.get('error', 'Unknown error') if stop_response else 'No response from bot orchestrator' + return { + "status": "error", + "message": f"Failed to stop bot process: {error_msg}", + "details": { + "input_name": bot_name, + "actual_bot_name": actual_bot_name, + "container_name": container_name, + "stop_response": stop_response + } + } + + # Step 4: Wait a bit for graceful shutdown + await asyncio.sleep(5) # Give the bot time to clean up + + # Step 5: Stop the container + logging.info(f"Stopping container {container_name}") + stop_container_response = docker_manager.stop_container(container_name) + + if not stop_container_response.get("success", True): + logging.warning(f"Container stop returned: {stop_container_response}") + + # Step 6: Archive the bot data + instance_dir = os.path.join('bots', 'instances', container_name) + logging.info(f"Archiving bot data from {instance_dir}") + + try: + if archive_locally: + bot_archiver.archive_locally(container_name, instance_dir) + else: + bot_archiver.archive_and_upload(container_name, instance_dir, bucket_name=s3_bucket) + except Exception as e: + logging.error(f"Archive failed: {str(e)}") + # Continue with removal even if archive fails + + # Step 7: Remove the container + logging.info(f"Removing container {container_name}") + remove_response = docker_manager.remove_container(container_name, force=False) + + if not remove_response.get("success"): + # If graceful remove fails, try force remove + logging.warning("Graceful container removal failed, attempting force removal") + remove_response = docker_manager.remove_container(container_name, force=True) + + return { + "status": "success", + "message": f"Bot {actual_bot_name} stopped and archived successfully", + "details": { + "input_name": bot_name, + "actual_bot_name": actual_bot_name, + "container_name": container_name, + "bot_stopped": True, + "container_stopped": stop_container_response.get("success", True), + "archived": archive_locally or s3_bucket is not None, + "container_removed": remove_response.get("success", False) + } + } + + except Exception as e: + logging.error(f"Error in stop_and_archive_bot for {bot_name}: {str(e)}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post("/create-hummingbot-instance") +async def create_hummingbot_instance( + config: HummingbotInstanceConfig, + docker_manager: DockerService = Depends(get_docker_service) +): + """Create a new Hummingbot instance with the specified configuration.""" + logging.info(f"Creating hummingbot instance with config: {config}") + response = docker_manager.create_hummingbot_instance(config) + return response + + +@router.post("/deploy-v2-controllers") +async def deploy_v2_controllers( + deployment: V2ControllerDeployment, + 
docker_manager: DockerService = Depends(get_docker_service) +): + """ + Deploy a V2 strategy with controllers by generating the script config and creating the instance. + This endpoint simplifies the deployment process for V2 controller strategies. + """ + try: + # Generate unique script config filename with timestamp + timestamp = datetime.now().strftime("%Y%m%d-%H%M%S") + script_config_filename = f"{deployment.instance_name}-{timestamp}.yml" + + # Create the script config content + script_config_content = { + "script_file_name": "v2_with_controllers.py", + "candles_config": [], + "markets": {}, + "controllers_config": deployment.controllers_config, + } + + # Add optional drawdown parameters if provided + if deployment.max_global_drawdown is not None: + script_config_content["max_global_drawdown"] = deployment.max_global_drawdown + if deployment.max_controller_drawdown is not None: + script_config_content["max_controller_drawdown"] = deployment.max_controller_drawdown + + # Save the script config to the scripts directory + scripts_dir = os.path.join("bots", "conf", "scripts") + os.makedirs(scripts_dir, exist_ok=True) + + script_config_path = os.path.join(scripts_dir, script_config_filename) + FileSystemUtil.dump_dict_to_yaml(script_config_path, script_config_content) + + logging.info(f"Generated script config: {script_config_filename} with content: {script_config_content}") + + # Create the HummingbotInstanceConfig with the generated script config + instance_config = HummingbotInstanceConfig( + instance_name=deployment.instance_name, + credentials_profile=deployment.credentials_profile, + image=deployment.image, + script="v2_with_controllers.py", + script_config=script_config_filename + ) + + # Deploy the instance using the existing method + response = docker_manager.create_hummingbot_instance(instance_config) + + if response.get("success"): + response["script_config_generated"] = script_config_filename + response["controllers_deployed"] = deployment.controllers_config + + return response + + except Exception as e: + logging.error(f"Error deploying V2 controllers: {str(e)}") + raise HTTPException(status_code=500, detail=str(e)) \ No newline at end of file diff --git a/routers/manage_broker_messages.py b/routers/manage_broker_messages.py deleted file mode 100644 index 3c7ed06d..00000000 --- a/routers/manage_broker_messages.py +++ /dev/null @@ -1,111 +0,0 @@ -from fastapi import APIRouter, HTTPException - -from config import BROKER_HOST, BROKER_PASSWORD, BROKER_PORT, BROKER_USERNAME -from models import StartBotAction, StopBotAction -from services.bots_orchestrator import BotsOrchestrator - -# Initialize the scheduler -router = APIRouter(tags=["Broker"]) - -# Log the broker configuration -import logging -logger = logging.getLogger(__name__) -logger.info(f"Broker config - Host: {BROKER_HOST}, Port: {BROKER_PORT}, Username: '{BROKER_USERNAME}', Has Password: {bool(BROKER_PASSWORD)}") - -bots_manager = BotsOrchestrator(broker_host=BROKER_HOST, broker_port=BROKER_PORT, broker_username=BROKER_USERNAME, - broker_password=BROKER_PASSWORD) - - -# Startup and shutdown will be handled by lifespan context -_startup_task_created = False - -@router.on_event("startup") -async def startup_event(): - global _startup_task_created - if not _startup_task_created: - bots_manager.start_update_active_bots_loop() - _startup_task_created = True - - -@router.on_event("shutdown") -async def shutdown_event(): - # Shutdown the scheduler on application exit - bots_manager.stop_update_active_bots_loop() - - 
-@router.get("/get-active-bots-status") -def get_active_bots_status(): - """Returns the cached status of all active bots.""" - return {"status": "success", "data": bots_manager.get_all_bots_status()} - - -@router.get("/mqtt-status") -def get_mqtt_status(): - """Get MQTT connection status and discovered bots.""" - mqtt_connected = bots_manager.mqtt_manager.is_connected - discovered_bots = bots_manager.mqtt_manager.get_discovered_bots() - active_bots = list(bots_manager.active_bots.keys()) - - # Check client state - client_state = "connected" if bots_manager.mqtt_manager.is_connected else "disconnected" - - return { - "status": "success", - "data": { - "mqtt_connected": mqtt_connected, - "discovered_bots": discovered_bots, - "active_bots": active_bots, - "broker_host": bots_manager.broker_host, - "broker_port": bots_manager.broker_port, - "broker_username": bots_manager.broker_username, - "client_state": client_state - } - } - -@router.get("/get-bot-status/{bot_name}") -def get_bot_status(bot_name: str): - response = bots_manager.get_bot_status(bot_name) - if not response: - raise HTTPException(status_code=404, detail="Bot not found") - return { - "status": "success", - "data": response - } - - -@router.get("/get-bot-history/{bot_name}") -async def get_bot_history( - bot_name: str, - days: int = 0, - verbose: bool = False, - precision: int = None, - timeout: float = 30.0 -): - """Get trading history for a bot with optional parameters.""" - response = await bots_manager.get_bot_history( - bot_name, - days=days, - verbose=verbose, - precision=precision, - timeout=timeout - ) - return {"status": "success", "response": response} - - -@router.post("/start-bot") -async def start_bot(action: StartBotAction): - response = await bots_manager.start_bot(action.bot_name, log_level=action.log_level, script=action.script, - conf=action.conf, async_backend=action.async_backend) - return {"status": "success", "response": response} - - -@router.post("/stop-bot") -async def stop_bot(action: StopBotAction): - response = await bots_manager.stop_bot(action.bot_name, skip_order_cancellation=action.skip_order_cancellation, - async_backend=action.async_backend) - return {"status": "success", "response": response} - - - - - diff --git a/routers/manage_docker.py b/routers/manage_docker.py index 19cfb99a..af760ca6 100644 --- a/routers/manage_docker.py +++ b/routers/manage_docker.py @@ -1,47 +1,45 @@ import logging import os -from fastapi import APIRouter, HTTPException +from fastapi import APIRouter, HTTPException, Depends -from models import HummingbotInstanceConfig, ImageName +from models import ImageName from utils.bot_archiver import BotArchiver from services.docker_service import DockerService +from deps import get_docker_service, get_bot_archiver router = APIRouter(tags=["Docker"]) -docker_manager = DockerService() -bot_archiver = BotArchiver(os.environ.get("AWS_API_KEY"), os.environ.get("AWS_SECRET_KEY"), - os.environ.get("S3_DEFAULT_BUCKET_NAME")) @router.get("/is-docker-running") -async def is_docker_running(): +async def is_docker_running(docker_manager: DockerService = Depends(get_docker_service)): return {"is_docker_running": docker_manager.is_docker_running()} @router.get("/available-images/{image_name}") -async def available_images(image_name: str): +async def available_images(image_name: str, docker_manager: DockerService = Depends(get_docker_service)): available_images = docker_manager.get_available_images() image_tags = [tag for image in available_images["images"] for tag in image.tags if image_name 
in tag] return {"available_images": image_tags} @router.get("/active-containers") -async def active_containers(): +async def active_containers(docker_manager: DockerService = Depends(get_docker_service)): return docker_manager.get_active_containers() @router.get("/exited-containers") -async def exited_containers(): +async def exited_containers(docker_manager: DockerService = Depends(get_docker_service)): return docker_manager.get_exited_containers() @router.post("/clean-exited-containers") -async def clean_exited_containers(): +async def clean_exited_containers(docker_manager: DockerService = Depends(get_docker_service)): return docker_manager.clean_exited_containers() @router.post("/remove-container/{container_name}") -async def remove_container(container_name: str, archive_locally: bool = True, s3_bucket: str = None): +async def remove_container(container_name: str, archive_locally: bool = True, s3_bucket: str = None, docker_manager: DockerService = Depends(get_docker_service), bot_archiver: BotArchiver = Depends(get_bot_archiver)): # Remove the container response = docker_manager.remove_container(container_name) # Form the instance directory path correctly @@ -59,24 +57,17 @@ async def remove_container(container_name: str, archive_locally: bool = True, s3 @router.post("/stop-container/{container_name}") -async def stop_container(container_name: str): +async def stop_container(container_name: str, docker_manager: DockerService = Depends(get_docker_service)): return docker_manager.stop_container(container_name) @router.post("/start-container/{container_name}") -async def start_container(container_name: str): +async def start_container(container_name: str, docker_manager: DockerService = Depends(get_docker_service)): return docker_manager.start_container(container_name) -@router.post("/create-hummingbot-instance") -async def create_hummingbot_instance(config: HummingbotInstanceConfig): - logging.info(f"Creating hummingbot instance with config: {config}") - response = docker_manager.create_hummingbot_instance(config) - return response - - @router.post("/pull-image/") -async def pull_image(image: ImageName): +async def pull_image(image: ImageName, docker_manager: DockerService = Depends(get_docker_service)): try: result = docker_manager.pull_image(image.image_name) return result From 681842b2b7bb7c370fb56ef37349f05405ef0c85 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sun, 8 Jun 2025 16:28:48 +0700 Subject: [PATCH 023/244] (feat) improve creation of docker containers --- services/docker_service.py | 46 +++++++++++++++++++++++++++++++++----- 1 file changed, 40 insertions(+), 6 deletions(-) diff --git a/services/docker_service.py b/services/docker_service.py index de0da63a..17701c31 100644 --- a/services/docker_service.py +++ b/services/docker_service.py @@ -97,11 +97,7 @@ def create_hummingbot_instance(self, config: HummingbotInstanceConfig): # Copy credentials to instance directory source_credentials_dir = os.path.join("bots", 'credentials', config.credentials_profile) - script_config_dir = os.path.join("bots", 'conf', 'scripts') - controllers_config_dir = os.path.join("bots", 'conf', 'controllers') destination_credentials_dir = os.path.join(instance_dir, 'conf') - destination_scripts_config_dir = os.path.join(instance_dir, 'conf', 'scripts') - destination_controllers_config_dir = os.path.join(instance_dir, 'conf', 'controllers') # Remove the destination directory if it already exists if os.path.exists(destination_credentials_dir): @@ -109,8 +105,46 @@ def 
create_hummingbot_instance(self, config: HummingbotInstanceConfig): # Copy the entire contents of source_credentials_dir to destination_credentials_dir shutil.copytree(source_credentials_dir, destination_credentials_dir) - shutil.copytree(script_config_dir, destination_scripts_config_dir) - shutil.copytree(controllers_config_dir, destination_controllers_config_dir) + + # Copy specific script config and referenced controllers if provided + if config.script_config: + script_config_dir = os.path.join("bots", 'conf', 'scripts') + controllers_config_dir = os.path.join("bots", 'conf', 'controllers') + destination_scripts_config_dir = os.path.join(instance_dir, 'conf', 'scripts') + destination_controllers_config_dir = os.path.join(instance_dir, 'conf', 'controllers') + + os.makedirs(destination_scripts_config_dir, exist_ok=True) + + # Copy the specific script config file + source_script_config_file = os.path.join(script_config_dir, config.script_config) + destination_script_config_file = os.path.join(destination_scripts_config_dir, config.script_config) + + if os.path.exists(source_script_config_file): + shutil.copy2(source_script_config_file, destination_script_config_file) + + # Load the script config to find referenced controllers + try: + script_config_content = FileSystemUtil.read_yaml_file(source_script_config_file) + controllers_list = script_config_content.get('controllers_config', []) + + # If there are controllers referenced, copy them + if controllers_list: + os.makedirs(destination_controllers_config_dir, exist_ok=True) + + for controller_file in controllers_list: + source_controller_file = os.path.join(controllers_config_dir, controller_file) + destination_controller_file = os.path.join(destination_controllers_config_dir, controller_file) + + if os.path.exists(source_controller_file): + shutil.copy2(source_controller_file, destination_controller_file) + logging.info(f"Copied controller config: {controller_file}") + else: + logging.warning(f"Controller config file {controller_file} not found in {controllers_config_dir}") + + except Exception as e: + logging.error(f"Error reading script config file {config.script_config}: {e}") + else: + logging.warning(f"Script config file {config.script_config} not found in {script_config_dir}") conf_file_path = f"{instance_dir}/conf/conf_client.yml" client_config = FileSystemUtil.read_yaml_file(conf_file_path) client_config['instance_id'] = instance_name From 3d150bc7663f6aa257a6a1fb29ffdbd5639614ac Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sun, 8 Jun 2025 16:28:54 +0700 Subject: [PATCH 024/244] (feat) add test module --- test/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 test/__init__.py diff --git a/test/__init__.py b/test/__init__.py new file mode 100644 index 00000000..e69de29b From d621b9e5004c1bc5a48194ca6119816f8ea95ce3 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sun, 8 Jun 2025 16:29:05 +0700 Subject: [PATCH 025/244] (feat) register new route --- main.py | 52 ++++++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 46 insertions(+), 6 deletions(-) diff --git a/main.py b/main.py index 4bd57bb5..a0ff83ca 100644 --- a/main.py +++ b/main.py @@ -9,11 +9,15 @@ from fastapi.security import HTTPBasic, HTTPBasicCredentials from fastapi.middleware.cors import CORSMiddleware -from config import LOGFIRE_ENVIRONMENT +from config import LOGFIRE_ENVIRONMENT, BROKER_HOST, BROKER_PASSWORD, BROKER_PORT, BROKER_USERNAME +from services.bots_orchestrator import BotsOrchestrator +from 
services.accounts_service import AccountsService +from services.docker_service import DockerService +from utils.bot_archiver import BotArchiver from routers import ( manage_accounts, manage_backtesting, - manage_broker_messages, + manage_bot_orchestration, manage_databases, manage_docker, manage_files, @@ -51,9 +55,37 @@ async def lifespan(app: FastAPI): Lifespan context manager for the FastAPI application. Handles startup and shutdown events. """ - # Startup logic + # Initialize services + bots_orchestrator = BotsOrchestrator( + broker_host=BROKER_HOST, + broker_port=BROKER_PORT, + broker_username=BROKER_USERNAME, + broker_password=BROKER_PASSWORD + ) + + accounts_service = AccountsService() + docker_service = DockerService() + bot_archiver = BotArchiver( + os.environ.get("AWS_API_KEY"), + os.environ.get("AWS_SECRET_KEY"), + os.environ.get("S3_DEFAULT_BUCKET_NAME") + ) + + # Store services in app state + app.state.bots_orchestrator = bots_orchestrator + app.state.accounts_service = accounts_service + app.state.docker_service = docker_service + app.state.bot_archiver = bot_archiver + + # Start services + bots_orchestrator.start_update_active_bots_loop() + accounts_service.start_update_account_state_loop() + yield - # Shutdown logic (add cleanup code here if needed) + + # Shutdown services + bots_orchestrator.stop_update_active_bots_loop() + accounts_service.stop_update_account_state_loop() # Initialize FastAPI with metadata and lifespan @@ -98,13 +130,21 @@ def auth_user( ) return credentials.username - # Include all routers with authentication app.include_router(manage_docker.router, dependencies=[Depends(auth_user)]) app.include_router(manage_accounts.router, dependencies=[Depends(auth_user)]) -app.include_router(manage_broker_messages.router, dependencies=[Depends(auth_user)]) +app.include_router(manage_bot_orchestration.router, dependencies=[Depends(auth_user)]) app.include_router(manage_files.router, dependencies=[Depends(auth_user)]) app.include_router(manage_market_data.router, dependencies=[Depends(auth_user)]) app.include_router(manage_backtesting.router, dependencies=[Depends(auth_user)]) app.include_router(manage_databases.router, dependencies=[Depends(auth_user)]) app.include_router(manage_performance.router, dependencies=[Depends(auth_user)]) + +@app.get("/") +async def root(): + """API root endpoint returning basic information.""" + return { + "name": "Backend API", + "version": "0.2.0", + "status": "running", + } \ No newline at end of file From 183d82c1966bd0ae3aa61e0fa26fb71c6fd452c6 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sun, 8 Jun 2025 16:29:16 +0700 Subject: [PATCH 026/244] (feat) add model for deploying v2 directly --- models.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/models.py b/models.py index 94aa10b3..e2f09a76 100644 --- a/models.py +++ b/models.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional +from typing import Any, Dict, Optional, List from pydantic import BaseModel @@ -51,3 +51,12 @@ class ConfigureBotAction(BotAction): class ShortcutAction(BotAction): params: list + + +class V2ControllerDeployment(BaseModel): + instance_name: str + credentials_profile: str + controllers_config: List[str] # List of controller config files to use + max_global_drawdown: Optional[float] = None + max_controller_drawdown: Optional[float] = None + image: str = "hummingbot/hummingbot:latest" From 81e803a08fcc4a17653f1b2aa6eebb52c7a9e84d Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 9 Jun 2025 16:03:23 +0700 Subject: 
[PATCH 027/244] (feat) add dependencies for injection --- deps.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 deps.py diff --git a/deps.py b/deps.py new file mode 100644 index 00000000..f1470a82 --- /dev/null +++ b/deps.py @@ -0,0 +1,25 @@ +from fastapi import Request +from services.bots_orchestrator import BotsOrchestrator +from services.accounts_service import AccountsService +from services.docker_service import DockerService +from utils.bot_archiver import BotArchiver + + +def get_bots_orchestrator(request: Request) -> BotsOrchestrator: + """Get BotsOrchestrator service from app state.""" + return request.app.state.bots_orchestrator + + +def get_accounts_service(request: Request) -> AccountsService: + """Get AccountsService from app state.""" + return request.app.state.accounts_service + + +def get_docker_service(request: Request) -> DockerService: + """Get DockerService from app state.""" + return request.app.state.docker_service + + +def get_bot_archiver(request: Request) -> BotArchiver: + """Get BotArchiver from app state.""" + return request.app.state.bot_archiver \ No newline at end of file From 76e8fd7fa37562c2e7b8f05f088927113b960910 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 10 Jun 2025 15:46:05 +0800 Subject: [PATCH 028/244] (feat) initialize db module --- database/__init__.py | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 database/__init__.py diff --git a/database/__init__.py b/database/__init__.py new file mode 100644 index 00000000..6a7e0c88 --- /dev/null +++ b/database/__init__.py @@ -0,0 +1,5 @@ +from .models import AccountState, TokenState, Base +from .connection import AsyncDatabaseManager +from .repositories import AccountRepository + +__all__ = ["AccountState", "TokenState", "Base", "AsyncDatabaseManager", "AccountRepository"] \ No newline at end of file From 726c33d3ccc6ab67782cc2b4d7bef929ddc55fdf Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 10 Jun 2025 15:46:14 +0800 Subject: [PATCH 029/244] (feat) add postgres db --- docker-compose.yml | 25 +++++++++++++++++++++++-- 1 file changed, 23 insertions(+), 2 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 5758652f..c176d84f 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,3 @@ -version: "3.9" - services: backend-api: container_name: backend-api @@ -14,8 +12,11 @@ services: - BROKER_PORT=1883 - USERNAME=admin - PASSWORD=admin + - DATABASE_URL=postgresql+asyncpg://hbot:backend-api@postgres:5432/backend_api networks: - emqx-bridge + depends_on: + - postgres emqx: container_name: hummingbot-broker image: emqx:5 @@ -47,6 +48,25 @@ services: interval: 5s timeout: 25s retries: 5 + postgres: + container_name: backend-postgres + image: postgres:15 + restart: unless-stopped + environment: + - POSTGRES_DB=backend_api + - POSTGRES_USER=hbot + - POSTGRES_PASSWORD=backend-api + volumes: + - postgres-data:/var/lib/postgresql/data + ports: + - "5432:5432" + networks: + - emqx-bridge + healthcheck: + test: ["CMD-SHELL", "pg_isready -U hbot -d backend_api"] + interval: 10s + timeout: 5s + retries: 5 networks: emqx-bridge: @@ -56,3 +76,4 @@ volumes: emqx-data: { } emqx-log: { } emqx-etc: { } + postgres-data: { } From bcf482d4c8c06ec843af0a4a43a8291ca7d374b7 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 10 Jun 2025 15:46:22 +0800 Subject: [PATCH 030/244] (feat) add required dependencies --- environment.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/environment.yml b/environment.yml index 0d356462..e2c2634e 
100644 --- a/environment.yml +++ b/environment.yml @@ -20,3 +20,7 @@ dependencies: - logfire[fastapi] - logfire[system-metrics] - aiomqtt>=2.0.0 + - sqlalchemy>=2.0.0 + - asyncpg + - psycopg2-binary + - greenlet From 7a6ee6bc2db6d64c622bec3f28fd47cdbcc197a1 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 10 Jun 2025 15:47:48 +0800 Subject: [PATCH 031/244] (feat) add db to config --- config.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/config.py b/config.py index 40fbd9f6..18ab0270 100644 --- a/config.py +++ b/config.py @@ -14,3 +14,6 @@ PASSWORD_VERIFICATION_PATH = "bots/credentials/master_account/.password_verification" BANNED_TOKENS = os.getenv("BANNED_TOKENS", "NAV,ARS,ETHW,ETHF").split(",") LOGFIRE_ENVIRONMENT = os.getenv("LOGFIRE_ENVIRONMENT", "dev") + +# Database configuration +DATABASE_URL = os.getenv("DATABASE_URL", "postgresql+asyncpg://hbot:backend-api@localhost:5432/backend_api") From 57d30035044ecd8eabc1456879540da165fe8071 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 10 Jun 2025 15:47:56 +0800 Subject: [PATCH 032/244] (feat) exclude files in dockerignore --- .dockerignore | 63 ++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 62 insertions(+), 1 deletion(-) diff --git a/.dockerignore b/.dockerignore index 2eea525d..8e571199 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1 +1,62 @@ -.env \ No newline at end of file +# Python cache +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python + +# Virtual environments +venv/ +ENV/ +env/ +.venv + +# IDEs +.idea/ +.vscode/ +*.swp +*.swo +*~ + +# OS files +.DS_Store +Thumbs.db + +# Git +.git/ +.gitignore + +# Documentation +*.md +docs/ + +# Tests +test/ +tests/ +pytest_cache/ +.coverage +.pytest_cache/ + +# Development files +.env +.env.local +*.log + +# Build artifacts +build/ +dist/ +*.egg-info/ + +# Docker files (don't copy themselves) +Dockerfile* +docker-compose*.yml +.dockerignore + +# Bot data that should be mounted as volumes +bots/instances/* +bots/data/* +bots/credentials/* +!bots/credentials/master_account/ + +# Archives +bots/archived/ \ No newline at end of file From 93c79057f9106c168b7eb126877f53668c38ad0d Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 10 Jun 2025 16:40:22 +0800 Subject: [PATCH 033/244] (feat) rename routers --- Dockerfile | 56 ++- routers/accounts.py | 454 ++++++++++++++++++ .../{manage_backtesting.py => backtesting.py} | 0 ..._orchestration.py => bot_orchestration.py} | 0 routers/{manage_databases.py => databases.py} | 0 routers/{manage_docker.py => docker.py} | 0 routers/{manage_files.py => files.py} | 0 routers/manage_accounts.py | 99 ---- .../{manage_market_data.py => market_data.py} | 0 .../{manage_performance.py => performance.py} | 0 10 files changed, 497 insertions(+), 112 deletions(-) create mode 100644 routers/accounts.py rename routers/{manage_backtesting.py => backtesting.py} (100%) rename routers/{manage_bot_orchestration.py => bot_orchestration.py} (100%) rename routers/{manage_databases.py => databases.py} (100%) rename routers/{manage_docker.py => docker.py} (100%) rename routers/{manage_files.py => files.py} (100%) delete mode 100644 routers/manage_accounts.py rename routers/{manage_market_data.py => market_data.py} (100%) rename routers/{manage_performance.py => performance.py} (100%) diff --git a/Dockerfile b/Dockerfile index 8b310580..206f8616 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,22 +1,52 @@ -# Start from a base image with Miniconda installed -FROM continuumio/miniconda3 +# Stage 1: Builder stage +FROM continuumio/miniconda3 AS 
builder -# Install system dependencies +# Install build dependencies RUN apt-get update && \ - apt-get install -y sudo libusb-1.0 python3-dev gcc && \ + apt-get install -y python3-dev gcc && \ rm -rf /var/lib/apt/lists/* -# Set the working directory in the container +# Set working directory +WORKDIR /build + +# Copy only the environment file first (for better layer caching) +COPY environment.yml . + +# Create the conda environment +RUN conda env create -f environment.yml && \ + conda clean -afy && \ + rm -rf /root/.cache/pip/* + +# Stage 2: Runtime stage +FROM continuumio/miniconda3-slim + +# Install only runtime dependencies +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + libusb-1.0-0 \ + && rm -rf /var/lib/apt/lists/* + +# Copy the conda environment from builder +COPY --from=builder /opt/conda/envs/backend-api /opt/conda/envs/backend-api + +# Set the working directory WORKDIR /backend-api -# Copy the current directory contents and the Conda environment file into the container -COPY . . +# Copy only necessary application files +COPY main.py config.py deps.py models.py ./ +COPY routers ./routers +COPY services ./services +COPY utils ./utils +COPY database ./database +COPY bots/controllers ./bots/controllers +COPY bots/scripts ./bots/scripts -# Create the environment from the environment.yml file -RUN conda env create -f environment.yml +# Create necessary directories +RUN mkdir -p bots/instances bots/conf bots/credentials bots/data -# Make RUN commands use the new environment -SHELL ["conda", "run", "-n", "backend-api", "/bin/bash", "-c"] +# Set environment variables to ensure conda env is used +ENV PATH="/opt/conda/envs/backend-api/bin:$PATH" +ENV CONDA_DEFAULT_ENV=backend-api -# The code to run when container is started -ENTRYPOINT ["conda", "run", "--no-capture-output", "-n", "backend-api", "uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"] +# Run the application +ENTRYPOINT ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/routers/accounts.py b/routers/accounts.py new file mode 100644 index 00000000..4f639813 --- /dev/null +++ b/routers/accounts.py @@ -0,0 +1,454 @@ +from typing import Dict, List +from datetime import datetime + +from fastapi import APIRouter, HTTPException, Depends, Query +from hummingbot.client.settings import AllConnectorSettings +from starlette import status + +from services.accounts_service import AccountsService +from utils.file_system import FileSystemUtil +from deps import get_accounts_service +from models import PaginatedResponse + +router = APIRouter(tags=["Accounts"]) +file_system = FileSystemUtil(base_path="bots/credentials") + + +@router.get("/accounts-state", response_model=Dict[str, Dict[str, List[Dict]]]) +async def get_all_accounts_state(accounts_service: AccountsService = Depends(get_accounts_service)): + return accounts_service.get_accounts_state() + + +@router.get("/account-state-history", response_model=PaginatedResponse) +async def get_account_state_history( + limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), + cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), + start_time: datetime = Query(default=None, description="Start time for filtering"), + end_time: datetime = Query(default=None, description="End time for filtering"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get the historical state of all accounts with pagination. 
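Returns one page of records plus a next_cursor that can be passed back to fetch the following page.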
+ """ + try: + data, next_cursor, has_more = await accounts_service.load_account_state_history( + limit=limit, + cursor=cursor, + start_time=start_time, + end_time=end_time + ) + + return PaginatedResponse( + data=data, + pagination={ + "limit": limit, + "has_more": has_more, + "next_cursor": next_cursor, + "current_cursor": cursor + } + ) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/available-connectors", response_model=List[str]) +async def available_connectors(): + return list(AllConnectorSettings.get_connector_settings().keys()) + + +@router.get("/connector-config-map/{connector_name}", response_model=List[str]) +async def get_connector_config_map(connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + return accounts_service.get_connector_config_map(connector_name) + + +@router.get("/all-connectors-config-map", response_model=Dict[str, List[str]]) +async def get_all_connectors_config_map(accounts_service: AccountsService = Depends(get_accounts_service)): + all_config_maps = {} + for connector in list(AllConnectorSettings.get_connector_settings().keys()): + all_config_maps[connector] = accounts_service.get_connector_config_map(connector) + return all_config_maps + + +@router.get("/list-accounts", response_model=List[str]) +async def list_accounts(accounts_service: AccountsService = Depends(get_accounts_service)): + return accounts_service.list_accounts() + + +@router.get("/list-credentials/{account_name}", response_model=List[str]) +async def list_credentials(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + try: + return accounts_service.list_credentials(account_name) + except FileNotFoundError as e: + raise HTTPException(status_code=404, detail=str(e)) + + +@router.post("/add-account", status_code=status.HTTP_201_CREATED) +async def add_account(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + try: + accounts_service.add_account(account_name) + return {"message": "Credential added successfully."} + except FileExistsError as e: + raise HTTPException(status_code=400, detail=str(e)) + + +@router.post("/delete-account") +async def delete_account(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + try: + if account_name == "master_account": + raise HTTPException(status_code=400, detail="Cannot delete master account.") + accounts_service.delete_account(account_name) + return {"message": "Credential deleted successfully."} + except FileNotFoundError as e: + raise HTTPException(status_code=404, detail=str(e)) + + +@router.post("/delete-credential/{account_name}/{connector_name}") +async def delete_credential(account_name: str, connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + try: + accounts_service.delete_credentials(account_name, connector_name) + return {"message": "Credential deleted successfully."} + except FileNotFoundError as e: + raise HTTPException(status_code=404, detail=str(e)) + + +@router.post("/add-connector-keys/{account_name}/{connector_name}", status_code=status.HTTP_201_CREATED) +async def add_connector_keys(account_name: str, connector_name: str, keys: Dict, accounts_service: AccountsService = Depends(get_accounts_service)): + try: + await accounts_service.add_connector_keys(account_name, connector_name, keys) + return {"message": "Connector keys added successfully."} + except Exception as e: + 
accounts_service.delete_credentials(account_name, connector_name) + raise HTTPException(status_code=400, detail=str(e)) + + +# Account-specific routes +@router.get("/accounts/{account_name}/state", response_model=Dict[str, List[Dict]]) +async def get_account_state(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + """Get current state of a specific account.""" + state = await accounts_service.get_account_current_state(account_name) + if not state: + raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") + return state + + +@router.get("/accounts/{account_name}/state/history", response_model=PaginatedResponse) +async def get_account_history( + account_name: str, + limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), + cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), + start_time: datetime = Query(default=None, description="Start time for filtering"), + end_time: datetime = Query(default=None, description="End time for filtering"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """Get historical state of a specific account with pagination.""" + data, next_cursor, has_more = await accounts_service.get_account_state_history( + account_name=account_name, + limit=limit, + cursor=cursor, + start_time=start_time, + end_time=end_time + ) + + return PaginatedResponse( + data=data, + pagination={ + "limit": limit, + "has_more": has_more, + "next_cursor": next_cursor, + "current_cursor": cursor, + "filters": { + "account_name": account_name, + "start_time": start_time.isoformat() if start_time else None, + "end_time": end_time.isoformat() if end_time else None + } + } + ) + + +@router.get("/accounts/{account_name}/value", response_model=Dict) +async def get_account_value(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + """Get total portfolio value for a specific account.""" + value_data = await accounts_service.get_portfolio_value(account_name) + if account_name not in value_data["accounts"]: + raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") + return { + "account_name": account_name, + "total_value": value_data["accounts"].get(account_name, 0) + } + + +@router.get("/accounts/{account_name}/tokens", response_model=List[Dict]) +async def get_account_tokens(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + """Get all tokens held by a specific account.""" + state = await accounts_service.get_account_current_state(account_name) + if not state: + raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") + + tokens = {} + for connector_name, token_list in state.items(): + for token_info in token_list: + token = token_info["token"] + if token not in tokens: + tokens[token] = { + "token": token, + "total_units": 0, + "total_value": 0, + "average_price": 0, + "connectors": [] + } + tokens[token]["total_units"] += token_info["units"] + tokens[token]["total_value"] += token_info["value"] + tokens[token]["connectors"].append({ + "connector": connector_name, + "units": token_info["units"], + "value": token_info["value"] + }) + + # Calculate average price + for token_data in tokens.values(): + if token_data["total_units"] > 0: + token_data["average_price"] = token_data["total_value"] / token_data["total_units"] + + return list(tokens.values()) + + +# Connector-specific routes 
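+# These mirror the account-level routes above but scope the result to a single
+# connector, e.g. GET /accounts/master_account/connectors/binance/state
+# (the connector name here is illustrative) returns only that connector's
+# token list from the aggregated accounts state.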
+@router.get("/accounts/{account_name}/connectors/{connector_name}/state", response_model=List[Dict]) +async def get_connector_state(account_name: str, connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + """Get current state of a specific connector.""" + state = await accounts_service.get_connector_current_state(account_name, connector_name) + if not state: + raise HTTPException(status_code=404, detail=f"Connector '{connector_name}' not found for account '{account_name}'") + return state + + +@router.get("/accounts/{account_name}/connectors/{connector_name}/state/history", response_model=PaginatedResponse) +async def get_connector_history( + account_name: str, + connector_name: str, + limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), + cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), + start_time: datetime = Query(default=None, description="Start time for filtering"), + end_time: datetime = Query(default=None, description="End time for filtering"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """Get historical state of a specific connector with pagination.""" + data, next_cursor, has_more = await accounts_service.get_connector_state_history( + account_name=account_name, + connector_name=connector_name, + limit=limit, + cursor=cursor, + start_time=start_time, + end_time=end_time + ) + + return PaginatedResponse( + data=data, + pagination={ + "limit": limit, + "has_more": has_more, + "next_cursor": next_cursor, + "current_cursor": cursor, + "filters": { + "account_name": account_name, + "connector_name": connector_name, + "start_time": start_time.isoformat() if start_time else None, + "end_time": end_time.isoformat() if end_time else None + } + } + ) + + +# Token-specific routes +@router.get("/tokens", response_model=List[str]) +async def get_all_tokens(accounts_service: AccountsService = Depends(get_accounts_service)): + """Get all unique tokens across all accounts and connectors.""" + return await accounts_service.get_all_unique_tokens() + + +@router.get("/tokens/{token}/state", response_model=List[Dict]) +async def get_token_state(token: str, accounts_service: AccountsService = Depends(get_accounts_service)): + """Get current state of a specific token across all accounts.""" + state = await accounts_service.get_token_current_state(token) + if not state: + raise HTTPException(status_code=404, detail=f"Token '{token}' not found") + return state + + +@router.get("/tokens/{token}/accounts", response_model=List[Dict]) +async def get_token_accounts(token: str, accounts_service: AccountsService = Depends(get_accounts_service)): + """Get all accounts that hold a specific token.""" + token_states = await accounts_service.get_token_current_state(token) + if not token_states: + raise HTTPException(status_code=404, detail=f"Token '{token}' not found") + + accounts = {} + for state in token_states: + account_name = state["account_name"] + if account_name not in accounts: + accounts[account_name] = { + "account_name": account_name, + "total_units": 0, + "total_value": 0, + "connectors": [] + } + accounts[account_name]["total_units"] += state["units"] + accounts[account_name]["total_value"] += state["value"] + accounts[account_name]["connectors"].append({ + "connector_name": state["connector_name"], + "units": state["units"], + "value": state["value"] + }) + + return list(accounts.values()) + + +@router.get("/accounts/{account_name}/tokens/{token}", 
response_model=Dict) +async def get_account_token_state(account_name: str, token: str, accounts_service: AccountsService = Depends(get_accounts_service)): + """Get state of a specific token for a specific account.""" + state = await accounts_service.get_account_current_state(account_name) + if not state: + raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") + + token_data = { + "token": token, + "account_name": account_name, + "total_units": 0, + "total_value": 0, + "connectors": [] + } + + for connector_name, token_list in state.items(): + for token_info in token_list: + if token_info["token"] == token: + token_data["total_units"] += token_info["units"] + token_data["total_value"] += token_info["value"] + token_data["connectors"].append({ + "connector_name": connector_name, + "units": token_info["units"], + "value": token_info["value"], + "price": token_info["price"], + "available_units": token_info["available_units"] + }) + + if not token_data["connectors"]: + raise HTTPException(status_code=404, detail=f"Token '{token}' not found for account '{account_name}'") + + return token_data + + +# Portfolio aggregation routes +@router.get("/portfolio/value", response_model=Dict) +async def get_portfolio_value(accounts_service: AccountsService = Depends(get_accounts_service)): + """Get total portfolio value across all accounts.""" + return await accounts_service.get_portfolio_value() + + +@router.get("/portfolio/tokens", response_model=List[Dict]) +async def get_portfolio_tokens(accounts_service: AccountsService = Depends(get_accounts_service)): + """Get all tokens with aggregated holdings across all accounts.""" + all_states = accounts_service.get_accounts_state() + + tokens = {} + for account_name, connectors in all_states.items(): + for connector_name, token_list in connectors.items(): + for token_info in token_list: + token = token_info["token"] + if token not in tokens: + tokens[token] = { + "token": token, + "total_units": 0, + "total_value": 0, + "accounts": {} + } + tokens[token]["total_units"] += token_info["units"] + tokens[token]["total_value"] += token_info["value"] + + if account_name not in tokens[token]["accounts"]: + tokens[token]["accounts"][account_name] = { + "units": 0, + "value": 0 + } + tokens[token]["accounts"][account_name]["units"] += token_info["units"] + tokens[token]["accounts"][account_name]["value"] += token_info["value"] + + # Convert accounts dict to list for response + result = [] + for token, data in tokens.items(): + token_data = { + "token": token, + "total_units": data["total_units"], + "total_value": data["total_value"], + "average_price": data["total_value"] / data["total_units"] if data["total_units"] > 0 else 0, + "accounts": [ + { + "account_name": acc_name, + "units": acc_data["units"], + "value": acc_data["value"] + } + for acc_name, acc_data in data["accounts"].items() + ] + } + result.append(token_data) + + # Sort by total value descending + result.sort(key=lambda x: x["total_value"], reverse=True) + + return result + + +@router.get("/portfolio/distribution", response_model=Dict) +async def get_portfolio_distribution(accounts_service: AccountsService = Depends(get_accounts_service)): + """Get portfolio distribution by token and exchange.""" + all_states = accounts_service.get_accounts_state() + portfolio_value = await accounts_service.get_portfolio_value() + total_value = portfolio_value["total_value"] + + if total_value == 0: + return { + "total_value": 0, + "by_token": {}, + "by_exchange": {}, + "by_account": {} + } + 
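+    # Each share below is value / total_value * 100, so percentages within a
+    # single dimension sum to ~100 (modulo float rounding): e.g. 600 USDT of
+    # value on binance and 400 on kucoin (illustrative) give by_exchange
+    # percentages of 60.0 and 40.0.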
+ # Distribution by token + by_token = {} + by_exchange = {} + + for account_name, connectors in all_states.items(): + for connector_name, token_list in connectors.items(): + if connector_name not in by_exchange: + by_exchange[connector_name] = {"value": 0, "percentage": 0} + + for token_info in token_list: + token = token_info["token"] + value = token_info["value"] + + if token not in by_token: + by_token[token] = {"value": 0, "percentage": 0} + + by_token[token]["value"] += value + by_exchange[connector_name]["value"] += value + + # Calculate percentages + for token_data in by_token.values(): + token_data["percentage"] = (token_data["value"] / total_value) * 100 + + for exchange_data in by_exchange.values(): + exchange_data["percentage"] = (exchange_data["value"] / total_value) * 100 + + # Account distribution from portfolio value + by_account = {} + for account_name, value in portfolio_value["accounts"].items(): + by_account[account_name] = { + "value": value, + "percentage": (value / total_value) * 100 if total_value > 0 else 0 + } + + return { + "total_value": total_value, + "by_token": by_token, + "by_exchange": by_exchange, + "by_account": by_account + } diff --git a/routers/manage_backtesting.py b/routers/backtesting.py similarity index 100% rename from routers/manage_backtesting.py rename to routers/backtesting.py diff --git a/routers/manage_bot_orchestration.py b/routers/bot_orchestration.py similarity index 100% rename from routers/manage_bot_orchestration.py rename to routers/bot_orchestration.py diff --git a/routers/manage_databases.py b/routers/databases.py similarity index 100% rename from routers/manage_databases.py rename to routers/databases.py diff --git a/routers/manage_docker.py b/routers/docker.py similarity index 100% rename from routers/manage_docker.py rename to routers/docker.py diff --git a/routers/manage_files.py b/routers/files.py similarity index 100% rename from routers/manage_files.py rename to routers/files.py diff --git a/routers/manage_accounts.py b/routers/manage_accounts.py deleted file mode 100644 index 4d45f023..00000000 --- a/routers/manage_accounts.py +++ /dev/null @@ -1,99 +0,0 @@ -from typing import Dict, List - -from fastapi import APIRouter, HTTPException, Depends -from hummingbot.client.settings import AllConnectorSettings -from starlette import status - -from services.accounts_service import AccountsService -from utils.file_system import FileSystemUtil -from deps import get_accounts_service - -router = APIRouter(tags=["Accounts"]) -file_system = FileSystemUtil(base_path="bots/credentials") - - -@router.get("/accounts-state", response_model=Dict[str, Dict[str, List[Dict]]]) -async def get_all_accounts_state(accounts_service: AccountsService = Depends(get_accounts_service)): - return accounts_service.get_accounts_state() - - -@router.get("/account-state-history", response_model=List[Dict]) -async def get_account_state_history(accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Get the historical state of all accounts. 
- """ - try: - history = accounts_service.load_account_state_history() - return history - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) - - -@router.get("/available-connectors", response_model=List[str]) -async def available_connectors(): - return list(AllConnectorSettings.get_connector_settings().keys()) - - -@router.get("/connector-config-map/{connector_name}", response_model=List[str]) -async def get_connector_config_map(connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): - return accounts_service.get_connector_config_map(connector_name) - - -@router.get("/all-connectors-config-map", response_model=Dict[str, List[str]]) -async def get_all_connectors_config_map(accounts_service: AccountsService = Depends(get_accounts_service)): - all_config_maps = {} - for connector in list(AllConnectorSettings.get_connector_settings().keys()): - all_config_maps[connector] = accounts_service.get_connector_config_map(connector) - return all_config_maps - - -@router.get("/list-accounts", response_model=List[str]) -async def list_accounts(accounts_service: AccountsService = Depends(get_accounts_service)): - return accounts_service.list_accounts() - - -@router.get("/list-credentials/{account_name}", response_model=List[str]) -async def list_credentials(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): - try: - return accounts_service.list_credentials(account_name) - except FileNotFoundError as e: - raise HTTPException(status_code=404, detail=str(e)) - - -@router.post("/add-account", status_code=status.HTTP_201_CREATED) -async def add_account(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): - try: - accounts_service.add_account(account_name) - return {"message": "Credential added successfully."} - except FileExistsError as e: - raise HTTPException(status_code=400, detail=str(e)) - - -@router.post("/delete-account") -async def delete_account(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): - try: - if account_name == "master_account": - raise HTTPException(status_code=400, detail="Cannot delete master account.") - accounts_service.delete_account(account_name) - return {"message": "Credential deleted successfully."} - except FileNotFoundError as e: - raise HTTPException(status_code=404, detail=str(e)) - - -@router.post("/delete-credential/{account_name}/{connector_name}") -async def delete_credential(account_name: str, connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): - try: - accounts_service.delete_credentials(account_name, connector_name) - return {"message": "Credential deleted successfully."} - except FileNotFoundError as e: - raise HTTPException(status_code=404, detail=str(e)) - - -@router.post("/add-connector-keys/{account_name}/{connector_name}", status_code=status.HTTP_201_CREATED) -async def add_connector_keys(account_name: str, connector_name: str, keys: Dict, accounts_service: AccountsService = Depends(get_accounts_service)): - try: - await accounts_service.add_connector_keys(account_name, connector_name, keys) - return {"message": "Connector keys added successfully."} - except Exception as e: - accounts_service.delete_credentials(account_name, connector_name) - raise HTTPException(status_code=400, detail=str(e)) diff --git a/routers/manage_market_data.py b/routers/market_data.py similarity index 100% rename from routers/manage_market_data.py rename to routers/market_data.py diff 
--git a/routers/manage_performance.py b/routers/performance.py similarity index 100% rename from routers/manage_performance.py rename to routers/performance.py From e5a06b49a11c38c978f0ec46040c4b0932786dc6 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 10 Jun 2025 16:40:29 +0800 Subject: [PATCH 034/244] (feat) adapt routers name --- main.py | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/main.py b/main.py index a0ff83ca..42c98398 100644 --- a/main.py +++ b/main.py @@ -15,14 +15,14 @@ from services.docker_service import DockerService from utils.bot_archiver import BotArchiver from routers import ( - manage_accounts, - manage_backtesting, - manage_bot_orchestration, - manage_databases, - manage_docker, - manage_files, - manage_market_data, - manage_performance, + accounts, + backtesting, + bot_orchestration, + databases, + docker, + files, + market_data, + performance, ) # Configure logging @@ -71,6 +71,9 @@ async def lifespan(app: FastAPI): os.environ.get("S3_DEFAULT_BUCKET_NAME") ) + # Initialize database + await accounts_service.ensure_db_initialized() + # Store services in app state app.state.bots_orchestrator = bots_orchestrator app.state.accounts_service = accounts_service @@ -86,6 +89,9 @@ async def lifespan(app: FastAPI): # Shutdown services bots_orchestrator.stop_update_active_bots_loop() accounts_service.stop_update_account_state_loop() + + # Close database connections + await accounts_service.db_manager.close() # Initialize FastAPI with metadata and lifespan From 58c24d22d36c3e9fcdc08ead4f4dd137229b6767 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 10 Jun 2025 16:40:42 +0800 Subject: [PATCH 035/244] (feat) move models to module --- models.py | 62 ------------------------------------------------------- 1 file changed, 62 deletions(-) delete mode 100644 models.py diff --git a/models.py b/models.py deleted file mode 100644 index e2f09a76..00000000 --- a/models.py +++ /dev/null @@ -1,62 +0,0 @@ -from typing import Any, Dict, Optional, List - -from pydantic import BaseModel - - -class HummingbotInstanceConfig(BaseModel): - instance_name: str - credentials_profile: str - image: str = "hummingbot/hummingbot:latest" - script: Optional[str] = None - script_config: Optional[str] = None - - -class ImageName(BaseModel): - image_name: str - - -class Script(BaseModel): - name: str - content: str - - -class ScriptConfig(BaseModel): - name: str - content: Dict[str, Any] # YAML content represented as a dictionary - - -class BotAction(BaseModel): - bot_name: str - - -class StartBotAction(BotAction): - log_level: str = None - script: str = None - conf: str = None - async_backend: bool = False - - -class StopBotAction(BotAction): - skip_order_cancellation: bool = False - async_backend: bool = False - - -class ImportStrategyAction(BotAction): - strategy: str - - -class ConfigureBotAction(BotAction): - params: dict - - -class ShortcutAction(BotAction): - params: list - - -class V2ControllerDeployment(BaseModel): - instance_name: str - credentials_profile: str - controllers_config: List[str] # List of controller config files to use - max_global_drawdown: Optional[float] = None - max_controller_drawdown: Optional[float] = None - image: str = "hummingbot/hummingbot:latest" From 4fab45096740175d455bc815a8a1ae5659f0883f Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 10 Jun 2025 16:40:55 +0800 Subject: [PATCH 036/244] (feat) add models by domain --- models/__init__.py | 48 ++++++++++++++++++++++++++++++++++++++++++++ models/bot.py | 40 
++++++++++++++++++++++++++++++++++++ models/deployment.py | 19 ++++++++++++++++++ models/docker.py | 5 +++++ models/pagination.py | 37 ++++++++++++++++++++++++++++++++++ 5 files changed, 149 insertions(+) create mode 100644 models/__init__.py create mode 100644 models/bot.py create mode 100644 models/deployment.py create mode 100644 models/docker.py create mode 100644 models/pagination.py diff --git a/models/__init__.py b/models/__init__.py new file mode 100644 index 00000000..458318f3 --- /dev/null +++ b/models/__init__.py @@ -0,0 +1,48 @@ +# Bot models +from .bot import ( + Script, + ScriptConfig, + BotAction, + StartBotAction, + StopBotAction, + ImportStrategyAction, + ConfigureBotAction, + ShortcutAction, +) + +# Deployment models +from .deployment import V2ScriptDeployment, V2ControllerDeployment + +# Docker models +from .docker import DockerImage + +# Pagination models +from .pagination import PaginatedResponse, PaginationParams, TimeRangePaginationParams + +# Backward compatibility aliases +HummingbotInstanceConfig = V2ScriptDeployment # For backward compatibility +ImageName = DockerImage # For backward compatibility + +__all__ = [ + # Bot models + "Script", + "ScriptConfig", + "BotAction", + "StartBotAction", + "StopBotAction", + "ImportStrategyAction", + "ConfigureBotAction", + "ShortcutAction", + # Deployment models + "V2ScriptDeployment", + "V2ControllerDeployment", + # Docker models + "DockerImage", + # Pagination models + "PaginatedResponse", + "PaginationParams", + "TimeRangePaginationParams", + # Backward compatibility + "HummingbotInstanceConfig", # Alias for V2ScriptDeployment + "ImageName", # Alias for DockerImage +] \ No newline at end of file diff --git a/models/bot.py b/models/bot.py new file mode 100644 index 00000000..4932bcad --- /dev/null +++ b/models/bot.py @@ -0,0 +1,40 @@ +from typing import Any, Dict +from pydantic import BaseModel + + +class Script(BaseModel): + name: str + content: str + + +class ScriptConfig(BaseModel): + name: str + content: Dict[str, Any] # YAML content represented as a dictionary + + +class BotAction(BaseModel): + bot_name: str + + +class StartBotAction(BotAction): + log_level: str = None + script: str = None + conf: str = None + async_backend: bool = False + + +class StopBotAction(BotAction): + skip_order_cancellation: bool = False + async_backend: bool = False + + +class ImportStrategyAction(BotAction): + strategy: str + + +class ConfigureBotAction(BotAction): + params: dict + + +class ShortcutAction(BotAction): + params: list \ No newline at end of file diff --git a/models/deployment.py b/models/deployment.py new file mode 100644 index 00000000..402d683c --- /dev/null +++ b/models/deployment.py @@ -0,0 +1,19 @@ +from typing import Optional, List +from pydantic import BaseModel + + +class V2ScriptDeployment(BaseModel): + instance_name: str + credentials_profile: str + image: str = "hummingbot/hummingbot:latest" + script: Optional[str] = None + script_config: Optional[str] = None + + +class V2ControllerDeployment(BaseModel): + instance_name: str + credentials_profile: str + controllers_config: List[str] # List of controller config files to use + max_global_drawdown: Optional[float] = None + max_controller_drawdown: Optional[float] = None + image: str = "hummingbot/hummingbot:latest" \ No newline at end of file diff --git a/models/docker.py b/models/docker.py new file mode 100644 index 00000000..fca7f83e --- /dev/null +++ b/models/docker.py @@ -0,0 +1,5 @@ +from pydantic import BaseModel + + +class DockerImage(BaseModel): + image_name: 
str \ No newline at end of file diff --git a/models/pagination.py b/models/pagination.py new file mode 100644 index 00000000..39bbc0de --- /dev/null +++ b/models/pagination.py @@ -0,0 +1,37 @@ +from datetime import datetime +from typing import Optional, List, Dict, Any +from pydantic import BaseModel, Field, ConfigDict + + +class PaginationParams(BaseModel): + """Common pagination parameters.""" + limit: int = Field(default=100, ge=1, le=1000, description="Number of items per page") + cursor: Optional[str] = Field(None, description="Cursor for next page") + + +class TimeRangePaginationParams(BaseModel): + """Time-based pagination parameters.""" + limit: int = Field(default=100, ge=1, le=1000, description="Number of items per page") + start_time: Optional[datetime] = Field(None, description="Start time for filtering") + end_time: Optional[datetime] = Field(None, description="End time for filtering") + cursor: Optional[str] = Field(None, description="Cursor for next page (ISO timestamp)") + + +class PaginatedResponse(BaseModel): + """Generic paginated response.""" + model_config = ConfigDict( + json_schema_extra={ + "example": { + "data": [...], + "pagination": { + "limit": 100, + "has_more": True, + "next_cursor": "2024-01-10T12:00:00", + "total_count": 500 + } + } + } + ) + + data: List[Dict[str, Any]] + pagination: Dict[str, Any] \ No newline at end of file From bd75cd69f9dff6ac3f7ba1aa318ad7d1f007c12f Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 10 Jun 2025 17:38:22 +0800 Subject: [PATCH 037/244] (feat) add description to models --- models/__init__.py | 8 ++++++- models/bot.py | 50 ++++++++++++++++++++++++++++++-------------- models/deployment.py | 24 ++++++++++----------- models/docker.py | 4 ++-- models/pagination.py | 2 +- 5 files changed, 56 insertions(+), 32 deletions(-) diff --git a/models/__init__.py b/models/__init__.py index 458318f3..ce7f213c 100644 --- a/models/__init__.py +++ b/models/__init__.py @@ -1,7 +1,10 @@ # Bot models from .bot import ( + ControllerType, Script, ScriptConfig, + Controller, + ControllerConfig, BotAction, StartBotAction, StopBotAction, @@ -25,8 +28,11 @@ __all__ = [ # Bot models + "ControllerType", "Script", - "ScriptConfig", + "ScriptConfig", + "Controller", + "ControllerConfig", "BotAction", "StartBotAction", "StopBotAction", diff --git a/models/bot.py b/models/bot.py index 4932bcad..3a447c1e 100644 --- a/models/bot.py +++ b/models/bot.py @@ -1,40 +1,58 @@ -from typing import Any, Dict -from pydantic import BaseModel +from typing import Any, Dict, Optional +from pydantic import BaseModel, Field +from enum import Enum + + +class ControllerType(str, Enum): + DIRECTIONAL_TRADING = "directional_trading" + MARKET_MAKING = "market_making" + GENERIC = "generic" class Script(BaseModel): - name: str - content: str + name: str = Field(description="Script name (without .py extension)") + content: str = Field(description="Python script content") class ScriptConfig(BaseModel): - name: str - content: Dict[str, Any] # YAML content represented as a dictionary + name: str = Field(description="Config name (without .yml extension)") + content: Dict[str, Any] = Field(description="YAML content as dictionary") + + +class Controller(BaseModel): + name: str = Field(description="Controller name (without .py extension)") + type: ControllerType = Field(description="Controller category") + content: str = Field(description="Python controller content") + + +class ControllerConfig(BaseModel): + name: str = Field(description="Config name (without .yml extension)") + 
content: Dict[str, Any] = Field(description="YAML content as dictionary") class BotAction(BaseModel): - bot_name: str + bot_name: str = Field(description="Name of the bot instance to act upon") class StartBotAction(BotAction): - log_level: str = None - script: str = None - conf: str = None - async_backend: bool = False + log_level: Optional[str] = Field(default=None, description="Logging level (DEBUG, INFO, WARNING, ERROR)") + script: Optional[str] = Field(default=None, description="Script name to run (without .py extension)") + conf: Optional[str] = Field(default=None, description="Configuration file name (without .yml extension)") + async_backend: bool = Field(default=False, description="Whether to run in async backend mode") class StopBotAction(BotAction): - skip_order_cancellation: bool = False - async_backend: bool = False + skip_order_cancellation: bool = Field(default=False, description="Whether to skip cancelling open orders when stopping") + async_backend: bool = Field(default=False, description="Whether to run in async backend mode") class ImportStrategyAction(BotAction): - strategy: str + strategy: str = Field(description="Name of the strategy to import") class ConfigureBotAction(BotAction): - params: dict + params: dict = Field(description="Configuration parameters to update") class ShortcutAction(BotAction): - params: list \ No newline at end of file + params: list = Field(description="List of shortcut parameters") \ No newline at end of file diff --git a/models/deployment.py b/models/deployment.py index 402d683c..c0d60abc 100644 --- a/models/deployment.py +++ b/models/deployment.py @@ -1,19 +1,19 @@ from typing import Optional, List -from pydantic import BaseModel +from pydantic import BaseModel, Field class V2ScriptDeployment(BaseModel): - instance_name: str - credentials_profile: str - image: str = "hummingbot/hummingbot:latest" - script: Optional[str] = None - script_config: Optional[str] = None + instance_name: str = Field(description="Unique name for the bot instance") + credentials_profile: str = Field(description="Name of the credentials profile to use") + image: str = Field(default="hummingbot/hummingbot:latest", description="Docker image for the Hummingbot instance") + script: Optional[str] = Field(default=None, description="Name of the script to run (without .py extension)") + script_config: Optional[str] = Field(default=None, description="Name of the script configuration file (without .yml extension)") class V2ControllerDeployment(BaseModel): - instance_name: str - credentials_profile: str - controllers_config: List[str] # List of controller config files to use - max_global_drawdown: Optional[float] = None - max_controller_drawdown: Optional[float] = None - image: str = "hummingbot/hummingbot:latest" \ No newline at end of file + instance_name: str = Field(description="Unique name for the bot instance") + credentials_profile: str = Field(description="Name of the credentials profile to use") + controllers_config: List[str] = Field(description="List of controller configuration files to use (without .yml extension)") + max_global_drawdown: Optional[float] = Field(default=None, description="Maximum allowed global drawdown percentage (0.0-1.0)") + max_controller_drawdown: Optional[float] = Field(default=None, description="Maximum allowed per-controller drawdown percentage (0.0-1.0)") + image: str = Field(default="hummingbot/hummingbot:latest", description="Docker image for the Hummingbot instance") \ No newline at end of file diff --git a/models/docker.py 
b/models/docker.py index fca7f83e..b18fb768 100644 --- a/models/docker.py +++ b/models/docker.py @@ -1,5 +1,5 @@ -from pydantic import BaseModel +from pydantic import BaseModel, Field class DockerImage(BaseModel): - image_name: str \ No newline at end of file + image_name: str = Field(description="Docker image name with optional tag (e.g., 'hummingbot/hummingbot:latest')") \ No newline at end of file diff --git a/models/pagination.py b/models/pagination.py index 39bbc0de..67cfe6f4 100644 --- a/models/pagination.py +++ b/models/pagination.py @@ -22,7 +22,7 @@ class PaginatedResponse(BaseModel): model_config = ConfigDict( json_schema_extra={ "example": { - "data": [...], + "data": [], "pagination": { "limit": 100, "has_more": True, From f68e666846eb497fd61ce4d15669e7d690802345 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 10 Jun 2025 17:54:36 +0800 Subject: [PATCH 038/244] (feat) remove legacy file --- routers/files.py | 209 ----------------------------------------------- 1 file changed, 209 deletions(-) delete mode 100644 routers/files.py diff --git a/routers/files.py b/routers/files.py deleted file mode 100644 index 058b72c7..00000000 --- a/routers/files.py +++ /dev/null @@ -1,209 +0,0 @@ -import json -from typing import Dict, List - -import yaml -from fastapi import APIRouter, File, HTTPException, UploadFile -from starlette import status - -from models import Script, ScriptConfig -from utils.file_system import FileSystemUtil - -router = APIRouter(tags=["Files Management"]) - -file_system = FileSystemUtil() - - -@router.get("/list-scripts", response_model=List[str]) -async def list_scripts(): - return file_system.list_files('scripts') - - -@router.get("/list-scripts-configs", response_model=List[str]) -async def list_scripts_configs(): - return file_system.list_files('conf/scripts') - - -@router.get("/script-config/{script_name}", response_model=dict) -async def get_script_config(script_name: str): - """ - Retrieves the configuration parameters for a given script. - :param script_name: The name of the script. - :return: JSON containing the configuration parameters. - """ - config_class = file_system.load_script_config_class(script_name) - if config_class is None: - raise HTTPException(status_code=404, detail="Script configuration class not found") - - # Extracting fields and default values - config_fields = {field.name: field.default for field in config_class.__fields__.values()} - return json.loads(json.dumps(config_fields, default=str)) # Handling non-serializable types like Decimal - - -@router.get("/list-controllers", response_model=dict) -async def list_controllers(): - directional_trading_controllers = [file for file in file_system.list_files('controllers/directional_trading') if - file != "__init__.py"] - market_making_controllers = [file for file in file_system.list_files('controllers/market_making') if - file != "__init__.py"] - generic_controllers = [file for file in file_system.list_files('controllers/generic') if file != "__init__.py"] - - return {"directional_trading": directional_trading_controllers, - "market_making": market_making_controllers, - "generic": generic_controllers} - -@router.get("/controller-config-pydantic/{controller_type}/{controller_name}", response_model=dict) -async def get_controller_config_pydantic(controller_type: str, controller_name: str): - """ - Retrieves the configuration parameters for a given controller. - :param controller_name: The name of the controller. - :return: JSON containing the configuration parameters. 
- """ - config_class = file_system.load_controller_config_class(controller_type, controller_name) - if config_class is None: - raise HTTPException(status_code=404, detail="Controller configuration class not found") - - # Extracting fields and default values - config_fields = {name: field.default for name, field in config_class.model_fields.items()} - return json.loads(json.dumps(config_fields, default=str)) - - -@router.get("/list-controllers-configs", response_model=List[str]) -async def list_controllers_configs(): - return file_system.list_files('conf/controllers') - - -@router.get("/controller-config/{controller_name}", response_model=dict) -async def get_controller_config(controller_name: str): - config = file_system.read_yaml_file(f"bots/conf/controllers/{controller_name}.yml") - return config - - -@router.get("/all-controller-configs", response_model=List[dict]) -async def get_all_controller_configs(): - configs = [] - for controller in file_system.list_files('conf/controllers'): - config = file_system.read_yaml_file(f"bots/conf/controllers/{controller}") - configs.append(config) - return configs - - -@router.get("/all-controller-configs/bot/{bot_name}", response_model=List[dict]) -async def get_all_controller_configs_for_bot(bot_name: str): - configs = [] - bots_config_path = f"instances/{bot_name}/conf/controllers" - if not file_system.path_exists(bots_config_path): - raise HTTPException(status_code=400, detail="Bot not found.") - for controller in file_system.list_files(bots_config_path): - config = file_system.read_yaml_file(f"bots/{bots_config_path}/{controller}") - configs.append(config) - return configs - - -@router.post("/update-controller-config/bot/{bot_name}/{controller_id}") -async def update_controller_config(bot_name: str, controller_id: str, config: Dict): - bots_config_path = f"instances/{bot_name}/conf/controllers" - if not file_system.path_exists(bots_config_path): - raise HTTPException(status_code=400, detail="Bot not found.") - current_config = file_system.read_yaml_file(f"bots/{bots_config_path}/{controller_id}.yml") - current_config.update(config) - file_system.dump_dict_to_yaml(f"bots/{bots_config_path}/{controller_id}.yml", current_config) - return {"message": "Controller configuration updated successfully."} - - -@router.post("/add-script", status_code=status.HTTP_201_CREATED) -async def add_script(script: Script, override: bool = False): - try: - file_system.add_file('scripts', script.name + '.py', script.content, override) - return {"message": "Script added successfully."} - except FileExistsError as e: - raise HTTPException(status_code=400, detail=str(e)) - - -@router.post("/upload-script") -async def upload_script(config_file: UploadFile = File(...), override: bool = False): - try: - contents = await config_file.read() - file_system.add_file('scripts', config_file.filename, contents.decode(), override) - return {"message": "Script uploaded successfully."} - except FileExistsError as e: - raise HTTPException(status_code=400, detail=str(e)) - - -@router.post("/add-script-config", status_code=status.HTTP_201_CREATED) -async def add_script_config(config: ScriptConfig): - try: - yaml_content = yaml.dump(config.content) - - file_system.add_file('conf/scripts', config.name + '.yml', yaml_content, override=True) - return {"message": "Script configuration uploaded successfully."} - except Exception as e: # Consider more specific exception handling - raise HTTPException(status_code=400, detail=str(e)) - - -@router.post("/upload-script-config") -async def 
upload_script_config(config_file: UploadFile = File(...), override: bool = False): - try: - contents = await config_file.read() - file_system.add_file('conf/scripts', config_file.filename, contents.decode(), override) - return {"message": "Script configuration uploaded successfully."} - except FileExistsError as e: - raise HTTPException(status_code=400, detail=str(e)) - - -@router.post("/add-controller-config", status_code=status.HTTP_201_CREATED) -async def add_controller_config(config: ScriptConfig): - try: - yaml_content = yaml.dump(config.content) - - file_system.add_file('conf/controllers', config.name + '.yml', yaml_content, override=True) - return {"message": "Controller configuration uploaded successfully."} - except Exception as e: - raise HTTPException(status_code=400, detail=str(e)) - - -@router.post("/upload-controller-config") -async def upload_controller_config(config_file: UploadFile = File(...), override: bool = False): - try: - contents = await config_file.read() - file_system.add_file('conf/controllers', config_file.filename, contents.decode(), override) - return {"message": "Controller configuration uploaded successfully."} - except FileExistsError as e: - raise HTTPException(status_code=400, detail=str(e)) - - -@router.post("/delete-controller-config", status_code=status.HTTP_200_OK) -async def delete_controller_config(config_name: str): - try: - file_system.delete_file('conf/controllers', config_name) - return {"message": f"Controller configuration {config_name} deleted successfully."} - except FileNotFoundError as e: - raise HTTPException(status_code=404, detail=str(e)) - - -@router.post("/delete-script-config", status_code=status.HTTP_200_OK) -async def delete_script_config(config_name: str): - try: - file_system.delete_file('conf/scripts', config_name) - return {"message": f"Script configuration {config_name} deleted successfully."} - except FileNotFoundError as e: - raise HTTPException(status_code=404, detail=str(e)) - - -@router.post("/delete-all-controller-configs", status_code=status.HTTP_200_OK) -async def delete_all_controller_configs(): - try: - for file in file_system.list_files('conf/controllers'): - file_system.delete_file('conf/controllers', file) - return {"message": "All controller configurations deleted successfully."} - except FileNotFoundError as e: - raise HTTPException(status_code=404, detail=str(e)) - - -@router.post("/delete-all-script-configs", status_code=status.HTTP_200_OK) -async def delete_all_script_configs(): - try: - for file in file_system.list_files('conf/scripts'): - file_system.delete_file('conf/scripts', file) - return {"message": "All script configurations deleted successfully."} - except FileNotFoundError as e: - raise HTTPException(status_code=404, detail=str(e)) From b4a8fa45df886b3e2c4973af4e4f6e98d019f975 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 10 Jun 2025 17:55:02 +0800 Subject: [PATCH 039/244] (feat) refactor and simplify routes --- routers/accounts.py | 12 +-- routers/backtesting.py | 2 +- routers/bot_orchestration.py | 10 +- routers/controllers.py | 180 +++++++++++++++++++++++++++++++++++ routers/databases.py | 12 +-- routers/docker.py | 4 +- routers/market_data.py | 2 +- routers/performance.py | 4 +- routers/scripts.py | 101 ++++++++++++++++++++ 9 files changed, 304 insertions(+), 23 deletions(-) create mode 100644 routers/controllers.py create mode 100644 routers/scripts.py diff --git a/routers/accounts.py b/routers/accounts.py index 4f639813..5e1dc5b1 100644 --- a/routers/accounts.py +++ 
b/routers/accounts.py @@ -10,16 +10,16 @@ from deps import get_accounts_service from models import PaginatedResponse -router = APIRouter(tags=["Accounts"]) +router = APIRouter(tags=["Accounts"], prefix="/accounts") file_system = FileSystemUtil(base_path="bots/credentials") -@router.get("/accounts-state", response_model=Dict[str, Dict[str, List[Dict]]]) +@router.get("/state", response_model=Dict[str, Dict[str, List[Dict]]]) async def get_all_accounts_state(accounts_service: AccountsService = Depends(get_accounts_service)): return accounts_service.get_accounts_state() -@router.get("/account-state-history", response_model=PaginatedResponse) +@router.get("/history", response_model=PaginatedResponse) async def get_account_state_history( limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), @@ -51,7 +51,7 @@ async def get_account_state_history( raise HTTPException(status_code=500, detail=str(e)) -@router.get("/available-connectors", response_model=List[str]) +@router.get("/connectors", response_model=List[str]) async def available_connectors(): return list(AllConnectorSettings.get_connector_settings().keys()) @@ -69,12 +69,12 @@ async def get_all_connectors_config_map(accounts_service: AccountsService = Depe return all_config_maps -@router.get("/list-accounts", response_model=List[str]) +@router.get("/", response_model=List[str]) async def list_accounts(accounts_service: AccountsService = Depends(get_accounts_service)): return accounts_service.list_accounts() -@router.get("/list-credentials/{account_name}", response_model=List[str]) +@router.get("/{account_name}/credentials", response_model=List[str]) async def list_credentials(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): try: return accounts_service.list_credentials(account_name) diff --git a/routers/backtesting.py b/routers/backtesting.py index 27457431..16de5a4b 100644 --- a/routers/backtesting.py +++ b/routers/backtesting.py @@ -7,7 +7,7 @@ from config import CONTROLLERS_MODULE, CONTROLLERS_PATH -router = APIRouter(tags=["Backtesting"]) +router = APIRouter(tags=["Backtesting"], prefix="/backtesting") candles_factory = CandlesFactory() backtesting_engine = BacktestingEngineBase() diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py index a9374d62..deff7bf1 100644 --- a/routers/bot_orchestration.py +++ b/routers/bot_orchestration.py @@ -11,16 +11,16 @@ from utils.file_system import FileSystemUtil from utils.bot_archiver import BotArchiver -router = APIRouter(tags=["Bot Orchestration"]) +router = APIRouter(tags=["Bot Orchestration"], prefix="/bot-orchestration") -@router.get("/get-active-bots-status") +@router.get("/status") def get_active_bots_status(bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator)): """Returns the cached status of all active bots.""" return {"status": "success", "data": bots_manager.get_all_bots_status()} -@router.get("/mqtt-status") +@router.get("/mqtt") def get_mqtt_status(bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator)): """Get MQTT connection status and discovered bots.""" mqtt_connected = bots_manager.mqtt_manager.is_connected @@ -44,7 +44,7 @@ def get_mqtt_status(bots_manager: BotsOrchestrator = Depends(get_bots_orchestrat } -@router.get("/get-bot-status/{bot_name}") +@router.get("/{bot_name}/status") def get_bot_status(bot_name: str, bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator)): response = 
bots_manager.get_bot_status(bot_name) if not response: @@ -55,7 +55,7 @@ def get_bot_status(bot_name: str, bots_manager: BotsOrchestrator = Depends(get_b } -@router.get("/get-bot-history/{bot_name}") +@router.get("/{bot_name}/history") async def get_bot_history( bot_name: str, days: int = 0, diff --git a/routers/controllers.py b/routers/controllers.py new file mode 100644 index 00000000..3e77e95e --- /dev/null +++ b/routers/controllers.py @@ -0,0 +1,180 @@ +import json +import yaml +from typing import Dict, List + +from fastapi import APIRouter, HTTPException +from starlette import status + +from models import Controller, ControllerConfig, ControllerType +from utils.file_system import FileSystemUtil + +router = APIRouter(tags=["Controllers"], prefix="/controllers") +file_system = FileSystemUtil() + + +@router.get("/", response_model=Dict[str, List[str]]) +async def list_controllers(): + """List all controllers organized by type.""" + result = {} + for controller_type in ControllerType: + try: + files = file_system.list_files(f'controllers/{controller_type.value}') + result[controller_type.value] = [ + f.replace('.py', '') for f in files + if f.endswith('.py') and f != "__init__.py" + ] + except FileNotFoundError: + result[controller_type.value] = [] + return result + + +@router.get("/{controller_type}", response_model=List[str]) +async def list_controllers_by_type(controller_type: ControllerType): + """List controllers of a specific type.""" + try: + files = file_system.list_files(f'controllers/{controller_type.value}') + return [f.replace('.py', '') for f in files if f.endswith('.py') and f != "__init__.py"] + except FileNotFoundError: + return [] + + +@router.get("/{controller_type}/{controller_name}", response_model=Dict[str, str]) +async def get_controller(controller_type: ControllerType, controller_name: str): + """Get controller content by type and name.""" + try: + content = file_system.read_file(f"controllers/{controller_type.value}/{controller_name}.py") + return { + "name": controller_name, + "type": controller_type.value, + "content": content + } + except FileNotFoundError: + raise HTTPException( + status_code=404, + detail=f"Controller '{controller_name}' not found in '{controller_type.value}'" + ) + + +@router.post("/{controller_type}", status_code=status.HTTP_201_CREATED) +async def create_or_update_controller(controller_type: ControllerType, controller: Controller): + """Create or update a controller.""" + if controller.type != controller_type: + raise HTTPException( + status_code=400, + detail=f"Controller type mismatch: URL has '{controller_type}', body has '{controller.type}'" + ) + + try: + file_system.add_file( + f'controllers/{controller_type.value}', + f"{controller.name}.py", + controller.content, + override=True + ) + return {"message": f"Controller '{controller.name}' saved successfully in '{controller_type.value}'"} + except Exception as e: + raise HTTPException(status_code=400, detail=str(e)) + + +@router.delete("/{controller_type}/{controller_name}") +async def delete_controller(controller_type: ControllerType, controller_name: str): + """Delete a controller.""" + try: + file_system.delete_file(f'controllers/{controller_type.value}', f"{controller_name}.py") + return {"message": f"Controller '{controller_name}' deleted successfully from '{controller_type.value}'"} + except FileNotFoundError: + raise HTTPException( + status_code=404, + detail=f"Controller '{controller_name}' not found in '{controller_type.value}'" + ) + + +# Controller Configuration endpoints 
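+# A minimal sketch of the round-trip these endpoints provide (controller and
+# field names are illustrative only):
+#
+#     POST /controllers/pmm_simple/config
+#     {"connector_name": "binance", "trading_pair": "BTC-USDT"}
+#
+# persists bots/conf/controllers/pmm_simple.yml, and a subsequent
+# GET /controllers/pmm_simple/config returns the same mapping parsed back
+# from the YAML file.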
+@router.get("/{controller_name}/config", response_model=Dict) +async def get_controller_config(controller_name: str): + """Get controller configuration.""" + try: + config = file_system.read_yaml_file(f"bots/conf/controllers/{controller_name}.yml") + return config + except FileNotFoundError: + raise HTTPException(status_code=404, detail=f"Configuration for controller '{controller_name}' not found") + + +@router.get("/{controller_type}/{controller_name}/config/template", response_model=Dict) +async def get_controller_config_template(controller_type: ControllerType, controller_name: str): + """Get controller configuration template with default values.""" + config_class = file_system.load_controller_config_class(controller_type.value, controller_name) + if config_class is None: + raise HTTPException( + status_code=404, + detail=f"Controller configuration class for '{controller_name}' not found" + ) + + # Extract fields and default values + config_fields = {name: field.default for name, field in config_class.model_fields.items()} + return json.loads(json.dumps(config_fields, default=str)) + + +@router.post("/{controller_name}/config", status_code=status.HTTP_201_CREATED) +async def create_or_update_controller_config(controller_name: str, config: Dict): + """Create or update controller configuration.""" + try: + yaml_content = yaml.dump(config, default_flow_style=False) + file_system.add_file('conf/controllers', f"{controller_name}.yml", yaml_content, override=True) + return {"message": f"Configuration for controller '{controller_name}' saved successfully"} + except Exception as e: + raise HTTPException(status_code=400, detail=str(e)) + + +@router.delete("/{controller_name}/config") +async def delete_controller_config(controller_name: str): + """Delete controller configuration.""" + try: + file_system.delete_file('conf/controllers', f"{controller_name}.yml") + return {"message": f"Configuration for controller '{controller_name}' deleted successfully"} + except FileNotFoundError: + raise HTTPException(status_code=404, detail=f"Configuration for controller '{controller_name}' not found") + + +@router.get("/configs/", response_model=List[str]) +async def list_controller_configs(): + """List all controller configurations.""" + return [f.replace('.yml', '') for f in file_system.list_files('conf/controllers') if f.endswith('.yml')] + + +# Bot-specific controller config endpoints +@router.get("/bots/{bot_name}/configs", response_model=List[Dict]) +async def get_bot_controller_configs(bot_name: str): + """Get all controller configurations for a specific bot.""" + bots_config_path = f"instances/{bot_name}/conf/controllers" + if not file_system.path_exists(bots_config_path): + raise HTTPException(status_code=404, detail=f"Bot '{bot_name}' not found") + + configs = [] + for controller_file in file_system.list_files(bots_config_path): + if controller_file.endswith('.yml'): + config = file_system.read_yaml_file(f"bots/{bots_config_path}/{controller_file}") + config['_config_name'] = controller_file.replace('.yml', '') + configs.append(config) + return configs + + +@router.post("/bots/{bot_name}/{controller_name}/config") +async def update_bot_controller_config(bot_name: str, controller_name: str, config: Dict): + """Update controller configuration for a specific bot.""" + bots_config_path = f"instances/{bot_name}/conf/controllers" + if not file_system.path_exists(bots_config_path): + raise HTTPException(status_code=404, detail=f"Bot '{bot_name}' not found") + + try: + current_config = 
file_system.read_yaml_file(f"bots/{bots_config_path}/{controller_name}.yml") + current_config.update(config) + file_system.dump_dict_to_yaml(f"bots/{bots_config_path}/{controller_name}.yml", current_config) + return {"message": f"Controller configuration for bot '{bot_name}' updated successfully"} + except FileNotFoundError: + raise HTTPException( + status_code=404, + detail=f"Controller configuration '{controller_name}' not found for bot '{bot_name}'" + ) + except Exception as e: + raise HTTPException(status_code=400, detail=str(e)) \ No newline at end of file diff --git a/routers/databases.py b/routers/databases.py index 9ec1a01f..5973dcc8 100644 --- a/routers/databases.py +++ b/routers/databases.py @@ -10,16 +10,16 @@ from utils.file_system import FileSystemUtil -router = APIRouter(tags=["Databases"]) +router = APIRouter(tags=["Databases"], prefix="/databases") file_system = FileSystemUtil() -@router.post("/list-databases", response_model=List[str]) +@router.get("/", response_model=List[str]) async def list_databases(): return file_system.list_databases() -@router.post("/read-databases", response_model=List[Dict[str, Any]]) +@router.post("/read", response_model=List[Dict[str, Any]]) async def read_databases(db_paths: List[str] = None): dbs = [] for db_path in db_paths: @@ -51,7 +51,7 @@ async def read_databases(db_paths: List[str] = None): return dbs -@router.post("/create-checkpoint", response_model=Dict[str, Any]) +@router.post("/checkpoint", response_model=Dict[str, Any]) async def create_checkpoint(db_paths: List[str]): try: dbs = await read_databases(db_paths) @@ -76,12 +76,12 @@ async def create_checkpoint(db_paths: List[str]): return {"message": f"Error: {str(e)}"} -@router.post("/list-checkpoints", response_model=List[str]) +@router.get("/checkpoints", response_model=List[str]) async def list_checkpoints(full_path: bool): return file_system.list_checkpoints(full_path) -@router.post("/load-checkpoint") +@router.post("/checkpoints/load") async def load_checkpoint(checkpoint_path: str): try: etl = ETLPerformance(checkpoint_path) diff --git a/routers/docker.py b/routers/docker.py index af760ca6..3a75f926 100644 --- a/routers/docker.py +++ b/routers/docker.py @@ -8,10 +8,10 @@ from services.docker_service import DockerService from deps import get_docker_service, get_bot_archiver -router = APIRouter(tags=["Docker"]) +router = APIRouter(tags=["Docker"], prefix="/docker") -@router.get("/is-docker-running") +@router.get("/running") async def is_docker_running(docker_manager: DockerService = Depends(get_docker_service)): return {"is_docker_running": docker_manager.is_docker_running()} diff --git a/routers/market_data.py b/routers/market_data.py index 933cf093..38aadf8f 100644 --- a/routers/market_data.py +++ b/routers/market_data.py @@ -4,7 +4,7 @@ from hummingbot.data_feed.candles_feed.candles_factory import CandlesFactory from hummingbot.data_feed.candles_feed.data_types import CandlesConfig, HistoricalCandlesConfig -router = APIRouter(tags=["Market"]) +router = APIRouter(tags=["Market"], prefix="/market-data") candles_factory = CandlesFactory() diff --git a/routers/performance.py b/routers/performance.py index 81a51bf1..81ae9ad9 100644 --- a/routers/performance.py +++ b/routers/performance.py @@ -5,10 +5,10 @@ from utils.etl_databases import PerformanceDataSource -router = APIRouter(tags=["Performance"]) +router = APIRouter(tags=["Performance"], prefix="/performance") -@router.post("/get-performance-results") +@router.post("/results") async def get_performance_results(payload: 
Dict[str, Any]):
     executors = payload.get("executors")
     data_source = PerformanceDataSource(executors)
diff --git a/routers/scripts.py b/routers/scripts.py
new file mode 100644
index 00000000..c312c168
--- /dev/null
+++ b/routers/scripts.py
@@ -0,0 +1,101 @@
+import json
+import yaml
+from typing import Dict, List
+
+from fastapi import APIRouter, HTTPException
+from starlette import status
+
+from models import Script, ScriptConfig
+from utils.file_system import FileSystemUtil
+
+router = APIRouter(tags=["Scripts"], prefix="/scripts")
+file_system = FileSystemUtil()
+
+
+@router.get("/", response_model=List[str])
+async def list_scripts():
+    """List all available scripts."""
+    return [f.replace('.py', '') for f in file_system.list_files('scripts') if f.endswith('.py')]
+
+
+@router.get("/{script_name}", response_model=Dict[str, str])
+async def get_script(script_name: str):
+    """Get script content by name."""
+    try:
+        content = file_system.read_file(f"scripts/{script_name}.py")
+        return {
+            "name": script_name,
+            "content": content
+        }
+    except FileNotFoundError:
+        raise HTTPException(status_code=404, detail=f"Script '{script_name}' not found")
+
+
+@router.post("/", status_code=status.HTTP_201_CREATED)
+async def create_or_update_script(script: Script):
+    """Create or update a script."""
+    try:
+        file_system.add_file('scripts', f"{script.name}.py", script.content, override=True)
+        return {"message": f"Script '{script.name}' saved successfully"}
+    except Exception as e:
+        raise HTTPException(status_code=400, detail=str(e))
+
+
+@router.delete("/{script_name}")
+async def delete_script(script_name: str):
+    """Delete a script."""
+    try:
+        file_system.delete_file('scripts', f"{script_name}.py")
+        return {"message": f"Script '{script_name}' deleted successfully"}
+    except FileNotFoundError:
+        raise HTTPException(status_code=404, detail=f"Script '{script_name}' not found")
+
+
+# Script Configuration endpoints
+@router.get("/{script_name}/config", response_model=Dict)
+async def get_script_config(script_name: str):
+    """Get script configuration."""
+    try:
+        config = file_system.read_yaml_file(f"bots/conf/scripts/{script_name}.yml")
+        return config
+    except FileNotFoundError:
+        raise HTTPException(status_code=404, detail=f"Configuration for script '{script_name}' not found")
+
+
+@router.get("/{script_name}/config/template", response_model=Dict)
+async def get_script_config_template(script_name: str):
+    """Get script configuration template with default values."""
+    config_class = file_system.load_script_config_class(script_name)
+    if config_class is None:
+        raise HTTPException(status_code=404, detail=f"Script configuration class for '{script_name}' not found")
+
+    # Extract fields and default values
+    config_fields = {name: field.default for name, field in config_class.model_fields.items()}
+    return json.loads(json.dumps(config_fields, default=str))
+
+
+@router.post("/{script_name}/config", status_code=status.HTTP_201_CREATED)
+async def create_or_update_script_config(script_name: str, config: Dict):
+    """Create or update script configuration."""
+    try:
+        yaml_content = yaml.dump(config, default_flow_style=False)
+        file_system.add_file('conf/scripts', f"{script_name}.yml", yaml_content, override=True)
+        return {"message": f"Configuration for script '{script_name}' saved successfully"}
+    except Exception as e:
+        raise HTTPException(status_code=400, detail=str(e))
+
+
+@router.delete("/{script_name}/config")
+async def delete_script_config(script_name: str):
+    """Delete script configuration."""
+    try:
+        file_system.delete_file('conf/scripts', f"{script_name}.yml")
+        return {"message": f"Configuration for script '{script_name}' deleted successfully"}
+    except FileNotFoundError:
+        raise HTTPException(status_code=404, detail=f"Configuration for script '{script_name}' not found")
+
+
+@router.get("/configs/", response_model=List[str])
+async def list_script_configs():
+    """List all script configurations."""
+    return [f.replace('.yml', '') for f in file_system.list_files('conf/scripts') if f.endswith('.yml')]
\ No newline at end of file
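
Note: both /config/template endpoints above build the template by reading each field's default straight off the pydantic model class. A minimal sketch of the extraction pattern, assuming pydantic v2 and a hypothetical ExampleConfig standing in for whatever class the loader returns:

    from pydantic import BaseModel

    class ExampleConfig(BaseModel):  # hypothetical stand-in for a loaded config class
        script_file_name: str = "v2_with_controllers.py"
        leverage: int = 20

    # model_fields maps field name -> FieldInfo; FieldInfo.default carries the declared default
    defaults = {name: field.default for name, field in ExampleConfig.model_fields.items()}
    print(defaults)  # {'script_file_name': 'v2_with_controllers.py', 'leverage': 20}

Fields declared without a default come back as PydanticUndefined, which is why the endpoints round-trip the dict through json.dumps(..., default=str) before returning it.
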
From 3410a824032a2ac6c3fd8255155786fcc535c429 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Tue, 10 Jun 2025 17:55:17 +0800
Subject: [PATCH 040/244] (feat) persist account state in db

---
 services/accounts_service.py | 231 +++++++++++++++++++++++++++++++----
 1 file changed, 210 insertions(+), 21 deletions(-)

diff --git a/services/accounts_service.py b/services/accounts_service.py
index 3a458512..00abe8dd 100644
--- a/services/accounts_service.py
+++ b/services/accounts_service.py
@@ -3,12 +3,13 @@
 import logging
 from datetime import datetime
 from decimal import Decimal
-from typing import Optional
+from typing import Any, Dict, List, Optional
 
 from fastapi import HTTPException
 from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger
 
-from config import BANNED_TOKENS, CONFIG_PASSWORD
+from config import BANNED_TOKENS, CONFIG_PASSWORD, DATABASE_URL
+from database import AsyncDatabaseManager, AccountRepository
 from utils.connector_manager import ConnectorManager
 from utils.file_system import FileSystemUtil
 
@@ -31,7 +32,6 @@ def __init__(self,
                  update_account_state_interval_minutes: int = 5,
                  default_quote: str = "USDT",
                  account_history_file: str = "account_state_history.json"):
-        # TODO: Add database to store the balances of each account each time it is updated.
         self.secrets_manager = ETHKeyFileSecretManger(CONFIG_PASSWORD)
         self.connector_manager = ConnectorManager(self.secrets_manager)
         self.accounts = {}
@@ -42,7 +42,17 @@ def __init__(self,
         self.default_quote = default_quote
         self.history_file = account_history_file
         self._update_account_state_task: Optional[asyncio.Task] = None
+
+        # Database setup
+        self.db_manager = AsyncDatabaseManager(DATABASE_URL)
+        self._db_initialized = False
 
+    async def ensure_db_initialized(self):
+        """Ensure database is initialized before using it."""
+        if not self._db_initialized:
+            await self.db_manager.create_tables()
+            self._db_initialized = True
+
     def get_accounts_state(self):
         return self.accounts_state
 
@@ -88,30 +98,65 @@ async def update_account_state_loop(self):
 
     async def dump_account_state(self):
         """
-        Dump the current account state to a JSON file. Create it if the file not exists.
+        Save the current account state to the database.
         
:return: """ - timestamp = datetime.now().isoformat() - state_to_dump = {"timestamp": timestamp, "state": self.accounts_state} - if not file_system.path_exists(path=f"data/{self.history_file}"): - file_system.add_file(directory="data", file_name=self.history_file, content=json.dumps(state_to_dump) + "\n") - else: - file_system.append_to_file(directory="data", file_name=self.history_file, content=json.dumps(state_to_dump) + "\n") + await self.ensure_db_initialized() + + try: + async with self.db_manager.get_session_context() as session: + repository = AccountRepository(session) + + # Save each account-connector combination + for account_name, connectors in self.accounts_state.items(): + for connector_name, tokens_info in connectors.items(): + if tokens_info: # Only save if there's token data + await repository.save_account_state(account_name, connector_name, tokens_info) + + except Exception as e: + logging.error(f"Error saving account state to database: {e}") + # Fallback to JSON file + timestamp = datetime.now().isoformat() + state_to_dump = {"timestamp": timestamp, "state": self.accounts_state} + if not file_system.path_exists(path=f"data/{self.history_file}"): + file_system.add_file(directory="data", file_name=self.history_file, content=json.dumps(state_to_dump) + "\n") + else: + file_system.append_to_file(directory="data", file_name=self.history_file, content=json.dumps(state_to_dump) + "\n") - def load_account_state_history(self): + async def load_account_state_history(self, + limit: Optional[int] = None, + cursor: Optional[str] = None, + start_time: Optional[datetime] = None, + end_time: Optional[datetime] = None): """ - Load the account state history from the JSON file. - :return: List of account states with timestamps. + Load the account state history from the database with pagination. + :return: Tuple of (data, next_cursor, has_more). """ - history = [] + await self.ensure_db_initialized() + try: - with open("bots/data/" + self.history_file, "r") as file: - for line in file: - if line.strip(): # Check if the line is not empty - history.append(json.loads(line)) - except FileNotFoundError: - logging.warning("No account state history file found.") - return history + async with self.db_manager.get_session_context() as session: + repository = AccountRepository(session) + return await repository.get_account_state_history( + limit=limit, + cursor=cursor, + start_time=start_time, + end_time=end_time + ) + except Exception as e: + logging.error(f"Error loading account state history from database: {e}") + # Fallback to JSON file (simplified, no pagination) + history = [] + try: + with open("bots/data/" + self.history_file, "r") as file: + for line in file: + if line.strip(): # Check if the line is not empty + history.append(json.loads(line)) + if limit and len(history) >= limit: + break + except FileNotFoundError: + logging.warning("No account state history file found.") + return history, None, False async def check_all_connectors(self): """ @@ -317,3 +362,147 @@ def delete_account(self, account_name: str): self.accounts_state.pop(account_name) # Clear all connectors for this account from cache self.connector_manager.clear_cache(account_name) + + async def get_account_current_state(self, account_name: str) -> Dict[str, List[Dict]]: + """ + Get current state for a specific account from database. 
+ """ + await self.ensure_db_initialized() + + try: + async with self.db_manager.get_session_context() as session: + repository = AccountRepository(session) + return await repository.get_account_current_state(account_name) + except Exception as e: + logging.error(f"Error getting account current state: {e}") + # Fallback to in-memory state + return self.accounts_state.get(account_name, {}) + + async def get_account_state_history(self, + account_name: str, + limit: Optional[int] = None, + cursor: Optional[str] = None, + start_time: Optional[datetime] = None, + end_time: Optional[datetime] = None): + """ + Get historical state for a specific account with pagination. + """ + await self.ensure_db_initialized() + + try: + async with self.db_manager.get_session_context() as session: + repository = AccountRepository(session) + return await repository.get_account_state_history( + account_name=account_name, + limit=limit, + cursor=cursor, + start_time=start_time, + end_time=end_time + ) + except Exception as e: + logging.error(f"Error getting account state history: {e}") + return [], None, False + + async def get_connector_current_state(self, account_name: str, connector_name: str) -> List[Dict]: + """ + Get current state for a specific connector. + """ + await self.ensure_db_initialized() + + try: + async with self.db_manager.get_session_context() as session: + repository = AccountRepository(session) + return await repository.get_connector_current_state(account_name, connector_name) + except Exception as e: + logging.error(f"Error getting connector current state: {e}") + # Fallback to in-memory state + return self.accounts_state.get(account_name, {}).get(connector_name, []) + + async def get_connector_state_history(self, + account_name: str, + connector_name: str, + limit: Optional[int] = None, + cursor: Optional[str] = None, + start_time: Optional[datetime] = None, + end_time: Optional[datetime] = None): + """ + Get historical state for a specific connector with pagination. + """ + await self.ensure_db_initialized() + + try: + async with self.db_manager.get_session_context() as session: + repository = AccountRepository(session) + return await repository.get_account_state_history( + account_name=account_name, + connector_name=connector_name, + limit=limit, + cursor=cursor, + start_time=start_time, + end_time=end_time + ) + except Exception as e: + logging.error(f"Error getting connector state history: {e}") + return [], None, False + + async def get_all_unique_tokens(self) -> List[str]: + """ + Get all unique tokens across all accounts and connectors. + """ + await self.ensure_db_initialized() + + try: + async with self.db_manager.get_session_context() as session: + repository = AccountRepository(session) + return await repository.get_all_unique_tokens() + except Exception as e: + logging.error(f"Error getting unique tokens: {e}") + # Fallback to in-memory state + tokens = set() + for account_data in self.accounts_state.values(): + for connector_data in account_data.values(): + for token_info in connector_data: + tokens.add(token_info.get("token")) + return sorted(list(tokens)) + + async def get_token_current_state(self, token: str) -> List[Dict]: + """ + Get current state of a specific token across all accounts. 
+        """
+        await self.ensure_db_initialized()
+
+        try:
+            async with self.db_manager.get_session_context() as session:
+                repository = AccountRepository(session)
+                return await repository.get_token_current_state(token)
+        except Exception as e:
+            logging.error(f"Error getting token current state: {e}")
+            return []
+
+    async def get_portfolio_value(self, account_name: Optional[str] = None) -> Dict[str, Any]:
+        """
+        Get total portfolio value, optionally filtered by account.
+        """
+        await self.ensure_db_initialized()
+
+        try:
+            async with self.db_manager.get_session_context() as session:
+                repository = AccountRepository(session)
+                return await repository.get_portfolio_value(account_name)
+        except Exception as e:
+            logging.error(f"Error getting portfolio value: {e}")
+            # Fallback to in-memory calculation
+            portfolio = {"accounts": {}, "total_value": 0}
+
+            accounts_to_process = [account_name] if account_name else self.accounts_state.keys()
+
+            for acc_name in accounts_to_process:
+                account_value = 0
+                if acc_name in self.accounts_state:
+                    for connector_data in self.accounts_state[acc_name].values():
+                        for token_info in connector_data:
+                            account_value += token_info.get("value", 0)
+                portfolio["accounts"][acc_name] = account_value
+                portfolio["total_value"] += account_value
+
+            return portfolio

From 7730f64fab2b8dffaa783a4c6712184749e530d8 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Tue, 10 Jun 2025 17:55:32 +0800
Subject: [PATCH 041/244] (feat) import proper router names

---
 main.py | 20 +++++++++++---------
 1 file changed, 11 insertions(+), 9 deletions(-)

diff --git a/main.py b/main.py
index 42c98398..629aa7fa 100644
--- a/main.py
+++ b/main.py
@@ -18,11 +18,12 @@
     accounts,
     backtesting,
     bot_orchestration,
+    controllers,
     databases,
     docker,
-    files,
     market_data,
     performance,
+    scripts,
 )
 
 # Configure logging
@@ -137,14 +138,15 @@ def auth_user(
     return credentials.username
 
 # Include all routers with authentication
-app.include_router(manage_docker.router, dependencies=[Depends(auth_user)])
-app.include_router(manage_accounts.router, dependencies=[Depends(auth_user)])
-app.include_router(manage_bot_orchestration.router, dependencies=[Depends(auth_user)])
-app.include_router(manage_files.router, dependencies=[Depends(auth_user)])
-app.include_router(manage_market_data.router, dependencies=[Depends(auth_user)])
-app.include_router(manage_backtesting.router, dependencies=[Depends(auth_user)])
-app.include_router(manage_databases.router, dependencies=[Depends(auth_user)])
-app.include_router(manage_performance.router, dependencies=[Depends(auth_user)])
+app.include_router(docker.router, dependencies=[Depends(auth_user)])
+app.include_router(accounts.router, dependencies=[Depends(auth_user)])
+app.include_router(bot_orchestration.router, dependencies=[Depends(auth_user)])
+app.include_router(controllers.router, dependencies=[Depends(auth_user)])
+app.include_router(scripts.router, dependencies=[Depends(auth_user)])
+app.include_router(market_data.router, dependencies=[Depends(auth_user)])
+app.include_router(backtesting.router, dependencies=[Depends(auth_user)])
+app.include_router(databases.router, dependencies=[Depends(auth_user)])
+app.include_router(performance.router, dependencies=[Depends(auth_user)])
 
 @app.get("/")
 async def root():
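
Note: every router is mounted behind the same auth_user dependency, so a client only needs HTTP Basic credentials to reach any endpoint. A minimal client sketch, assuming the API is served locally on port 8000 and using placeholder credentials rather than the real configured ones:

    import requests  # any HTTP client works; requests is just an example

    auth = ("user", "pass")  # placeholders for the configured credentials
    scripts = requests.get("http://localhost:8000/scripts/", auth=auth).json()
    checkpoints = requests.get("http://localhost:8000/databases/checkpoints",
                               params={"full_path": False}, auth=auth).json()

The paths come from each router's own prefix (/scripts, /databases, and so on), so the include order above mostly affects readability.
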
From 83fc1e7d20d6e2ee88fba1479a0ead44eace8f5d Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Tue, 10 Jun 2025 17:56:03 +0800
Subject: [PATCH 042/244] (feat) add connection and db models

---
 database/connection.py | 91 ++++++++++++++++++++++++++++++++++++++++++
 database/models.py     | 38 +++++++++++++++++
 2 files changed, 129 insertions(+)
 create mode 100644 database/connection.py
 create mode 100644 database/models.py

diff --git a/database/connection.py b/database/connection.py
new file mode 100644
index 00000000..cb4c9521
--- /dev/null
+++ b/database/connection.py
@@ -0,0 +1,91 @@
+import logging
+from contextlib import asynccontextmanager
+from typing import AsyncGenerator
+
+from sqlalchemy import text
+from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine, async_sessionmaker
+
+from .models import Base
+
+logger = logging.getLogger(__name__)
+
+
+class AsyncDatabaseManager:
+    def __init__(self, database_url: str):
+        # Convert postgresql:// to postgresql+asyncpg:// for async support
+        if database_url.startswith("postgresql://"):
+            database_url = database_url.replace("postgresql://", "postgresql+asyncpg://")
+
+        self.engine = create_async_engine(
+            database_url,
+            # Connection pool settings for async
+            pool_size=5,
+            max_overflow=10,
+            pool_timeout=30,
+            pool_recycle=1800,  # Recycle connections after 30 minutes
+            pool_pre_ping=True,  # Test connections before using them
+            # Engine settings
+            echo=False,  # Set to True for SQL query logging
+            echo_pool=False,  # Set to True for connection pool logging
+            # Connection arguments for asyncpg
+            connect_args={
+                "server_settings": {"application_name": "backend-api"},
+                "command_timeout": 60,
+            }
+        )
+        self.async_session = async_sessionmaker(
+            self.engine,
+            class_=AsyncSession,
+            expire_on_commit=False
+        )
+
+    async def create_tables(self):
+        """Create all tables defined in the models."""
+        try:
+            async with self.engine.begin() as conn:
+                await conn.run_sync(Base.metadata.create_all)
+            logger.info("Database tables created successfully")
+        except Exception as e:
+            logger.error(f"Failed to create database tables: {e}")
+            raise
+
+    async def close(self):
+        """Close all database connections."""
+        await self.engine.dispose()
+        logger.info("Database connections closed")
+
+    def get_session(self) -> AsyncSession:
+        """Get a new database session."""
+        return self.async_session()
+
+    @asynccontextmanager
+    async def get_session_context(self) -> AsyncGenerator[AsyncSession, None]:
+        """
+        Get a database session with automatic error handling and cleanup.
+
+        Usage:
+            async with db_manager.get_session_context() as session:
+                # Use session here
+        """
+        async with self.async_session() as session:
+            try:
+                yield session
+                await session.commit()
+            except Exception:
+                await session.rollback()
+                raise
+
+    async def health_check(self) -> bool:
+        """
+        Check if the database connection is healthy.
+
+        Returns:
+            bool: True if connection is healthy, False otherwise.
+        """
+        try:
+            async with self.engine.connect() as conn:
+                await conn.execute(text("SELECT 1"))
+            return True
+        except Exception as e:
+            logger.error(f"Database health check failed: {e}")
+            return False
\ No newline at end of file
diff --git a/database/models.py b/database/models.py
new file mode 100644
index 00000000..afc5aa12
--- /dev/null
+++ b/database/models.py
@@ -0,0 +1,38 @@
+from sqlalchemy import (
+    TIMESTAMP,
+    Column,
+    ForeignKey,
+    Integer,
+    Numeric,
+    String,
+    func,
+)
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import relationship
+
+Base = declarative_base()
+
+
+class AccountState(Base):
+    __tablename__ = "account_states"
+
+    id = Column(Integer, primary_key=True, index=True)
+    timestamp = Column(TIMESTAMP(timezone=True), server_default=func.now(), nullable=False, index=True)
+    account_name = Column(String, nullable=False, index=True)
+    connector_name = Column(String, nullable=False, index=True)
+
+    token_states = relationship("TokenState", back_populates="account_state", cascade="all, delete-orphan")
+
+
+class TokenState(Base):
+    __tablename__ = "token_states"
+
+    id = Column(Integer, primary_key=True, index=True)
+    account_state_id = Column(Integer, ForeignKey("account_states.id"), nullable=False)
+    token = Column(String, nullable=False, index=True)
+    units = Column(Numeric(precision=30, scale=18), nullable=False)
+    price = Column(Numeric(precision=30, scale=18), nullable=False)
+    value = Column(Numeric(precision=30, scale=18), nullable=False)
+    available_units = Column(Numeric(precision=30, scale=18), nullable=False)
+
+    account_state = relationship("AccountState", back_populates="token_states")
\ No newline at end of file

From 8a9a83448477eb21ccb48914bd4706fd3d00bf55 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Tue, 10 Jun 2025 17:56:09 +0800
Subject: [PATCH 043/244] (feat) add account repository

---
 database/repositories/__init__.py           |   3 +
 database/repositories/account_repository.py | 355 ++++++++++++++++++++
 2 files changed, 358 insertions(+)
 create mode 100644 database/repositories/__init__.py
 create mode 100644 database/repositories/account_repository.py

diff --git a/database/repositories/__init__.py b/database/repositories/__init__.py
new file mode 100644
index 00000000..9fb47431
--- /dev/null
+++ b/database/repositories/__init__.py
@@ -0,0 +1,3 @@
+from .account_repository import AccountRepository
+
+__all__ = ["AccountRepository"]
\ No newline at end of file
diff --git a/database/repositories/account_repository.py b/database/repositories/account_repository.py
new file mode 100644
index 00000000..5f89c8b6
--- /dev/null
+++ b/database/repositories/account_repository.py
@@ -0,0 +1,355 @@
+from datetime import datetime
+from decimal import Decimal
+from typing import Dict, List, Optional, Tuple
+import base64
+import json
+
+from sqlalchemy import desc, select, func
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.orm import joinedload, selectinload
+
+from database import AccountState, TokenState
+
+
+class AccountRepository:
+    def __init__(self, session: AsyncSession):
+        self.session = session
+
+    async def save_account_state(self, account_name: str, connector_name: str, tokens_info: List[Dict]) -> AccountState:
+        """
+        Save account state with token information to the database.
+ """ + account_state = AccountState( + account_name=account_name, + connector_name=connector_name + ) + + self.session.add(account_state) + await self.session.flush() # Get the ID + + for token_info in tokens_info: + token_state = TokenState( + account_state_id=account_state.id, + token=token_info["token"], + units=Decimal(str(token_info["units"])), + price=Decimal(str(token_info["price"])), + value=Decimal(str(token_info["value"])), + available_units=Decimal(str(token_info["available_units"])) + ) + self.session.add(token_state) + + await self.session.commit() + return account_state + + async def get_latest_account_states(self) -> Dict[str, Dict[str, List[Dict]]]: + """ + Get the latest account states for all accounts and connectors. + """ + # Get the latest timestamp for each account-connector combination + subquery = ( + select( + AccountState.account_name, + AccountState.connector_name, + func.max(AccountState.timestamp).label("max_timestamp") + ) + .group_by(AccountState.account_name, AccountState.connector_name) + .subquery() + ) + + # Get the full records for the latest timestamps + query = ( + select(AccountState) + .options(joinedload(AccountState.token_states)) + .join( + subquery, + (AccountState.account_name == subquery.c.account_name) & + (AccountState.connector_name == subquery.c.connector_name) & + (AccountState.timestamp == subquery.c.max_timestamp) + ) + ) + + result = await self.session.execute(query) + account_states = result.unique().scalars().all() + + # Convert to the expected format + accounts_state = {} + for account_state in account_states: + if account_state.account_name not in accounts_state: + accounts_state[account_state.account_name] = {} + + token_info = [] + for token_state in account_state.token_states: + token_info.append({ + "token": token_state.token, + "units": float(token_state.units), + "price": float(token_state.price), + "value": float(token_state.value), + "available_units": float(token_state.available_units) + }) + + accounts_state[account_state.account_name][account_state.connector_name] = token_info + + return accounts_state + + async def get_account_state_history(self, + limit: Optional[int] = None, + account_name: Optional[str] = None, + connector_name: Optional[str] = None, + cursor: Optional[str] = None, + start_time: Optional[datetime] = None, + end_time: Optional[datetime] = None) -> Tuple[List[Dict], Optional[str], bool]: + """ + Get historical account states with cursor-based pagination. 
+
+        Returns:
+            Tuple of (data, next_cursor, has_more)
+        """
+        query = (
+            select(AccountState)
+            .options(selectinload(AccountState.token_states))
+            .order_by(desc(AccountState.timestamp))
+        )
+
+        # Apply filters
+        if account_name:
+            query = query.filter(AccountState.account_name == account_name)
+        if connector_name:
+            query = query.filter(AccountState.connector_name == connector_name)
+        if start_time:
+            query = query.filter(AccountState.timestamp >= start_time)
+        if end_time:
+            query = query.filter(AccountState.timestamp <= end_time)
+
+        # Handle cursor-based pagination
+        if cursor:
+            try:
+                cursor_time = datetime.fromisoformat(cursor.replace('Z', '+00:00'))
+                query = query.filter(AccountState.timestamp < cursor_time)
+            except (ValueError, TypeError):
+                # Invalid cursor, ignore it
+                pass
+
+        # Fetch limit + 1 to check if there are more records
+        fetch_limit = limit + 1 if limit else 101
+        query = query.limit(fetch_limit)
+
+        result = await self.session.execute(query)
+        account_states = result.unique().scalars().all()
+
+        # Check if there are more records
+        has_more = len(account_states) == fetch_limit
+        if has_more:
+            account_states = account_states[:-1]  # Remove the extra record
+
+        # Generate next cursor
+        next_cursor = None
+        if has_more and account_states:
+            next_cursor = account_states[-1].timestamp.isoformat()
+
+        # Format response
+        history = []
+        for account_state in account_states:
+            token_info = []
+            for token_state in account_state.token_states:
+                token_info.append({
+                    "token": token_state.token,
+                    "units": float(token_state.units),
+                    "price": float(token_state.price),
+                    "value": float(token_state.value),
+                    "available_units": float(token_state.available_units)
+                })
+
+            state_dict = {
+                "timestamp": account_state.timestamp.isoformat(),
+                "state": {
+                    account_state.account_name: {
+                        account_state.connector_name: token_info
+                    }
+                }
+            }
+            history.append(state_dict)
+
+        return history, next_cursor, has_more
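
Note on the pagination scheme: the cursor is simply the ISO timestamp of the last row served, and fetching limit + 1 rows is what detects whether another page exists. A consumer sketch, assuming an AccountRepository bound to an open async session:

    async def iter_history(repository, page_size=50):
        # Page through get_account_state_history by echoing next_cursor back in
        cursor = None
        while True:
            rows, cursor, has_more = await repository.get_account_state_history(
                limit=page_size, cursor=cursor)
            for row in rows:
                yield row
            if not has_more:
                break
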
+
+    async def get_account_current_state(self, account_name: str) -> Dict[str, List[Dict]]:
+        """
+        Get the current state for a specific account.
+        """
+        subquery = (
+            select(
+                AccountState.connector_name,
+                func.max(AccountState.timestamp).label("max_timestamp")
+            )
+            .filter(AccountState.account_name == account_name)
+            .group_by(AccountState.connector_name)
+            .subquery()
+        )
+
+        query = (
+            select(AccountState)
+            .options(joinedload(AccountState.token_states))
+            .join(
+                subquery,
+                (AccountState.connector_name == subquery.c.connector_name) &
+                (AccountState.timestamp == subquery.c.max_timestamp)
+            )
+            .filter(AccountState.account_name == account_name)
+        )
+
+        result = await self.session.execute(query)
+        account_states = result.unique().scalars().all()
+
+        state = {}
+        for account_state in account_states:
+            token_info = []
+            for token_state in account_state.token_states:
+                token_info.append({
+                    "token": token_state.token,
+                    "units": float(token_state.units),
+                    "price": float(token_state.price),
+                    "value": float(token_state.value),
+                    "available_units": float(token_state.available_units)
+                })
+            state[account_state.connector_name] = token_info
+
+        return state
+
+    async def get_connector_current_state(self, account_name: str, connector_name: str) -> List[Dict]:
+        """
+        Get the current state for a specific connector.
+        """
+        query = (
+            select(AccountState)
+            .options(selectinload(AccountState.token_states))
+            .filter(
+                AccountState.account_name == account_name,
+                AccountState.connector_name == connector_name
+            )
+            .order_by(desc(AccountState.timestamp))
+            .limit(1)
+        )
+
+        result = await self.session.execute(query)
+        account_state = result.unique().scalar_one_or_none()
+
+        if not account_state:
+            return []
+
+        token_info = []
+        for token_state in account_state.token_states:
+            token_info.append({
+                "token": token_state.token,
+                "units": float(token_state.units),
+                "price": float(token_state.price),
+                "value": float(token_state.value),
+                "available_units": float(token_state.available_units)
+            })
+
+        return token_info
+
+    async def get_all_unique_tokens(self) -> List[str]:
+        """
+        Get all unique tokens across all accounts and connectors.
+        """
+        query = (
+            select(TokenState.token)
+            .distinct()
+            .order_by(TokenState.token)
+        )
+
+        result = await self.session.execute(query)
+        tokens = result.scalars().all()
+
+        return list(tokens)
+
+    async def get_token_current_state(self, token: str) -> List[Dict]:
+        """
+        Get current state of a specific token across all accounts.
+        """
+        # Get latest timestamps for each account-connector combination
+        subquery = (
+            select(
+                AccountState.account_name,
+                AccountState.connector_name,
+                func.max(AccountState.timestamp).label("max_timestamp")
+            )
+            .group_by(AccountState.account_name, AccountState.connector_name)
+            .subquery()
+        )
+
+        query = (
+            select(TokenState, AccountState.account_name, AccountState.connector_name)
+            .join(AccountState)
+            .join(
+                subquery,
+                (AccountState.account_name == subquery.c.account_name) &
+                (AccountState.connector_name == subquery.c.connector_name) &
+                (AccountState.timestamp == subquery.c.max_timestamp)
+            )
+            .filter(TokenState.token == token)
+        )
+
+        result = await self.session.execute(query)
+        token_states = result.all()
+
+        states = []
+        for token_state, account_name, connector_name in token_states:
+            states.append({
+                "account_name": account_name,
+                "connector_name": connector_name,
+                "units": float(token_state.units),
+                "price": float(token_state.price),
+                "value": float(token_state.value),
+                "available_units": float(token_state.available_units)
+            })
+
+        return states
+
+    async def get_portfolio_value(self, account_name: Optional[str] = None) -> Dict:
+        """
+        Get total portfolio value, optionally filtered by account.
+ """ + # Get latest timestamps + subquery = ( + select( + AccountState.account_name, + AccountState.connector_name, + func.max(AccountState.timestamp).label("max_timestamp") + ) + .group_by(AccountState.account_name, AccountState.connector_name) + ) + + if account_name: + subquery = subquery.filter(AccountState.account_name == account_name) + + subquery = subquery.subquery() + + # Get token values + query = ( + select( + AccountState.account_name, + func.sum(TokenState.value).label("total_value") + ) + .join(TokenState) + .join( + subquery, + (AccountState.account_name == subquery.c.account_name) & + (AccountState.connector_name == subquery.c.connector_name) & + (AccountState.timestamp == subquery.c.max_timestamp) + ) + .group_by(AccountState.account_name) + ) + + result = await self.session.execute(query) + values = result.all() + + portfolio = { + "accounts": {}, + "total_value": 0 + } + + for account, value in values: + portfolio["accounts"][account] = float(value or 0) + portfolio["total_value"] += float(value or 0) + + return portfolio \ No newline at end of file From 6a8f234d0b97b67608096faa5ab0f304d4589719 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 11 Jun 2025 15:02:06 +0800 Subject: [PATCH 044/244] (feat) update v2 with controllers to new version --- bots/scripts/v2_with_controllers.py | 107 +++------------------------- 1 file changed, 9 insertions(+), 98 deletions(-) diff --git a/bots/scripts/v2_with_controllers.py b/bots/scripts/v2_with_controllers.py index c62d585c..909b3b1f 100644 --- a/bots/scripts/v2_with_controllers.py +++ b/bots/scripts/v2_with_controllers.py @@ -1,12 +1,10 @@ import os -import time from decimal import Decimal from typing import Dict, List, Optional, Set from hummingbot.client.hummingbot_application import HummingbotApplication from hummingbot.connector.connector_base import ConnectorBase from hummingbot.core.clock import Clock -from hummingbot.core.data_type.common import OrderType, TradeType from hummingbot.data_feed.candles_feed.data_types import CandlesConfig from hummingbot.remote_iface.mqtt import ETopicPublisher from hummingbot.strategy.strategy_v2_base import StrategyV2Base, StrategyV2ConfigBase @@ -14,20 +12,15 @@ from hummingbot.strategy_v2.models.executor_actions import CreateExecutorAction, StopExecutorAction -class GenericV2StrategyWithCashOutConfig(StrategyV2ConfigBase): +class V2WithControllersConfig(StrategyV2ConfigBase): script_file_name: str = os.path.basename(__file__) candles_config: List[CandlesConfig] = [] markets: Dict[str, Set[str]] = {} - time_to_cash_out: Optional[int] = None max_global_drawdown: Optional[float] = None max_controller_drawdown: Optional[float] = None - rebalance_interval: Optional[int] = None - extra_inventory: Optional[float] = 0.02 - min_amount_to_rebalance_usd: Decimal = Decimal("8") - asset_to_rebalance: str = "USDT" -class GenericV2StrategyWithCashOut(StrategyV2Base): +class V2WithControllers(StrategyV2Base): """ This script runs a generic strategy with cash out feature. Will also check if the controllers configs have been updated and apply the new settings. 
@@ -40,25 +33,17 @@ class GenericV2StrategyWithCashOut(StrategyV2Base): """ performance_report_interval: int = 1 - def __init__(self, connectors: Dict[str, ConnectorBase], config: GenericV2StrategyWithCashOutConfig): + def __init__(self, connectors: Dict[str, ConnectorBase], config: V2WithControllersConfig): super().__init__(connectors, config) self.config = config - self.cashing_out = False self.max_pnl_by_controller = {} self.performance_reports = {} self.max_global_pnl = Decimal("0") self.drawdown_exited_controllers = [] self.closed_executors_buffer: int = 30 - self.rebalance_interval: int = self.config.rebalance_interval self._last_performance_report_timestamp = 0 - self._last_rebalance_check_timestamp = 0 - hb_app = HummingbotApplication.main_application() - self.mqtt_enabled = hb_app._mqtt is not None + self.mqtt_enabled = HummingbotApplication.main_application()._mqtt is not None self._pub: Optional[ETopicPublisher] = None - if self.config.time_to_cash_out: - self.cash_out_time = self.config.time_to_cash_out + time.time() - else: - self.cash_out_time = None def start(self, clock: Clock, timestamp: float) -> None: """ @@ -80,70 +65,10 @@ async def on_stop(self): def on_tick(self): super().on_tick() self.performance_reports = {controller_id: self.executor_orchestrator.generate_performance_report(controller_id=controller_id).dict() for controller_id in self.controllers.keys()} - self.control_rebalance() - self.control_cash_out() + self.check_manual_kill_switch() self.control_max_drawdown() self.send_performance_report() - def control_rebalance(self): - if self.rebalance_interval and self._last_rebalance_check_timestamp + self.rebalance_interval <= self.current_timestamp: - balance_required = {} - for controller_id, controller in self.controllers.items(): - connector_name = controller.config.dict().get("connector_name") - if connector_name and "perpetual" in connector_name: - continue - if connector_name not in balance_required: - balance_required[connector_name] = {} - tokens_required = controller.get_balance_requirements() - for token, amount in tokens_required: - if token not in balance_required[connector_name]: - balance_required[connector_name][token] = amount - else: - balance_required[connector_name][token] += amount - for connector_name, balance_requirements in balance_required.items(): - connector = self.connectors[connector_name] - for token, amount in balance_requirements.items(): - if token == self.config.asset_to_rebalance: - continue - balance = connector.get_balance(token) - trading_pair = f"{token}-{self.config.asset_to_rebalance}" - mid_price = connector.get_mid_price(trading_pair) - trading_rule = connector.trading_rules[trading_pair] - amount_with_safe_margin = amount * (1 + Decimal(self.config.extra_inventory)) - active_executors_for_pair = self.filter_executors( - executors=self.get_all_executors(), - filter_func=lambda x: x.is_active and x.trading_pair == trading_pair and x.connector_name == connector_name - ) - unmatched_amount = sum([executor.filled_amount_quote for executor in active_executors_for_pair if executor.side == TradeType.SELL]) - sum([executor.filled_amount_quote for executor in active_executors_for_pair if executor.side == TradeType.BUY]) - balance += unmatched_amount / mid_price - base_balance_diff = balance - amount_with_safe_margin - abs_balance_diff = abs(base_balance_diff) - trading_rules_condition = abs_balance_diff > trading_rule.min_order_size and abs_balance_diff * mid_price > trading_rule.min_notional_size and abs_balance_diff * mid_price > 
self.config.min_amount_to_rebalance_usd - order_type = OrderType.MARKET - if base_balance_diff > 0: - if trading_rules_condition: - self.logger().info(f"Rebalance: Selling {amount_with_safe_margin} {token} to {self.config.asset_to_rebalance}. Balance: {balance} | Executors unmatched balance {unmatched_amount / mid_price}") - connector.sell( - trading_pair=trading_pair, - amount=abs_balance_diff, - order_type=order_type, - price=mid_price) - else: - self.logger().info("Skipping rebalance due a low amount to sell that may cause future imbalance") - else: - if not trading_rules_condition: - amount = max([self.config.min_amount_to_rebalance_usd / mid_price, trading_rule.min_order_size, trading_rule.min_notional_size / mid_price]) - self.logger().info(f"Rebalance: Buying for a higher value to avoid future imbalance {amount} {token} to {self.config.asset_to_rebalance}. Balance: {balance} | Executors unmatched balance {unmatched_amount}") - else: - amount = abs_balance_diff - self.logger().info(f"Rebalance: Buying {amount} {token} to {self.config.asset_to_rebalance}. Balance: {balance} | Executors unmatched balance {unmatched_amount}") - connector.buy( - trading_pair=trading_pair, - amount=amount, - order_type=order_type, - price=mid_price) - self._last_rebalance_check_timestamp = self.current_timestamp - def control_max_drawdown(self): if self.config.max_controller_drawdown: self.check_max_controller_drawdown() @@ -188,23 +113,9 @@ def send_performance_report(self): self._pub(self.performance_reports) self._last_performance_report_timestamp = self.current_timestamp - def control_cash_out(self): - self.evaluate_cash_out_time() - if self.cashing_out: - self.check_executors_status() - else: - self.check_manual_cash_out() - - def evaluate_cash_out_time(self): - if self.cash_out_time and self.current_timestamp >= self.cash_out_time and not self.cashing_out: - self.logger().info("Cash out time reached. 
Stopping the controllers.") - for controller_id, controller in self.controllers.items(): - if controller.status == RunnableStatus.RUNNING: - self.logger().info(f"Cash out for controller {controller_id}.") - controller.stop() - self.cashing_out = True - - def check_manual_cash_out(self): + def check_manual_kill_switch(self): + if self._is_stop_triggered: + return for controller_id, controller in self.controllers.items(): if controller.config.manual_kill_switch and controller.status == RunnableStatus.RUNNING: self.logger().info(f"Manual cash out for controller {controller_id}.") @@ -246,7 +157,7 @@ def apply_initial_setting(self): connectors_position_mode = {} for controller_id, controller in self.controllers.items(): self.max_pnl_by_controller[controller_id] = Decimal("0") - config_dict = controller.config.dict() + config_dict = controller.config.model_dump() if "connector_name" in config_dict: if self.is_perpetual(config_dict["connector_name"]): if "position_mode" in config_dict: From 6fc205543fc134688fc68dd27294c30050e513a6 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 11 Jun 2025 15:02:18 +0800 Subject: [PATCH 045/244] (feat) sync controllers --- .../directional_trading/dman_v3.py | 5 +- .../generic/arbitrage_controller.py | 17 +- .../generic/basic_order_example.py | 27 +- .../generic/basic_order_open_close_example.py | 10 +- bots/controllers/generic/grid_strike.py | 14 +- bots/controllers/generic/pmm.py | 249 +++++-- bots/controllers/generic/pmm_adjusted.py | 669 ++++++++++++++++++ bots/controllers/generic/stat_arb.py | 475 +++++++++++++ .../market_making/dman_maker_v2.py | 3 +- 9 files changed, 1360 insertions(+), 109 deletions(-) create mode 100644 bots/controllers/generic/pmm_adjusted.py create mode 100644 bots/controllers/generic/stat_arb.py diff --git a/bots/controllers/directional_trading/dman_v3.py b/bots/controllers/directional_trading/dman_v3.py index ca648d76..8e4ee07e 100644 --- a/bots/controllers/directional_trading/dman_v3.py +++ b/bots/controllers/directional_trading/dman_v3.py @@ -3,6 +3,9 @@ from typing import List, Optional, Tuple import pandas_ta as ta # noqa: F401 +from pydantic import Field, field_validator +from pydantic_core.core_schema import ValidationInfo + from hummingbot.core.data_type.common import TradeType from hummingbot.data_feed.candles_feed.data_types import CandlesConfig from hummingbot.strategy_v2.controllers.directional_trading_controller_base import ( @@ -11,8 +14,6 @@ ) from hummingbot.strategy_v2.executors.dca_executor.data_types import DCAExecutorConfig, DCAMode from hummingbot.strategy_v2.executors.position_executor.data_types import TrailingStop -from pydantic import Field, field_validator -from pydantic_core.core_schema import ValidationInfo class DManV3ControllerConfig(DirectionalTradingControllerConfigBase): diff --git a/bots/controllers/generic/arbitrage_controller.py b/bots/controllers/generic/arbitrage_controller.py index ff8f6517..825a8663 100644 --- a/bots/controllers/generic/arbitrage_controller.py +++ b/bots/controllers/generic/arbitrage_controller.py @@ -1,9 +1,10 @@ from decimal import Decimal -from typing import Dict, List, Set +from typing import List import pandas as pd from hummingbot.client.ui.interface_utils import format_df_for_printout +from hummingbot.core.data_type.common import MarketDict from hummingbot.data_feed.candles_feed.data_types import CandlesConfig from hummingbot.strategy_v2.controllers.controller_base import ControllerBase, ControllerConfigBase from 
hummingbot.strategy_v2.executors.arbitrage_executor.data_types import ArbitrageExecutorConfig @@ -23,18 +24,8 @@ class ArbitrageControllerConfig(ControllerConfigBase): rate_connector: str = "binance" quote_conversion_asset: str = "USDT" - def update_markets(self, markets: Dict[str, Set[str]]) -> Dict[str, Set[str]]: - if self.exchange_pair_1.connector_name == self.exchange_pair_2.connector_name: - markets.update({ - self.exchange_pair_1.connector_name: {self.exchange_pair_1.trading_pair, - self.exchange_pair_2.trading_pair} - }) - else: - markets.update({ - self.exchange_pair_1.connector_name: {self.exchange_pair_1.trading_pair}, - self.exchange_pair_2.connector_name: {self.exchange_pair_2.trading_pair} - }) - return markets + def update_markets(self, markets: MarketDict) -> MarketDict: + return [markets.add_or_update(cp.connector_name, cp.trading_pair) for cp in [self.exchange_pair_1, self.exchange_pair_2]][-1] class ArbitrageController(ControllerBase): diff --git a/bots/controllers/generic/basic_order_example.py b/bots/controllers/generic/basic_order_example.py index 10368da4..b1cb4e04 100644 --- a/bots/controllers/generic/basic_order_example.py +++ b/bots/controllers/generic/basic_order_example.py @@ -1,7 +1,6 @@ from decimal import Decimal -from typing import Dict, Set -from hummingbot.core.data_type.common import PositionMode, PriceType, TradeType +from hummingbot.core.data_type.common import MarketDict, PositionMode, PriceType, TradeType from hummingbot.strategy_v2.controllers import ControllerBase, ControllerConfigBase from hummingbot.strategy_v2.executors.order_executor.data_types import ExecutionStrategy, OrderExecutorConfig from hummingbot.strategy_v2.models.executor_actions import CreateExecutorAction, ExecutorAction @@ -9,20 +8,16 @@ class BasicOrderExampleConfig(ControllerConfigBase): controller_name: str = "basic_order_example" - controller_type: str = "generic" connector_name: str = "binance_perpetual" trading_pair: str = "WLD-USDT" side: TradeType = TradeType.BUY position_mode: PositionMode = PositionMode.HEDGE - leverage: int = 50 + leverage: int = 20 amount_quote: Decimal = Decimal("10") order_frequency: int = 10 - def update_markets(self, markets: Dict[str, Set[str]]) -> Dict[str, Set[str]]: - if self.connector_name not in markets: - markets[self.connector_name] = set() - markets[self.connector_name].add(self.trading_pair) - return markets + def update_markets(self, markets: MarketDict) -> MarketDict: + return markets.add_or_update(self.connector_name, self.trading_pair) class BasicOrderExample(ControllerBase): @@ -31,6 +26,11 @@ def __init__(self, config: BasicOrderExampleConfig, *args, **kwargs): self.config = config self.last_timestamp = 0 + async def update_processed_data(self): + mid_price = self.market_data_provider.get_price_by_type(self.config.connector_name, self.config.trading_pair, PriceType.MidPrice) + n_active_executors = len([executor for executor in self.executors_info if executor.is_active]) + self.processed_data = {"mid_price": mid_price, "n_active_executors": n_active_executors} + def determine_executor_actions(self) -> list[ExecutorAction]: if (self.processed_data["n_active_executors"] == 0 and self.market_data_provider.time() - self.last_timestamp > self.config.order_frequency): @@ -44,12 +44,5 @@ def determine_executor_actions(self) -> list[ExecutorAction]: execution_strategy=ExecutionStrategy.MARKET, price=self.processed_data["mid_price"], ) - return [CreateExecutorAction( - controller_id=self.config.id, - executor_config=config)] + return 
[CreateExecutorAction(controller_id=self.config.id, executor_config=config)] return [] - - async def update_processed_data(self): - mid_price = self.market_data_provider.get_price_by_type(self.config.connector_name, self.config.trading_pair, PriceType.MidPrice) - n_active_executors = len([executor for executor in self.executors_info if executor.is_active]) - self.processed_data = {"mid_price": mid_price, "n_active_executors": n_active_executors} diff --git a/bots/controllers/generic/basic_order_open_close_example.py b/bots/controllers/generic/basic_order_open_close_example.py index bfeef02d..1cea9bbd 100644 --- a/bots/controllers/generic/basic_order_open_close_example.py +++ b/bots/controllers/generic/basic_order_open_close_example.py @@ -1,7 +1,6 @@ from decimal import Decimal -from typing import Dict, Set -from hummingbot.core.data_type.common import PositionAction, PositionMode, PriceType, TradeType +from hummingbot.core.data_type.common import MarketDict, PositionAction, PositionMode, PriceType, TradeType from hummingbot.strategy_v2.controllers import ControllerBase, ControllerConfigBase from hummingbot.strategy_v2.executors.order_executor.data_types import ExecutionStrategy, OrderExecutorConfig from hummingbot.strategy_v2.models.executor_actions import CreateExecutorAction, ExecutorAction @@ -20,11 +19,8 @@ class BasicOrderOpenCloseExampleConfig(ControllerConfigBase): close_partial_position: bool = False amount_quote: Decimal = Decimal("20") - def update_markets(self, markets: Dict[str, Set[str]]) -> Dict[str, Set[str]]: - if self.connector_name not in markets: - markets[self.connector_name] = set() - markets[self.connector_name].add(self.trading_pair) - return markets + def update_markets(self, markets: MarketDict) -> MarketDict: + return markets.add_or_update(self.connector_name, self.trading_pair) class BasicOrderOpenClose(ControllerBase): diff --git a/bots/controllers/generic/grid_strike.py b/bots/controllers/generic/grid_strike.py index a45b83c7..825082c4 100644 --- a/bots/controllers/generic/grid_strike.py +++ b/bots/controllers/generic/grid_strike.py @@ -1,7 +1,9 @@ from decimal import Decimal -from typing import Dict, List, Optional, Set +from typing import List, Optional -from hummingbot.core.data_type.common import OrderType, PositionMode, PriceType, TradeType +from pydantic import Field + +from hummingbot.core.data_type.common import MarketDict, OrderType, PositionMode, PriceType, TradeType from hummingbot.data_feed.candles_feed.data_types import CandlesConfig from hummingbot.strategy_v2.controllers import ControllerBase, ControllerConfigBase from hummingbot.strategy_v2.executors.data_types import ConnectorPair @@ -9,7 +11,6 @@ from hummingbot.strategy_v2.executors.position_executor.data_types import TripleBarrierConfig from hummingbot.strategy_v2.models.executor_actions import CreateExecutorAction, ExecutorAction from hummingbot.strategy_v2.models.executors_info import ExecutorInfo -from pydantic import Field class GridStrikeConfig(ControllerConfigBase): @@ -51,11 +52,8 @@ class GridStrikeConfig(ControllerConfigBase): take_profit_order_type=OrderType.LIMIT_MAKER, ) - def update_markets(self, markets: Dict[str, Set[str]]) -> Dict[str, Set[str]]: - if self.connector_name not in markets: - markets[self.connector_name] = set() - markets[self.connector_name].add(self.trading_pair) - return markets + def update_markets(self, markets: MarketDict) -> MarketDict: + return markets.add_or_update(self.connector_name, self.trading_pair) class GridStrike(ControllerBase): diff --git 
a/bots/controllers/generic/pmm.py b/bots/controllers/generic/pmm.py index 7a66baf9..97e55135 100644 --- a/bots/controllers/generic/pmm.py +++ b/bots/controllers/generic/pmm.py @@ -1,11 +1,10 @@ from decimal import Decimal -from typing import Dict, List, Optional, Set, Tuple, Union +from typing import List, Optional, Tuple, Union from pydantic import Field, field_validator from pydantic_core.core_schema import ValidationInfo -from hummingbot.core.data_type.common import OrderType, PositionMode, PriceType, TradeType -from hummingbot.core.data_type.trade_fee import TokenAmount +from hummingbot.core.data_type.common import MarketDict, OrderType, PositionMode, PriceType, TradeType from hummingbot.data_feed.candles_feed.data_types import CandlesConfig from hummingbot.strategy_v2.controllers.controller_base import ControllerBase, ControllerConfigBase from hummingbot.strategy_v2.executors.data_types import ConnectorPair @@ -40,7 +39,7 @@ class PMMConfig(ControllerConfigBase): default=Decimal("0.05"), json_schema_extra={ "prompt_on_new": True, - "prompt": "Enter the portfolio allocation (e.g., 0.05 for 5%):", + "prompt": "Enter the maximum quote exposure percentage around mid price (e.g., 0.05 for 5% of total quote allocation):", } ) target_base_pct: Decimal = Field( @@ -136,6 +135,7 @@ class PMMConfig(ControllerConfigBase): } ) global_take_profit: Decimal = Decimal("0.02") + global_stop_loss: Decimal = Decimal("0.05") @field_validator("take_profit", mode="before") @classmethod @@ -234,11 +234,8 @@ def get_spreads_and_amounts_in_quote(self, trade_type: TradeType) -> Tuple[List[ spreads = getattr(self, f'{trade_type.name.lower()}_spreads') return spreads, [amt_pct * self.total_amount_quote * self.portfolio_allocation for amt_pct in normalized_amounts_pct] - def update_markets(self, markets: Dict[str, Set[str]]) -> Dict[str, Set[str]]: - if self.connector_name not in markets: - markets[self.connector_name] = set() - markets[self.connector_name].add(self.trading_pair) - return markets + def update_markets(self, markets: MarketDict) -> MarketDict: + return markets.add_or_update(self.connector_name, self.trading_pair) class PMM(ControllerBase): @@ -266,8 +263,20 @@ def create_actions_proposal(self) -> List[ExecutorAction]: Create actions proposal based on the current state of the controller. 
""" create_actions = [] - if self.processed_data["current_base_pct"] > self.config.target_base_pct and self.processed_data["unrealized_pnl_pct"] > self.config.global_take_profit: - # Create a global take profit executor + + # Check if a position reduction executor for TP/SL is already sent + reduction_executor_exists = any( + executor.is_active and + executor.custom_info.get("level_id") == "global_tp_sl" + for executor in self.executors_info + ) + + if (not reduction_executor_exists and + self.processed_data["current_base_pct"] > self.config.target_base_pct and + (self.processed_data["unrealized_pnl_pct"] > self.config.global_take_profit or + self.processed_data["unrealized_pnl_pct"] < -self.config.global_stop_loss)): + + # Create a global take profit or stop loss executor create_actions.append(CreateExecutorAction( controller_id=self.config.id, executor_config=OrderExecutorConfig( @@ -278,6 +287,7 @@ def create_actions_proposal(self) -> List[ExecutorAction]: amount=self.processed_data["position_amount"], execution_strategy=ExecutionStrategy.MARKET, price=self.processed_data["reference_price"], + level_id="global_tp_sl" # Use a specific level_id to identify this as a TP/SL executor ) )) return create_actions @@ -440,81 +450,198 @@ def get_not_active_levels_ids(self, active_levels_ids: List[str]) -> List[str]: return sell_ids_missing return buy_ids_missing + sell_ids_missing - def get_balance_requirements(self) -> List[TokenAmount]: - """ - Get the balance requirements for the controller. - """ - base_asset, quote_asset = self.config.trading_pair.split("-") - _, amounts_quote = self.config.get_spreads_and_amounts_in_quote(TradeType.BUY) - _, amounts_base = self.config.get_spreads_and_amounts_in_quote(TradeType.SELL) - return [TokenAmount(base_asset, Decimal(sum(amounts_base) / self.processed_data["reference_price"])), - TokenAmount(quote_asset, Decimal(sum(amounts_quote)))] - def to_format_status(self) -> List[str]: """ Get the status of the controller in a formatted way with ASCII visualizations. 
""" + from decimal import Decimal + from itertools import zip_longest + status = [] - status.append(f"Controller ID: {self.config.id}") - status.append(f"Connector: {self.config.connector_name}") - status.append(f"Trading Pair: {self.config.trading_pair}") - status.append(f"Portfolio Allocation: {self.config.portfolio_allocation}") - status.append(f"Reference Price: {self.processed_data['reference_price']}") - status.append(f"Spread Multiplier: {self.processed_data['spread_multiplier']}") - - # Base percentage visualization + + # Get all required data base_pct = self.processed_data['current_base_pct'] min_pct = self.config.min_base_pct max_pct = self.config.max_base_pct target_pct = self.config.target_base_pct - # Create base percentage bar - bar_width = 50 + skew = base_pct - target_pct + skew_pct = skew / target_pct if target_pct != 0 else Decimal('0') + max_skew = getattr(self.config, 'max_skew', Decimal('0.0')) + + # Fixed widths - adjusted based on screenshot analysis + outer_width = 92 # Total width including outer borders + inner_width = outer_width - 4 # Inner content width + half_width = (inner_width) // 2 - 1 # Width of each column in split sections + bar_width = inner_width - 15 # Width of visualization bars (accounting for label) + + # Header - omit ID since it's shown above in controller header + status.append("╒" + "═" * (inner_width) + "╕") + + header_line = ( + f"{self.config.connector_name}:{self.config.trading_pair} " + f"Price: {self.processed_data['reference_price']} " + f"Alloc: {self.config.portfolio_allocation:.1%} " + f"Spread Mult: {self.processed_data['spread_multiplier']} |" + ) + + status.append(f"│ {header_line:<{inner_width}} │") + + # Position and PnL sections with precise widths + status.append(f"├{'─' * half_width}┬{'─' * half_width}┤") + status.append(f"│ {'POSITION STATUS':<{half_width - 2}} │ {'PROFIT & LOSS':<{half_width - 2}} │") + status.append(f"├{'─' * half_width}┼{'─' * half_width}┤") + + # Position data for left column + position_info = [ + f"Current: {base_pct:.2%}", + f"Target: {target_pct:.2%}", + f"Min/Max: {min_pct:.2%}/{max_pct:.2%}", + f"Skew: {skew_pct:+.2%} (max {max_skew:.2%})" + ] + + # PnL data for right column + pnl_info = [] + if 'unrealized_pnl_pct' in self.processed_data: + pnl = self.processed_data['unrealized_pnl_pct'] + pnl_sign = "+" if pnl >= 0 else "" + pnl_info = [ + f"Unrealized: {pnl_sign}{pnl:.2%}", + f"Take Profit: {self.config.global_take_profit:.2%}", + f"Stop Loss: {-self.config.global_stop_loss:.2%}", + f"Leverage: {self.config.leverage}x" + ] + + # Display position and PnL info side by side with exact spacing + for pos_line, pnl_line in zip_longest(position_info, pnl_info, fillvalue=""): + status.append(f"│ {pos_line:<{half_width - 2}} │ {pnl_line:<{half_width - 2}} │") + + # Adjust visualization section - ensure consistent spacing + status.append(f"├{'─' * (inner_width)}┤") + status.append(f"│ {'VISUALIZATIONS':<{inner_width}} │") + status.append(f"├{'─' * (inner_width)}┤") + + # Position bar with exact spacing and characters filled_width = int(base_pct * bar_width) min_pos = int(min_pct * bar_width) max_pos = int(max_pct * bar_width) target_pos = int(target_pct * bar_width) - base_bar = "Base %: [" + + # Build position bar character by character + position_bar = "" for i in range(bar_width): if i == filled_width: - base_bar += "O" # Current position + position_bar += "◆" # Current position elif i == min_pos: - base_bar += "m" # Min threshold + position_bar += "┃" # Min threshold elif i == max_pos: - base_bar += 
"M" # Max threshold + position_bar += "┃" # Max threshold elif i == target_pos: - base_bar += "T" # Target threshold + position_bar += "┇" # Target threshold elif i < filled_width: - base_bar += "=" + position_bar += "█" # Filled area else: - base_bar += " " - base_bar += f"] {base_pct:.2%}" - status.append(base_bar) - status.append(f"Min: {min_pct:.2%} | Target: {target_pct:.2%} | Max: {max_pct:.2%}") - # Skew visualization - skew = base_pct - target_pct - skew_pct = skew / target_pct if target_pct != 0 else Decimal('0') - max_skew = getattr(self.config, 'max_skew', Decimal('0.0')) - skew_bar_width = 30 - skew_bar = "Skew: " + position_bar += "░" # Empty area + + # Ensure consistent label spacing as seen in screenshot + status.append(f"│ Position: [{position_bar}] │") + + # Skew visualization with exact spacing + skew_bar_width = bar_width center = skew_bar_width // 2 skew_pos = center + int(skew_pct * center * 2) - skew_pos = max(0, min(skew_bar_width, skew_pos)) + skew_pos = max(0, min(skew_bar_width - 1, skew_pos)) + + # Build skew bar character by character + skew_bar = "" for i in range(skew_bar_width): if i == center: - skew_bar += "|" # Center line + skew_bar += "┃" # Center line elif i == skew_pos: - skew_bar += "*" # Current skew + skew_bar += "⬤" # Current skew else: - skew_bar += "-" - skew_bar += f" {skew_pct:+.2%} (max: {max_skew:.2%})" - status.append(skew_bar) - # Active executors summary - status.append("\nActive Executors:") - active_buy = sum(1 for info in self.executors_info if self.get_trade_type_from_level_id(info.custom_info["level_id"]) == TradeType.BUY) - active_sell = sum(1 for info in self.executors_info if self.get_trade_type_from_level_id(info.custom_info["level_id"]) == TradeType.SELL) - status.append(f"Total: {len(self.executors_info)} (Buy: {active_buy}, Sell: {active_sell})") - # Deviation info + skew_bar += "─" # Empty line + + # Match spacing from screenshot with exact character counts + status.append(f"│ Skew: [{skew_bar}] │") + + # PnL visualization if available + if 'unrealized_pnl_pct' in self.processed_data: + pnl = self.processed_data['unrealized_pnl_pct'] + take_profit = self.config.global_take_profit + stop_loss = -self.config.global_stop_loss + + pnl_bar_width = bar_width + center = pnl_bar_width // 2 + + # Calculate positions with exact scaling + max_range = max(abs(take_profit), abs(stop_loss), abs(pnl)) * Decimal("1.2") + scale = (pnl_bar_width // 2) / max_range + + pnl_pos = center + int(pnl * scale) + take_profit_pos = center + int(take_profit * scale) + stop_loss_pos = center + int(stop_loss * scale) + + # Ensure positions are within bounds + pnl_pos = max(0, min(pnl_bar_width - 1, pnl_pos)) + take_profit_pos = max(0, min(pnl_bar_width - 1, take_profit_pos)) + stop_loss_pos = max(0, min(pnl_bar_width - 1, stop_loss_pos)) + + # Build PnL bar character by character + pnl_bar = "" + for i in range(pnl_bar_width): + if i == center: + pnl_bar += "│" # Center line + elif i == pnl_pos: + pnl_bar += "⬤" # Current PnL + elif i == take_profit_pos: + pnl_bar += "T" # Take profit line + elif i == stop_loss_pos: + pnl_bar += "S" # Stop loss line + elif (pnl >= 0 and center <= i < pnl_pos) or (pnl < 0 and pnl_pos < i <= center): + pnl_bar += "█" if pnl >= 0 else "▓" + else: + pnl_bar += "─" + + # Match spacing from screenshot + status.append(f"│ PnL: [{pnl_bar}] │") + + # Executors section with precise column widths + status.append(f"├{'─' * half_width}┬{'─' * half_width}┤") + status.append(f"│ {'EXECUTORS STATUS':<{half_width - 2}} │ {'EXECUTOR 
VISUALIZATION':<{half_width - 2}} │") + status.append(f"├{'─' * half_width}┼{'─' * half_width}┤") + + # Count active executors by type + active_buy = sum(1 for info in self.executors_info + if info.is_active and self.get_trade_type_from_level_id(info.custom_info["level_id"]) == TradeType.BUY) + active_sell = sum(1 for info in self.executors_info + if info.is_active and self.get_trade_type_from_level_id(info.custom_info["level_id"]) == TradeType.SELL) + total_active = sum(1 for info in self.executors_info if info.is_active) + + # Executor information with fixed formatting + executor_info = [ + f"Total Active: {total_active}", + f"Total Created: {len(self.executors_info)}", + f"Buy Executors: {active_buy}", + f"Sell Executors: {active_sell}" + ] + if 'deviation' in self.processed_data: - deviation = self.processed_data['deviation'] - status.append(f"Deviation: {deviation:.4f}") + executor_info.append(f"Target Deviation: {self.processed_data['deviation']:.4f}") + + # Visualization with consistent block characters for buy/sell representation + buy_bars = "▮" * active_buy if active_buy > 0 else "─" + sell_bars = "▮" * active_sell if active_sell > 0 else "─" + + executor_viz = [ + f"Buy: {buy_bars}", + f"Sell: {sell_bars}" + ] + + # Display with fixed width columns + for exec_line, viz_line in zip_longest(executor_info, executor_viz, fillvalue=""): + status.append(f"│ {exec_line:<{half_width - 2}} │ {viz_line:<{half_width - 2}} │") + + # Bottom border with exact width + status.append(f"╘{'═' * (inner_width)}╛") + return status diff --git a/bots/controllers/generic/pmm_adjusted.py b/bots/controllers/generic/pmm_adjusted.py new file mode 100644 index 00000000..e9bc2667 --- /dev/null +++ b/bots/controllers/generic/pmm_adjusted.py @@ -0,0 +1,669 @@ +from decimal import Decimal +from typing import List, Optional, Tuple, Union + +from pydantic import Field, field_validator +from pydantic_core.core_schema import ValidationInfo + +from hummingbot.core.data_type.common import MarketDict, OrderType, PositionMode, PriceType, TradeType +from hummingbot.data_feed.candles_feed.data_types import CandlesConfig +from hummingbot.strategy_v2.controllers.controller_base import ControllerBase, ControllerConfigBase +from hummingbot.strategy_v2.executors.data_types import ConnectorPair +from hummingbot.strategy_v2.executors.order_executor.data_types import ExecutionStrategy, OrderExecutorConfig +from hummingbot.strategy_v2.executors.position_executor.data_types import PositionExecutorConfig, TripleBarrierConfig +from hummingbot.strategy_v2.models.executor_actions import CreateExecutorAction, ExecutorAction, StopExecutorAction +from hummingbot.strategy_v2.models.executors import CloseType + + +class PMMAdjustedConfig(ControllerConfigBase): + """ + This class represents the base configuration for a market making controller. 
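# A minimal sketch of the bar-rendering technique used by to_format_status
# above: map percentages onto a fixed-width track, fill up to the current
# value, then overlay marker glyphs for thresholds. The width and glyphs here
# are illustrative, not the controller's exact layout.
def render_bar(current: float, markers: dict, width: int = 40) -> str:
    fill = int(current * width)
    cells = ["█" if i < fill else "░" for i in range(width)]
    for pct, glyph in markers.items():  # threshold markers win over the fill
        cells[min(width - 1, int(pct * width))] = glyph
    return "".join(cells)

# 25% filled, with min/target/max markers at 10%/20%/40%:
print(render_bar(0.25, {0.10: "┃", 0.20: "┇", 0.40: "┃"}))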
+ """ + controller_type: str = "generic" + controller_name: str = "pmm_adjusted" + candles_config: List[CandlesConfig] = [] + connector_name: str = Field( + default="binance", + json_schema_extra={ + "prompt_on_new": True, + "prompt": "Enter the name of the connector to use (e.g., binance):", + } + ) + trading_pair: str = Field( + default="BTC-FDUSD", + json_schema_extra={ + "prompt_on_new": True, + "prompt": "Enter the trading pair to trade on (e.g., BTC-FDUSD):", + } + ) + candles_connector_name: str = Field(default="binance") + candles_trading_pair: str = Field(default="BTC-USDT") + candles_interval: str = Field(default="1s") + + portfolio_allocation: Decimal = Field( + default=Decimal("0.05"), + json_schema_extra={ + "prompt_on_new": True, + "prompt": "Enter the maximum quote exposure percentage around mid price (e.g., 0.05 for 5% of total quote allocation):", + } + ) + target_base_pct: Decimal = Field( + default=Decimal("0.2"), + json_schema_extra={ + "prompt_on_new": True, + "prompt": "Enter the target base percentage (e.g., 0.2 for 20%):", + } + ) + min_base_pct: Decimal = Field( + default=Decimal("0.1"), + json_schema_extra={ + "prompt_on_new": True, + "prompt": "Enter the minimum base percentage (e.g., 0.1 for 10%):", + } + ) + max_base_pct: Decimal = Field( + default=Decimal("0.4"), + json_schema_extra={ + "prompt_on_new": True, + "prompt": "Enter the maximum base percentage (e.g., 0.4 for 40%):", + } + ) + buy_spreads: List[float] = Field( + default="0.01,0.02", + json_schema_extra={ + "prompt_on_new": True, "is_updatable": True, + "prompt": "Enter a comma-separated list of buy spreads (e.g., '0.01, 0.02'):", + } + ) + sell_spreads: List[float] = Field( + default="0.01,0.02", + json_schema_extra={ + "prompt_on_new": True, "is_updatable": True, + "prompt": "Enter a comma-separated list of sell spreads (e.g., '0.01, 0.02'):", + } + ) + buy_amounts_pct: Union[List[Decimal], None] = Field( + default=None, + json_schema_extra={ + "prompt_on_new": True, "is_updatable": True, + "prompt": "Enter a comma-separated list of buy amounts as percentages (e.g., '50, 50'), or leave blank to distribute equally:", + } + ) + sell_amounts_pct: Union[List[Decimal], None] = Field( + default=None, + json_schema_extra={ + "prompt_on_new": True, "is_updatable": True, + "prompt": "Enter a comma-separated list of sell amounts as percentages (e.g., '50, 50'), or leave blank to distribute equally:", + } + ) + executor_refresh_time: int = Field( + default=60 * 5, + json_schema_extra={ + "prompt_on_new": True, "is_updatable": True, + "prompt": "Enter the refresh time in seconds for executors (e.g., 300 for 5 minutes):", + } + ) + cooldown_time: int = Field( + default=15, + json_schema_extra={ + "prompt_on_new": True, "is_updatable": True, + "prompt": "Enter the cooldown time in seconds between after replacing an executor that traded (e.g., 15):", + } + ) + leverage: int = Field( + default=20, + json_schema_extra={ + "prompt_on_new": True, "is_updatable": True, + "prompt": "Enter the leverage to use for trading (e.g., 20 for 20x leverage). 
Set it to 1 for spot trading:", + } + ) + position_mode: PositionMode = Field(default="HEDGE") + take_profit: Optional[Decimal] = Field( + default=Decimal("0.02"), gt=0, + json_schema_extra={ + "prompt_on_new": True, "is_updatable": True, + "prompt": "Enter the take profit as a decimal (e.g., 0.02 for 2%):", + } + ) + take_profit_order_type: Optional[OrderType] = Field( + default="LIMIT_MAKER", + json_schema_extra={ + "prompt_on_new": True, "is_updatable": True, + "prompt": "Enter the order type for take profit (e.g., LIMIT_MAKER):", + } + ) + max_skew: Decimal = Field( + default=Decimal("1.0"), + json_schema_extra={ + "prompt_on_new": True, "is_updatable": True, + "prompt": "Enter the maximum skew factor (e.g., 1.0):", + } + ) + global_take_profit: Decimal = Decimal("0.02") + global_stop_loss: Decimal = Decimal("0.05") + + @field_validator("take_profit", mode="before") + @classmethod + def validate_target(cls, v): + if isinstance(v, str): + if v == "": + return None + return Decimal(v) + return v + + @field_validator('take_profit_order_type', mode="before") + @classmethod + def validate_order_type(cls, v) -> OrderType: + if isinstance(v, OrderType): + return v + elif v is None: + return OrderType.MARKET + elif isinstance(v, str): + if v.upper() in OrderType.__members__: + return OrderType[v.upper()] + elif isinstance(v, int): + try: + return OrderType(v) + except ValueError: + pass + raise ValueError(f"Invalid order type: {v}. Valid options are: {', '.join(OrderType.__members__)}") + + @field_validator('buy_spreads', 'sell_spreads', mode="before") + @classmethod + def parse_spreads(cls, v): + if v is None: + return [] + if isinstance(v, str): + if v == "": + return [] + return [float(x.strip()) for x in v.split(',')] + return v + + @field_validator('buy_amounts_pct', 'sell_amounts_pct', mode="before") + @classmethod + def parse_and_validate_amounts(cls, v, validation_info: ValidationInfo): + field_name = validation_info.field_name + if v is None or v == "": + spread_field = field_name.replace('amounts_pct', 'spreads') + return [1 for _ in validation_info.data[spread_field]] + if isinstance(v, str): + return [float(x.strip()) for x in v.split(',')] + elif isinstance(v, list) and len(v) != len(validation_info.data[field_name.replace('amounts_pct', 'spreads')]): + raise ValueError( + f"The number of {field_name} must match the number of {field_name.replace('amounts_pct', 'spreads')}.") + return v + + @field_validator('position_mode', mode="before") + @classmethod + def validate_position_mode(cls, v) -> PositionMode: + if isinstance(v, str): + if v.upper() in PositionMode.__members__: + return PositionMode[v.upper()] + raise ValueError(f"Invalid position mode: {v}. 
Valid options are: {', '.join(PositionMode.__members__)}") + return v + + @property + def triple_barrier_config(self) -> TripleBarrierConfig: + return TripleBarrierConfig( + take_profit=self.take_profit, + trailing_stop=None, + open_order_type=OrderType.LIMIT_MAKER, # Defaulting to LIMIT as is a Maker Controller + take_profit_order_type=self.take_profit_order_type, + stop_loss_order_type=OrderType.MARKET, # Defaulting to MARKET as per requirement + time_limit_order_type=OrderType.MARKET # Defaulting to MARKET as per requirement + ) + + def update_parameters(self, trade_type: TradeType, new_spreads: Union[List[float], str], new_amounts_pct: Optional[Union[List[int], str]] = None): + spreads_field = 'buy_spreads' if trade_type == TradeType.BUY else 'sell_spreads' + amounts_pct_field = 'buy_amounts_pct' if trade_type == TradeType.BUY else 'sell_amounts_pct' + + setattr(self, spreads_field, self.parse_spreads(new_spreads)) + if new_amounts_pct is not None: + setattr(self, amounts_pct_field, self.parse_and_validate_amounts(new_amounts_pct, self.__dict__, self.__fields__[amounts_pct_field])) + else: + setattr(self, amounts_pct_field, [1 for _ in getattr(self, spreads_field)]) + + def get_spreads_and_amounts_in_quote(self, trade_type: TradeType) -> Tuple[List[float], List[float]]: + buy_amounts_pct = getattr(self, 'buy_amounts_pct') + sell_amounts_pct = getattr(self, 'sell_amounts_pct') + + # Calculate total percentages across buys and sells + total_pct = sum(buy_amounts_pct) + sum(sell_amounts_pct) + + # Normalize amounts_pct based on total percentages + if trade_type == TradeType.BUY: + normalized_amounts_pct = [amt_pct / total_pct for amt_pct in buy_amounts_pct] + else: # TradeType.SELL + normalized_amounts_pct = [amt_pct / total_pct for amt_pct in sell_amounts_pct] + + spreads = getattr(self, f'{trade_type.name.lower()}_spreads') + return spreads, [amt_pct * self.total_amount_quote * self.portfolio_allocation for amt_pct in normalized_amounts_pct] + + def update_markets(self, markets: MarketDict) -> MarketDict: + return markets.add_or_update(self.connector_name, self.trading_pair) + + +class PMMAdjusted(ControllerBase): + """ + This class represents the base class for a market making controller. + """ + + def __init__(self, config: PMMAdjustedConfig, *args, **kwargs): + super().__init__(config, *args, **kwargs) + self.config = config + self.market_data_provider.initialize_rate_sources([ConnectorPair( + connector_name=config.connector_name, trading_pair=config.trading_pair)]) + self.config.candles_config = [ + CandlesConfig(connector=self.config.candles_connector_name, + trading_pair=self.config.candles_trading_pair, + interval=self.config.candles_interval) + ] + + def determine_executor_actions(self) -> List[ExecutorAction]: + """ + Determine actions based on the provided executor handler report. + """ + actions = [] + actions.extend(self.create_actions_proposal()) + actions.extend(self.stop_actions_proposal()) + return actions + + def create_actions_proposal(self) -> List[ExecutorAction]: + """ + Create actions proposal based on the current state of the controller. 
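# A worked pass through get_spreads_and_amounts_in_quote above, with assumed
# inputs buy_amounts_pct = sell_amounts_pct = [1, 1], total_amount_quote = 1000
# and portfolio_allocation = 0.05: percentages are normalized across both
# sides, so every level gets an equal share of the allocated quote budget.
buy_amounts_pct, sell_amounts_pct = [1, 1], [1, 1]
total_pct = sum(buy_amounts_pct) + sum(sell_amounts_pct)   # 4
normalized_buy = [p / total_pct for p in buy_amounts_pct]  # [0.25, 0.25]
buy_amounts_quote = [p * 1000 * 0.05 for p in normalized_buy]
assert buy_amounts_quote == [12.5, 12.5]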
+ """ + create_actions = [] + + # Check if a position reduction executor for TP/SL is already sent + reduction_executor_exists = any( + executor.is_active and + executor.custom_info.get("level_id") == "global_tp_sl" + for executor in self.executors_info + ) + + if (not reduction_executor_exists and + self.processed_data["current_base_pct"] > self.config.target_base_pct and + (self.processed_data["unrealized_pnl_pct"] > self.config.global_take_profit or + self.processed_data["unrealized_pnl_pct"] < -self.config.global_stop_loss)): + + # Create a global take profit or stop loss executor + create_actions.append(CreateExecutorAction( + controller_id=self.config.id, + executor_config=OrderExecutorConfig( + timestamp=self.market_data_provider.time(), + connector_name=self.config.connector_name, + trading_pair=self.config.trading_pair, + side=TradeType.SELL, + amount=self.processed_data["position_amount"], + execution_strategy=ExecutionStrategy.MARKET, + price=self.processed_data["reference_price"], + level_id="global_tp_sl" # Use a specific level_id to identify this as a TP/SL executor + ) + )) + return create_actions + levels_to_execute = self.get_levels_to_execute() + # Pre-calculate all spreads and amounts for buy and sell sides + buy_spreads, buy_amounts_quote = self.config.get_spreads_and_amounts_in_quote(TradeType.BUY) + sell_spreads, sell_amounts_quote = self.config.get_spreads_and_amounts_in_quote(TradeType.SELL) + reference_price = Decimal(self.processed_data["reference_price"]) + # Get current position info for skew calculation + current_pct = self.processed_data["current_base_pct"] + min_pct = self.config.min_base_pct + max_pct = self.config.max_base_pct + # Calculate skew factors (0 to 1) - how much to scale orders + if max_pct > min_pct: # Prevent division by zero + # For buys: full size at min_pct, decreasing as we approach max_pct + buy_skew = (max_pct - current_pct) / (max_pct - min_pct) + # For sells: full size at max_pct, decreasing as we approach min_pct + sell_skew = (current_pct - min_pct) / (max_pct - min_pct) + # Ensure values stay between 0.2 and 1.0 (never go below 20% of original size) + buy_skew = max(min(buy_skew, Decimal("1.0")), self.config.max_skew) + sell_skew = max(min(sell_skew, Decimal("1.0")), self.config.max_skew) + else: + buy_skew = sell_skew = Decimal("1.0") + # Create executors for each level + for level_id in levels_to_execute: + trade_type = self.get_trade_type_from_level_id(level_id) + level = self.get_level_from_level_id(level_id) + if trade_type == TradeType.BUY: + spread_in_pct = Decimal(buy_spreads[level]) * Decimal(self.processed_data["spread_multiplier"]) + amount_quote = Decimal(buy_amounts_quote[level]) + skew = buy_skew + else: # TradeType.SELL + spread_in_pct = Decimal(sell_spreads[level]) * Decimal(self.processed_data["spread_multiplier"]) + amount_quote = Decimal(sell_amounts_quote[level]) + skew = sell_skew + # Calculate price + side_multiplier = Decimal("-1") if trade_type == TradeType.BUY else Decimal("1") + price = reference_price * (Decimal("1") + side_multiplier * spread_in_pct) + # Calculate amount with skew applied + amount = self.market_data_provider.quantize_order_amount(self.config.connector_name, + self.config.trading_pair, + (amount_quote / price) * skew) + if amount == Decimal("0"): + self.logger().warning(f"The amount of the level {level_id} is 0. 
Skipping.") + executor_config = self.get_executor_config(level_id, price, amount) + if executor_config is not None: + create_actions.append(CreateExecutorAction( + controller_id=self.config.id, + executor_config=executor_config + )) + return create_actions + + def get_levels_to_execute(self) -> List[str]: + working_levels = self.filter_executors( + executors=self.executors_info, + filter_func=lambda x: x.is_active or (x.close_type == CloseType.STOP_LOSS and self.market_data_provider.time() - x.close_timestamp < self.config.cooldown_time) + ) + working_levels_ids = [executor.custom_info["level_id"] for executor in working_levels] + return self.get_not_active_levels_ids(working_levels_ids) + + def stop_actions_proposal(self) -> List[ExecutorAction]: + """ + Create a list of actions to stop the executors based on order refresh and early stop conditions. + """ + stop_actions = [] + stop_actions.extend(self.executors_to_refresh()) + stop_actions.extend(self.executors_to_early_stop()) + return stop_actions + + def executors_to_refresh(self) -> List[ExecutorAction]: + executors_to_refresh = self.filter_executors( + executors=self.executors_info, + filter_func=lambda x: not x.is_trading and x.is_active and self.market_data_provider.time() - x.timestamp > self.config.executor_refresh_time) + return [StopExecutorAction( + controller_id=self.config.id, + keep_position=True, + executor_id=executor.id) for executor in executors_to_refresh] + + def executors_to_early_stop(self) -> List[ExecutorAction]: + """ + Get the executors to early stop based on the current state of market data. This method can be overridden to + implement custom behavior. + """ + executors_to_early_stop = self.filter_executors( + executors=self.executors_info, + filter_func=lambda x: x.is_active and x.is_trading and self.market_data_provider.time() - x.custom_info["open_order_last_update"] > self.config.cooldown_time) + return [StopExecutorAction( + controller_id=self.config.id, + keep_position=True, + executor_id=executor.id) for executor in executors_to_early_stop] + + async def update_processed_data(self): + """ + Update the processed data for the controller. This method should be reimplemented to modify the reference price + and spread multiplier based on the market data. By default, it will update the reference price as mid price and + the spread multiplier as 1. + """ + reference_price = self.get_current_candles_price() + position_held = next((position for position in self.positions_held if + (position.trading_pair == self.config.trading_pair) & + (position.connector_name == self.config.connector_name)), None) + target_position = self.config.total_amount_quote * self.config.target_base_pct + if position_held is not None: + position_amount = position_held.amount + current_base_pct = position_held.amount_quote / self.config.total_amount_quote + deviation = (target_position - position_held.amount_quote) / target_position + unrealized_pnl_pct = position_held.unrealized_pnl_quote / position_held.amount_quote if position_held.amount_quote != 0 else Decimal("0") + else: + position_amount = 0 + current_base_pct = 0 + deviation = 1 + unrealized_pnl_pct = 0 + + self.processed_data = {"reference_price": Decimal(reference_price), "spread_multiplier": Decimal("1"), + "deviation": deviation, "current_base_pct": current_base_pct, + "unrealized_pnl_pct": unrealized_pnl_pct, "position_amount": position_amount} + + def get_current_candles_price(self) -> Decimal: + """ + Get the current price from the candles data provider. 
+ """ + candles = self.market_data_provider.get_candles_df(self.config.candles_connector_name, + self.config.candles_trading_pair, + self.config.candles_interval) + if candles is not None and not candles.empty: + last_candle = candles.iloc[-1] + return Decimal(last_candle['close']) + else: + self.logger().warning(f"No candles data available for {self.config.candles_connector_name} - {self.config.candles_trading_pair} at {self.config.candles_interval}. Using last known price.") + return Decimal(self.market_data_provider.get_price_by_type(self.config.connector_name, self.config.trading_pair, PriceType.MidPrice)) + + def get_executor_config(self, level_id: str, price: Decimal, amount: Decimal): + """ + Get the executor config for a given level id. + """ + trade_type = self.get_trade_type_from_level_id(level_id) + level_multiplier = self.get_level_from_level_id(level_id) + 1 + return PositionExecutorConfig( + timestamp=self.market_data_provider.time(), + level_id=level_id, + connector_name=self.config.connector_name, + trading_pair=self.config.trading_pair, + entry_price=price, + amount=amount, + triple_barrier_config=self.config.triple_barrier_config.new_instance_with_adjusted_volatility(level_multiplier), + leverage=self.config.leverage, + side=trade_type, + ) + + def get_level_id_from_side(self, trade_type: TradeType, level: int) -> str: + """ + Get the level id based on the trade type and the level. + """ + return f"{trade_type.name.lower()}_{level}" + + def get_trade_type_from_level_id(self, level_id: str) -> TradeType: + return TradeType.BUY if level_id.startswith("buy") else TradeType.SELL + + def get_level_from_level_id(self, level_id: str) -> int: + return int(level_id.split('_')[1]) + + def get_not_active_levels_ids(self, active_levels_ids: List[str]) -> List[str]: + """ + Get the levels to execute based on the current state of the controller. + """ + buy_ids_missing = [self.get_level_id_from_side(TradeType.BUY, level) for level in range(len(self.config.buy_spreads)) + if self.get_level_id_from_side(TradeType.BUY, level) not in active_levels_ids] + sell_ids_missing = [self.get_level_id_from_side(TradeType.SELL, level) for level in range(len(self.config.sell_spreads)) + if self.get_level_id_from_side(TradeType.SELL, level) not in active_levels_ids] + if self.processed_data["current_base_pct"] < self.config.min_base_pct: + return buy_ids_missing + elif self.processed_data["current_base_pct"] > self.config.max_base_pct: + return sell_ids_missing + return buy_ids_missing + sell_ids_missing + + def to_format_status(self) -> List[str]: + """ + Get the status of the controller in a formatted way with ASCII visualizations. 
+ """ + from decimal import Decimal + from itertools import zip_longest + + status = [] + + # Get all required data + base_pct = self.processed_data['current_base_pct'] + min_pct = self.config.min_base_pct + max_pct = self.config.max_base_pct + target_pct = self.config.target_base_pct + skew = base_pct - target_pct + skew_pct = skew / target_pct if target_pct != 0 else Decimal('0') + max_skew = getattr(self.config, 'max_skew', Decimal('0.0')) + + # Fixed widths - adjusted based on screenshot analysis + outer_width = 92 # Total width including outer borders + inner_width = outer_width - 4 # Inner content width + half_width = (inner_width) // 2 - 1 # Width of each column in split sections + bar_width = inner_width - 15 # Width of visualization bars (accounting for label) + + # Header - omit ID since it's shown above in controller header + status.append("╒" + "═" * (inner_width) + "╕") + + header_line = ( + f"{self.config.connector_name}:{self.config.trading_pair} " + f"Price: {self.processed_data['reference_price']} " + f"Alloc: {self.config.portfolio_allocation:.1%} " + f"Spread Mult: {self.processed_data['spread_multiplier']} |" + ) + + status.append(f"│ {header_line:<{inner_width}} │") + + # Position and PnL sections with precise widths + status.append(f"├{'─' * half_width}┬{'─' * half_width}┤") + status.append(f"│ {'POSITION STATUS':<{half_width - 2}} │ {'PROFIT & LOSS':<{half_width - 2}} │") + status.append(f"├{'─' * half_width}┼{'─' * half_width}┤") + + # Position data for left column + position_info = [ + f"Current: {base_pct:.2%}", + f"Target: {target_pct:.2%}", + f"Min/Max: {min_pct:.2%}/{max_pct:.2%}", + f"Skew: {skew_pct:+.2%} (max {max_skew:.2%})" + ] + + # PnL data for right column + pnl_info = [] + if 'unrealized_pnl_pct' in self.processed_data: + pnl = self.processed_data['unrealized_pnl_pct'] + pnl_sign = "+" if pnl >= 0 else "" + pnl_info = [ + f"Unrealized: {pnl_sign}{pnl:.2%}", + f"Take Profit: {self.config.global_take_profit:.2%}", + f"Stop Loss: {-self.config.global_stop_loss:.2%}", + f"Leverage: {self.config.leverage}x" + ] + + # Display position and PnL info side by side with exact spacing + for pos_line, pnl_line in zip_longest(position_info, pnl_info, fillvalue=""): + status.append(f"│ {pos_line:<{half_width - 2}} │ {pnl_line:<{half_width - 2}} │") + + # Adjust visualization section - ensure consistent spacing + status.append(f"├{'─' * (inner_width)}┤") + status.append(f"│ {'VISUALIZATIONS':<{inner_width}} │") + status.append(f"├{'─' * (inner_width)}┤") + + # Position bar with exact spacing and characters + filled_width = int(base_pct * bar_width) + min_pos = int(min_pct * bar_width) + max_pos = int(max_pct * bar_width) + target_pos = int(target_pct * bar_width) + + # Build position bar character by character + position_bar = "" + for i in range(bar_width): + if i == filled_width: + position_bar += "◆" # Current position + elif i == min_pos: + position_bar += "┃" # Min threshold + elif i == max_pos: + position_bar += "┃" # Max threshold + elif i == target_pos: + position_bar += "┇" # Target threshold + elif i < filled_width: + position_bar += "█" # Filled area + else: + position_bar += "░" # Empty area + + # Ensure consistent label spacing as seen in screenshot + status.append(f"│ Position: [{position_bar}] │") + + # Skew visualization with exact spacing + skew_bar_width = bar_width + center = skew_bar_width // 2 + skew_pos = center + int(skew_pct * center * 2) + skew_pos = max(0, min(skew_bar_width - 1, skew_pos)) + + # Build skew bar character by character + 
skew_bar = "" + for i in range(skew_bar_width): + if i == center: + skew_bar += "┃" # Center line + elif i == skew_pos: + skew_bar += "⬤" # Current skew + else: + skew_bar += "─" # Empty line + + # Match spacing from screenshot with exact character counts + status.append(f"│ Skew: [{skew_bar}] │") + + # PnL visualization if available + if 'unrealized_pnl_pct' in self.processed_data: + pnl = self.processed_data['unrealized_pnl_pct'] + take_profit = self.config.global_take_profit + stop_loss = -self.config.global_stop_loss + + pnl_bar_width = bar_width + center = pnl_bar_width // 2 + + # Calculate positions with exact scaling + max_range = max(abs(take_profit), abs(stop_loss), abs(pnl)) * Decimal("1.2") + scale = (pnl_bar_width // 2) / max_range + + pnl_pos = center + int(pnl * scale) + take_profit_pos = center + int(take_profit * scale) + stop_loss_pos = center + int(stop_loss * scale) + + # Ensure positions are within bounds + pnl_pos = max(0, min(pnl_bar_width - 1, pnl_pos)) + take_profit_pos = max(0, min(pnl_bar_width - 1, take_profit_pos)) + stop_loss_pos = max(0, min(pnl_bar_width - 1, stop_loss_pos)) + + # Build PnL bar character by character + pnl_bar = "" + for i in range(pnl_bar_width): + if i == center: + pnl_bar += "│" # Center line + elif i == pnl_pos: + pnl_bar += "⬤" # Current PnL + elif i == take_profit_pos: + pnl_bar += "T" # Take profit line + elif i == stop_loss_pos: + pnl_bar += "S" # Stop loss line + elif (pnl >= 0 and center <= i < pnl_pos) or (pnl < 0 and pnl_pos < i <= center): + pnl_bar += "█" if pnl >= 0 else "▓" + else: + pnl_bar += "─" + + # Match spacing from screenshot + status.append(f"│ PnL: [{pnl_bar}] │") + + # Executors section with precise column widths + status.append(f"├{'─' * half_width}┬{'─' * half_width}┤") + status.append(f"│ {'EXECUTORS STATUS':<{half_width - 2}} │ {'EXECUTOR VISUALIZATION':<{half_width - 2}} │") + status.append(f"├{'─' * half_width}┼{'─' * half_width}┤") + + # Count active executors by type + active_buy = sum(1 for info in self.executors_info + if info.is_active and self.get_trade_type_from_level_id(info.custom_info["level_id"]) == TradeType.BUY) + active_sell = sum(1 for info in self.executors_info + if info.is_active and self.get_trade_type_from_level_id(info.custom_info["level_id"]) == TradeType.SELL) + total_active = sum(1 for info in self.executors_info if info.is_active) + + # Executor information with fixed formatting + executor_info = [ + f"Total Active: {total_active}", + f"Total Created: {len(self.executors_info)}", + f"Buy Executors: {active_buy}", + f"Sell Executors: {active_sell}" + ] + + if 'deviation' in self.processed_data: + executor_info.append(f"Target Deviation: {self.processed_data['deviation']:.4f}") + + # Visualization with consistent block characters for buy/sell representation + buy_bars = "▮" * active_buy if active_buy > 0 else "─" + sell_bars = "▮" * active_sell if active_sell > 0 else "─" + + executor_viz = [ + f"Buy: {buy_bars}", + f"Sell: {sell_bars}" + ] + + # Display with fixed width columns + for exec_line, viz_line in zip_longest(executor_info, executor_viz, fillvalue=""): + status.append(f"│ {exec_line:<{half_width - 2}} │ {viz_line:<{half_width - 2}} │") + + # Bottom border with exact width + status.append(f"╘{'═' * (inner_width)}╛") + + return status diff --git a/bots/controllers/generic/stat_arb.py b/bots/controllers/generic/stat_arb.py new file mode 100644 index 00000000..527db07a --- /dev/null +++ b/bots/controllers/generic/stat_arb.py @@ -0,0 +1,475 @@ +from decimal import Decimal +from 
typing import List + +import numpy as np +from sklearn.linear_model import LinearRegression + +from hummingbot.core.data_type.common import OrderType, PositionAction, PositionMode, PriceType, TradeType +from hummingbot.data_feed.candles_feed.data_types import CandlesConfig +from hummingbot.strategy_v2.controllers import ControllerBase, ControllerConfigBase +from hummingbot.strategy_v2.executors.data_types import ConnectorPair, PositionSummary +from hummingbot.strategy_v2.executors.order_executor.data_types import ExecutionStrategy, OrderExecutorConfig +from hummingbot.strategy_v2.executors.position_executor.data_types import PositionExecutorConfig, TripleBarrierConfig +from hummingbot.strategy_v2.models.executor_actions import CreateExecutorAction, ExecutorAction, StopExecutorAction + + +class StatArbConfig(ControllerConfigBase): + """ + Configuration for a statistical arbitrage controller that trades two cointegrated assets. + """ + controller_type: str = "generic" + controller_name: str = "stat_arb" + candles_config: List[CandlesConfig] = [] + connector_pair_dominant: ConnectorPair = ConnectorPair(connector_name="binance_perpetual", trading_pair="SOL-USDT") + connector_pair_hedge: ConnectorPair = ConnectorPair(connector_name="binance_perpetual", trading_pair="POPCAT-USDT") + interval: str = "1m" + lookback_period: int = 300 + entry_threshold: Decimal = Decimal("2.0") + take_profit: Decimal = Decimal("0.0008") + tp_global: Decimal = Decimal("0.01") + sl_global: Decimal = Decimal("0.05") + min_amount_quote: Decimal = Decimal("10") + quoter_spread: Decimal = Decimal("0.0001") + quoter_cooldown: int = 30 + quoter_refresh: int = 10 + max_orders_placed_per_side: int = 2 + max_orders_filled_per_side: int = 2 + max_position_deviation: Decimal = Decimal("0.1") + pos_hedge_ratio: Decimal = Decimal("1.0") + leverage: int = 20 + position_mode: PositionMode = PositionMode.HEDGE + + @property + def triple_barrier_config(self) -> TripleBarrierConfig: + return TripleBarrierConfig( + take_profit=self.take_profit, + open_order_type=OrderType.LIMIT_MAKER, + take_profit_order_type=OrderType.LIMIT_MAKER, + ) + + def update_markets(self, markets: dict) -> dict: + """Update markets dictionary with both trading pairs""" + # Add dominant pair + if self.connector_pair_dominant.connector_name not in markets: + markets[self.connector_pair_dominant.connector_name] = set() + markets[self.connector_pair_dominant.connector_name].add(self.connector_pair_dominant.trading_pair) + + # Add hedge pair + if self.connector_pair_hedge.connector_name not in markets: + markets[self.connector_pair_hedge.connector_name] = set() + markets[self.connector_pair_hedge.connector_name].add(self.connector_pair_hedge.trading_pair) + + return markets + + +class StatArb(ControllerBase): + """ + Statistical arbitrage controller that trades two cointegrated assets. 
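# How StatArb splits capital from pos_hedge_ratio (see __init__ below):
# assuming total_amount_quote = 1000, the default ratio of 1.0 gives the two
# legs an even 500/500 split; a ratio of 2 would weight the hedge leg 2:1.
from decimal import Decimal

total, ratio = Decimal("1000"), Decimal("1.0")
dominant_quote = total * (1 / (1 + ratio))   # 500
hedge_quote = total * (ratio / (1 + ratio))  # 500
assert dominant_quote + hedge_quote == total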
+ """ + + def __init__(self, config: StatArbConfig, *args, **kwargs): + super().__init__(config, *args, **kwargs) + self.config = config + self.theoretical_dominant_quote = self.config.total_amount_quote * (1 / (1 + self.config.pos_hedge_ratio)) + self.theoretical_hedge_quote = self.config.total_amount_quote * (self.config.pos_hedge_ratio / (1 + self.config.pos_hedge_ratio)) + + # Initialize processed data dictionary + self.processed_data = { + "dominant_price": None, + "hedge_price": None, + "spread": None, + "z_score": None, + "hedge_ratio": None, + "position_dominant": Decimal("0"), + "position_hedge": Decimal("0"), + "active_orders_dominant": [], + "active_orders_hedge": [], + "pair_pnl": Decimal("0"), + "signal": 0 # 0: no signal, 1: long dominant/short hedge, -1: short dominant/long hedge + } + + # Setup candles config if not already set + if len(self.config.candles_config) == 0: + max_records = self.config.lookback_period + 20 # extra records for safety + self.max_records = max_records + self.config.candles_config = [ + CandlesConfig( + connector=self.config.connector_pair_dominant.connector_name, + trading_pair=self.config.connector_pair_dominant.trading_pair, + interval=self.config.interval, + max_records=max_records + ), + CandlesConfig( + connector=self.config.connector_pair_hedge.connector_name, + trading_pair=self.config.connector_pair_hedge.trading_pair, + interval=self.config.interval, + max_records=max_records + ) + ] + if "_perpetual" in self.config.connector_pair_dominant.connector_name: + connector = self.market_data_provider.get_connector(self.config.connector_pair_dominant.connector_name) + connector.set_position_mode(self.config.position_mode) + connector.set_leverage(self.config.connector_pair_dominant.trading_pair, self.config.leverage) + if "_perpetual" in self.config.connector_pair_hedge.connector_name: + connector = self.market_data_provider.get_connector(self.config.connector_pair_hedge.connector_name) + connector.set_position_mode(self.config.position_mode) + connector.set_leverage(self.config.connector_pair_hedge.trading_pair, self.config.leverage) + + def determine_executor_actions(self) -> List[ExecutorAction]: + """ + The execution logic for the statistical arbitrage strategy. + Market Data Conditions: Signal is generated based on the z-score of the spread between the two assets. + If signal == 1 --> long dominant/short hedge + If signal == -1 --> short dominant/long hedge + Execution Conditions: If the signal is generated add position executors to quote from the dominant and hedge markets. + We compare the current position with the theoretical position for the dominant and hedge assets. + If the current position + the active placed amount is greater than the theoretical position, can't place more orders. + If the imbalance scaled pct is greater than the threshold, we avoid placing orders in the market passed on filtered_connector_pair. + If the pnl of total position is greater than the take profit or lower than the stop loss, we close the position. 
+ """ + actions: List[ExecutorAction] = [] + # Check global take profit and stop loss + if self.processed_data["pair_pnl_pct"] > self.config.tp_global or self.processed_data["pair_pnl_pct"] < -self.config.sl_global: + # Close all positions + for position in self.positions_held: + actions.extend(self.get_executors_to_reduce_position(position)) + return actions + # Check the signal + elif self.processed_data["signal"] != 0: + actions.extend(self.get_executors_to_quote()) + actions.extend(self.get_executors_to_reduce_position_on_opposite_signal()) + + # Get the executors to keep position after a cooldown is reached + actions.extend(self.get_executors_to_keep_position()) + actions.extend(self.get_executors_to_refresh()) + + return actions + + def get_executors_to_reduce_position_on_opposite_signal(self) -> List[ExecutorAction]: + if self.processed_data["signal"] == 1: + dominant_side, hedge_side = TradeType.SELL, TradeType.BUY + elif self.processed_data["signal"] == -1: + dominant_side, hedge_side = TradeType.BUY, TradeType.SELL + else: + return [] + # Get executors to stop + dominant_active_executors_to_stop = self.filter_executors(self.executors_info, filter_func=lambda e: e.connector_name == self.config.connector_pair_dominant.connector_name and e.trading_pair == self.config.connector_pair_dominant.trading_pair and e.side == dominant_side) + hedge_active_executors_to_stop = self.filter_executors(self.executors_info, filter_func=lambda e: e.connector_name == self.config.connector_pair_hedge.connector_name and e.trading_pair == self.config.connector_pair_hedge.trading_pair and e.side == hedge_side) + stop_actions = [StopExecutorAction(controller_id=self.config.id, executor_id=executor.id, keep_position=False) for executor in dominant_active_executors_to_stop + hedge_active_executors_to_stop] + + # Get order executors to reduce positions + reduce_actions: List[ExecutorAction] = [] + for position in self.positions_held: + if position.connector_name == self.config.connector_pair_dominant.connector_name and position.trading_pair == self.config.connector_pair_dominant.trading_pair and position.side == dominant_side: + reduce_actions.extend(self.get_executors_to_reduce_position(position)) + elif position.connector_name == self.config.connector_pair_hedge.connector_name and position.trading_pair == self.config.connector_pair_hedge.trading_pair and position.side == hedge_side: + reduce_actions.extend(self.get_executors_to_reduce_position(position)) + return stop_actions + reduce_actions + + def get_executors_to_keep_position(self) -> List[ExecutorAction]: + stop_actions: List[ExecutorAction] = [] + for executor in self.processed_data["executors_dominant_filled"] + self.processed_data["executors_hedge_filled"]: + if self.market_data_provider.time() - executor.timestamp >= self.config.quoter_cooldown: + # Create a new executor to keep the position + stop_actions.append(StopExecutorAction(controller_id=self.config.id, executor_id=executor.id, keep_position=True)) + return stop_actions + + def get_executors_to_refresh(self) -> List[ExecutorAction]: + refresh_actions: List[ExecutorAction] = [] + for executor in self.processed_data["executors_dominant_placed"] + self.processed_data["executors_hedge_placed"]: + if self.market_data_provider.time() - executor.timestamp >= self.config.quoter_refresh: + # Create a new executor to refresh the position + refresh_actions.append(StopExecutorAction(controller_id=self.config.id, executor_id=executor.id, keep_position=False)) + return refresh_actions + + def 
get_executors_to_quote(self) -> List[ExecutorAction]: + """ + Get Order Executor to quote from the dominant and hedge markets. + """ + actions: List[ExecutorAction] = [] + trade_type_dominant = TradeType.BUY if self.processed_data["signal"] == 1 else TradeType.SELL + trade_type_hedge = TradeType.SELL if self.processed_data["signal"] == 1 else TradeType.BUY + + # Analyze dominant active orders, max deviation and imbalance to create a new executor + if self.processed_data["dominant_gap"] > Decimal("0") and \ + self.processed_data["filter_connector_pair"] != self.config.connector_pair_dominant and \ + len(self.processed_data["executors_dominant_placed"]) < self.config.max_orders_placed_per_side and \ + len(self.processed_data["executors_dominant_filled"]) < self.config.max_orders_filled_per_side: + # Create Position Executor for dominant asset + if trade_type_dominant == TradeType.BUY: + price = self.processed_data["min_price_dominant"] * (1 - self.config.quoter_spread) + else: + price = self.processed_data["max_price_dominant"] * (1 + self.config.quoter_spread) + dominant_executor_config = PositionExecutorConfig( + timestamp=self.market_data_provider.time(), + connector_name=self.config.connector_pair_dominant.connector_name, + trading_pair=self.config.connector_pair_dominant.trading_pair, + side=trade_type_dominant, + entry_price=price, + amount=self.config.min_amount_quote / self.processed_data["dominant_price"], + triple_barrier_config=self.config.triple_barrier_config, + leverage=self.config.leverage, + ) + actions.append(CreateExecutorAction(controller_id=self.config.id, executor_config=dominant_executor_config)) + + # Analyze hedge active orders, max deviation and imbalance to create a new executor + if self.processed_data["hedge_gap"] > Decimal("0") and \ + self.processed_data["filter_connector_pair"] != self.config.connector_pair_hedge and \ + len(self.processed_data["executors_hedge_placed"]) < self.config.max_orders_placed_per_side and \ + len(self.processed_data["executors_hedge_filled"]) < self.config.max_orders_filled_per_side: + # Create Position Executor for hedge asset + if trade_type_hedge == TradeType.BUY: + price = self.processed_data["min_price_hedge"] * (1 - self.config.quoter_spread) + else: + price = self.processed_data["max_price_hedge"] * (1 + self.config.quoter_spread) + hedge_executor_config = PositionExecutorConfig( + timestamp=self.market_data_provider.time(), + connector_name=self.config.connector_pair_hedge.connector_name, + trading_pair=self.config.connector_pair_hedge.trading_pair, + side=trade_type_hedge, + entry_price=price, + amount=self.config.min_amount_quote / self.processed_data["hedge_price"], + triple_barrier_config=self.config.triple_barrier_config, + leverage=self.config.leverage, + ) + actions.append(CreateExecutorAction(controller_id=self.config.id, executor_config=hedge_executor_config)) + return actions + + def get_executors_to_reduce_position(self, position: PositionSummary) -> List[ExecutorAction]: + """ + Get Order Executor to reduce position. 
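# The quote-price arithmetic from get_executors_to_quote above, assuming
# quoter_spread = 0.0001 and existing placed orders framing the book at
# 100.00: each new quote is pushed one spread further out on its side.
from decimal import Decimal

quoter_spread = Decimal("0.0001")
buy_price = Decimal("100.00") * (1 - quoter_spread)   # below min placed price
sell_price = Decimal("100.00") * (1 + quoter_spread)  # above max placed price
assert (buy_price, sell_price) == (Decimal("99.99"), Decimal("100.01"))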
+ """ + if position.amount > Decimal("0"): + # Close position + config = OrderExecutorConfig( + timestamp=self.market_data_provider.time(), + connector_name=position.connector_name, + trading_pair=position.trading_pair, + side=TradeType.BUY if position.side == TradeType.SELL else TradeType.SELL, + amount=position.amount, + position_action=PositionAction.CLOSE, + execution_strategy=ExecutionStrategy.MARKET, + leverage=self.config.leverage, + ) + return [CreateExecutorAction(controller_id=self.config.id, executor_config=config)] + return [] + + async def update_processed_data(self): + """ + Update processed data with the latest market information and statistical calculations + needed for the statistical arbitrage strategy. + """ + # Stat arb analysis + spread, z_score = self.get_spread_and_z_score() + + # Generate trading signal based on z-score + entry_threshold = float(self.config.entry_threshold) + if z_score > entry_threshold: + # Spread is too high, expect it to revert: long dominant, short hedge + signal = 1 + dominant_side, hedge_side = TradeType.BUY, TradeType.SELL + elif z_score < -entry_threshold: + # Spread is too low, expect it to revert: short dominant, long hedge + signal = -1 + dominant_side, hedge_side = TradeType.SELL, TradeType.BUY + else: + # No signal + signal = 0 + dominant_side, hedge_side = None, None + + # Current prices + dominant_price, hedge_price = self.get_pairs_prices() + + # Get current positions stats by signal + positions_dominant = next((position for position in self.positions_held if position.connector_name == self.config.connector_pair_dominant.connector_name and position.trading_pair == self.config.connector_pair_dominant.trading_pair and (position.side == dominant_side or dominant_side is None)), None) + positions_hedge = next((position for position in self.positions_held if position.connector_name == self.config.connector_pair_hedge.connector_name and position.trading_pair == self.config.connector_pair_hedge.trading_pair and (position.side == hedge_side or hedge_side is None)), None) + # Get position stats + position_dominant_quote = positions_dominant.amount_quote if positions_dominant else Decimal("0") + position_hedge_quote = positions_hedge.amount_quote if positions_hedge else Decimal("0") + position_dominant_pnl_quote = positions_dominant.global_pnl_quote if positions_dominant else Decimal("0") + position_hedge_pnl_quote = positions_hedge.global_pnl_quote if positions_hedge else Decimal("0") + pair_pnl_pct = (position_dominant_pnl_quote + position_hedge_pnl_quote) / (position_dominant_quote + position_hedge_quote) if (position_dominant_quote + position_hedge_quote) != 0 else Decimal("0") + # Get active executors + executors_dominant_placed, executors_dominant_filled = self.get_executors_dominant() + executors_hedge_placed, executors_hedge_filled = self.get_executors_hedge() + min_price_dominant = Decimal(str(min([executor.config.entry_price for executor in executors_dominant_placed]))) if executors_dominant_placed else None + max_price_dominant = Decimal(str(max([executor.config.entry_price for executor in executors_dominant_placed]))) if executors_dominant_placed else None + min_price_hedge = Decimal(str(min([executor.config.entry_price for executor in executors_hedge_placed]))) if executors_hedge_placed else None + max_price_hedge = Decimal(str(max([executor.config.entry_price for executor in executors_hedge_placed]))) if executors_hedge_placed else None + + active_amount_dominant = Decimal(str(sum([executor.filled_amount_quote for executor in 
executors_dominant_filled]))) + active_amount_hedge = Decimal(str(sum([executor.filled_amount_quote for executor in executors_hedge_filled]))) + + # Compute imbalance based on the hedge ratio + dominant_gap = self.theoretical_dominant_quote - position_dominant_quote - active_amount_dominant + hedge_gap = self.theoretical_hedge_quote - position_hedge_quote - active_amount_hedge + imbalance = position_dominant_quote - position_hedge_quote + imbalance_scaled = position_dominant_quote - position_hedge_quote * self.config.pos_hedge_ratio + imbalance_scaled_pct = imbalance_scaled / position_dominant_quote if position_dominant_quote != Decimal("0") else Decimal("0") + filter_connector_pair = None + if imbalance_scaled_pct > self.config.max_position_deviation: + # Avoid placing orders in the dominant market + filter_connector_pair = self.config.connector_pair_dominant + elif imbalance_scaled_pct < -self.config.max_position_deviation: + # Avoid placing orders in the hedge market + filter_connector_pair = self.config.connector_pair_hedge + + # Update processed data + self.processed_data.update({ + "dominant_price": Decimal(str(dominant_price)), + "hedge_price": Decimal(str(hedge_price)), + "spread": Decimal(str(spread)), + "z_score": Decimal(str(z_score)), + "dominant_gap": Decimal(str(dominant_gap)), + "hedge_gap": Decimal(str(hedge_gap)), + "position_dominant_quote": position_dominant_quote, + "position_hedge_quote": position_hedge_quote, + "active_amount_dominant": active_amount_dominant, + "active_amount_hedge": active_amount_hedge, + "signal": signal, + # Store full dataframes for reference + "imbalance": Decimal(str(imbalance)), + "imbalance_scaled_pct": Decimal(str(imbalance_scaled_pct)), + "filter_connector_pair": filter_connector_pair, + "min_price_dominant": min_price_dominant if min_price_dominant is not None else Decimal(str(dominant_price)), + "max_price_dominant": max_price_dominant if max_price_dominant is not None else Decimal(str(dominant_price)), + "min_price_hedge": min_price_hedge if min_price_hedge is not None else Decimal(str(hedge_price)), + "max_price_hedge": max_price_hedge if max_price_hedge is not None else Decimal(str(hedge_price)), + "executors_dominant_filled": executors_dominant_filled, + "executors_hedge_filled": executors_hedge_filled, + "executors_dominant_placed": executors_dominant_placed, + "executors_hedge_placed": executors_hedge_placed, + "pair_pnl_pct": pair_pnl_pct, + }) + + def get_spread_and_z_score(self): + # Fetch candle data for both assets + dominant_df = self.market_data_provider.get_candles_df( + connector_name=self.config.connector_pair_dominant.connector_name, + trading_pair=self.config.connector_pair_dominant.trading_pair, + interval=self.config.interval, + max_records=self.max_records + ) + + hedge_df = self.market_data_provider.get_candles_df( + connector_name=self.config.connector_pair_hedge.connector_name, + trading_pair=self.config.connector_pair_hedge.trading_pair, + interval=self.config.interval, + max_records=self.max_records + ) + + if dominant_df.empty or hedge_df.empty: + self.logger().warning("Not enough candle data available for statistical analysis") + return + + # Extract close prices + dominant_prices = dominant_df['close'].values + hedge_prices = hedge_df['close'].values + + # Ensure we have enough data and both series have the same length + min_length = min(len(dominant_prices), len(hedge_prices)) + if min_length < self.config.lookback_period: + self.logger().warning( + f"Not enough data points for analysis. 
Required: {self.config.lookback_period}, Available: {min_length}") + return + + # Use the most recent data points + dominant_prices = dominant_prices[-self.config.lookback_period:] + hedge_prices = hedge_prices[-self.config.lookback_period:] + + # Convert to numpy arrays + dominant_prices_np = np.array(dominant_prices, dtype=float) + hedge_prices_np = np.array(hedge_prices, dtype=float) + + # Calculate percentage returns + dominant_pct_change = np.diff(dominant_prices_np) / dominant_prices_np[:-1] + hedge_pct_change = np.diff(hedge_prices_np) / hedge_prices_np[:-1] + + # Convert to cumulative returns + dominant_cum_returns = np.cumprod(dominant_pct_change + 1) + hedge_cum_returns = np.cumprod(hedge_pct_change + 1) + + # Normalize to start at 1 + dominant_cum_returns = dominant_cum_returns / dominant_cum_returns[0] if len(dominant_cum_returns) > 0 else np.array([1.0]) + hedge_cum_returns = hedge_cum_returns / hedge_cum_returns[0] if len(hedge_cum_returns) > 0 else np.array([1.0]) + + # Perform linear regression + dominant_cum_returns_reshaped = dominant_cum_returns.reshape(-1, 1) + reg = LinearRegression().fit(dominant_cum_returns_reshaped, hedge_cum_returns) + alpha = reg.intercept_ + beta = reg.coef_[0] + self.processed_data.update({ + "alpha": alpha, + "beta": beta, + }) + + # Calculate spread as percentage difference from predicted value + y_pred = alpha + beta * dominant_cum_returns + spread_pct = (hedge_cum_returns - y_pred) / y_pred * 100 + + # Calculate z-score + mean_spread = np.mean(spread_pct) + std_spread = np.std(spread_pct) + if std_spread == 0: + self.logger().warning("Standard deviation of spread is zero, cannot calculate z-score") + return + + current_spread = spread_pct[-1] + current_z_score = (current_spread - mean_spread) / std_spread + + return current_spread, current_z_score + + def get_pairs_prices(self): + current_dominant_price = self.market_data_provider.get_price_by_type( + connector_name=self.config.connector_pair_dominant.connector_name, + trading_pair=self.config.connector_pair_dominant.trading_pair, price_type=PriceType.MidPrice) + + current_hedge_price = self.market_data_provider.get_price_by_type( + connector_name=self.config.connector_pair_hedge.connector_name, + trading_pair=self.config.connector_pair_hedge.trading_pair, price_type=PriceType.MidPrice) + return current_dominant_price, current_hedge_price + + def get_executors_dominant(self): + active_executors_dominant_placed = self.filter_executors( + self.executors_info, + filter_func=lambda e: e.connector_name == self.config.connector_pair_dominant.connector_name and e.trading_pair == self.config.connector_pair_dominant.trading_pair and e.is_active and not e.is_trading and e.type == "position_executor" + ) + active_executors_dominant_filled = self.filter_executors( + self.executors_info, + filter_func=lambda e: e.connector_name == self.config.connector_pair_dominant.connector_name and e.trading_pair == self.config.connector_pair_dominant.trading_pair and e.is_active and e.is_trading and e.type == "position_executor" + ) + return active_executors_dominant_placed, active_executors_dominant_filled + + def get_executors_hedge(self): + active_executors_hedge_placed = self.filter_executors( + self.executors_info, + filter_func=lambda e: e.connector_name == self.config.connector_pair_hedge.connector_name and e.trading_pair == self.config.connector_pair_hedge.trading_pair and e.is_active and not e.is_trading and e.type == "position_executor" + ) + active_executors_hedge_filled = self.filter_executors( + 
self.executors_info, + filter_func=lambda e: e.connector_name == self.config.connector_pair_hedge.connector_name and e.trading_pair == self.config.connector_pair_hedge.trading_pair and e.is_active and e.is_trading and e.type == "position_executor" + ) + return active_executors_hedge_placed, active_executors_hedge_filled + + def to_format_status(self) -> List[str]: + """ + Format the status of the controller for display. + """ + status_lines = [] + status_lines.append(f""" +Dominant Pair: {self.config.connector_pair_dominant} | Hedge Pair: {self.config.connector_pair_hedge} | +Timeframe: {self.config.interval} | Lookback Period: {self.config.lookback_period} | Entry Threshold: {self.config.entry_threshold} + +Positions targets: +Theoretical Dominant : {self.theoretical_dominant_quote} | Theoretical Hedge: {self.theoretical_hedge_quote} | Position Hedge Ratio: {self.config.pos_hedge_ratio} +Position Dominant : {self.processed_data['position_dominant_quote']:.2f} | Position Hedge: {self.processed_data['position_hedge_quote']:.2f} | Imbalance: {self.processed_data['imbalance']:.2f} | Imbalance Scaled: {self.processed_data['imbalance_scaled_pct']:.2f} % + +Current Executors: +Active Orders Dominant : {len(self.processed_data['executors_dominant_placed'])} | Active Orders Hedge : {len(self.processed_data['executors_hedge_placed'])} | +Active Orders Dominant Filled: {len(self.processed_data['executors_dominant_filled'])} | Active Orders Hedge Filled: {len(self.processed_data['executors_hedge_filled'])} + +Signal: {self.processed_data['signal']:.2f} | Z-Score: {self.processed_data['z_score']:.2f} | Spread: {self.processed_data['spread']:.2f} +Alpha : {self.processed_data['alpha']:.2f} | Beta: {self.processed_data['beta']:.2f} +Pair PnL PCT: {self.processed_data['pair_pnl_pct'] * 100:.2f} % +""") + return status_lines diff --git a/bots/controllers/market_making/dman_maker_v2.py b/bots/controllers/market_making/dman_maker_v2.py index 6cba442e..2002fddd 100644 --- a/bots/controllers/market_making/dman_maker_v2.py +++ b/bots/controllers/market_making/dman_maker_v2.py @@ -2,6 +2,8 @@ from typing import List, Optional import pandas_ta as ta # noqa: F401 +from pydantic import Field, field_validator + from hummingbot.core.data_type.common import TradeType from hummingbot.data_feed.candles_feed.data_types import CandlesConfig from hummingbot.strategy_v2.controllers.market_making_controller_base import ( @@ -10,7 +12,6 @@ ) from hummingbot.strategy_v2.executors.dca_executor.data_types import DCAExecutorConfig, DCAMode from hummingbot.strategy_v2.models.executor_actions import ExecutorAction, StopExecutorAction -from pydantic import Field, field_validator class DManMakerV2Config(MarketMakingControllerConfigBase): From 52beebd7c3d46b2e72c46a69d00778ebd0985a68 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 11 Jun 2025 15:02:35 +0800 Subject: [PATCH 046/244] (feat) refactor market data endpoints to user mdp --- routers/market_data.py | 112 +++++++++++++++++++++++++++++++++++------ 1 file changed, 96 insertions(+), 16 deletions(-) diff --git a/routers/market_data.py b/routers/market_data.py index 38aadf8f..6686c781 100644 --- a/routers/market_data.py +++ b/routers/market_data.py @@ -1,37 +1,117 @@ import asyncio -from fastapi import APIRouter -from hummingbot.data_feed.candles_feed.candles_factory import CandlesFactory +from fastapi import APIRouter, Request from hummingbot.data_feed.candles_feed.data_types import CandlesConfig, HistoricalCandlesConfig +from services.market_data_feed_manager import 
MarketDataFeedManager router = APIRouter(tags=["Market"], prefix="/market-data") -candles_factory = CandlesFactory() -@router.post("/real-time-candles") -async def get_candles(candles_config: CandlesConfig): +@router.post("/candles") +async def get_candles(request: Request, candles_config: CandlesConfig): + """ + Get real-time candles data for a specific trading pair. + + This endpoint uses the MarketDataProvider to get or create a candles feed that will + automatically start and maintain real-time updates. Subsequent requests with the same + configuration will reuse the existing feed for up-to-date data. + + Args: + request: FastAPI request object + candles_config: Configuration for the candles including connector, trading_pair, interval, and max_records + + Returns: + Real-time candles data or error message + """ try: - candles = candles_factory.get_candle(candles_config) - candles.start() - while not candles.ready: - await asyncio.sleep(1) - df = candles.candles_df - candles.stop() - df.drop_duplicates(subset=["timestamp"], inplace=True) - return df + market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager + + # Get or create the candles feed (this will start it automatically and track access time) + candles_feed = market_data_feed_manager.get_candles_feed(candles_config) + + # Wait for the candles feed to be ready + while not candles_feed.ready: + await asyncio.sleep(0.1) + + # Get the candles dataframe + df = candles_feed.candles_df + + if df is not None and not df.empty: + # Limit to requested max_records and remove duplicates + df = df.tail(candles_config.max_records) + df = df.drop_duplicates(subset=["timestamp"], keep="last") + # Convert to dict for JSON serialization + return df.to_dict(orient="records") + else: + return {"error": "No candles data available"} + except Exception as e: return {"error": str(e)} @router.post("/historical-candles") -async def get_historical_candles(config: HistoricalCandlesConfig): +async def get_historical_candles(request: Request, config: HistoricalCandlesConfig): + """ + Get historical candles data for a specific trading pair. + + Args: + config: Configuration for historical candles including connector, trading pair, interval, start and end time + + Returns: + Historical candles data or error message + """ try: + market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager + + # Create candles config from historical config candles_config = CandlesConfig( connector=config.connector_name, trading_pair=config.trading_pair, interval=config.interval ) - candles = candles_factory.get_candle(candles_config) - return await candles.get_historical_candles(config=config) + + # Get or create the candles feed (this will track access time) + candles = market_data_feed_manager.get_candles_feed(candles_config) + + # Fetch historical candles + historical_data = await candles.get_historical_candles(config=config) + + if historical_data is not None and not historical_data.empty: + # Convert to dict for JSON serialization + return historical_data.to_dict(orient="records") + else: + return {"error": "No historical data available"} + except Exception as e: return {"error": str(e)} + + +@router.get("/active-feeds") +async def get_active_feeds(request: Request): + """ + Get information about currently active market data feeds. 
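A minimal client sketch for the candles endpoints above, assuming a local deployment on port 8000, the default admin/admin basic-auth pair from setup.sh, and that the requests library is available; the payload fields mirror CandlesConfig (connector, trading_pair, interval, max_records):

import requests

BASE = "http://localhost:8000"  # hypothetical local deployment
AUTH = ("admin", "admin")       # USERNAME/PASSWORD defaults

payload = {
    "connector": "binance",
    "trading_pair": "BTC-USDT",
    "interval": "1m",
    "max_records": 100,
}

# The first call starts the feed; later identical calls reuse it (and
# refresh its last-access time, keeping it alive past the TTL cleanup).
resp = requests.post(f"{BASE}/market-data/candles", json=payload, auth=AUTH, timeout=60)
resp.raise_for_status()
candles = resp.json()  # list of candle rows, or {"error": ...} on failure
print(len(candles) if isinstance(candles, list) else candles)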
+ + Returns: + Dictionary with active feeds information including last access times and expiration + """ + try: + market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager + return market_data_feed_manager.get_active_feeds_info() + except Exception as e: + return {"error": str(e)} + + +@router.get("/settings") +async def get_market_data_settings(): + """ + Get current market data settings for debugging. + + Returns: + Current market data configuration + """ + from config import settings + return { + "cleanup_interval": settings.market_data.cleanup_interval, + "feed_timeout": settings.market_data.feed_timeout, + "description": "cleanup_interval: seconds between cleanup runs, feed_timeout: seconds before unused feeds expire" + } From a1b44a03fbcfe1485681b02c6c6c88f00d301805 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 11 Jun 2025 15:02:43 +0800 Subject: [PATCH 047/244] (feat) default bt to 2025 --- routers/backtesting.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/routers/backtesting.py b/routers/backtesting.py index 16de5a4b..7dd0c7b3 100644 --- a/routers/backtesting.py +++ b/routers/backtesting.py @@ -13,8 +13,8 @@ class BacktestingConfig(BaseModel): - start_time: int = 1672542000 # 2023-01-01 00:00:00 - end_time: int = 1672628400 # 2023-01-01 23:59:00 + start_time: int = 1735689600 # 2025-01-01 00:00:00 + end_time: int = 1738368000 # 2025-02-01 00:00:00 backtesting_resolution: str = "1m" trade_cost: float = 0.0006 config: Union[Dict, str] From c8344ab5705c06135c1cac908ce1a0199c96ebe2 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 11 Jun 2025 15:02:57 +0800 Subject: [PATCH 048/244] (feat) refactor conf module to use pydantic settings --- config.py | 137 ++++++++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 122 insertions(+), 15 deletions(-) diff --git a/config.py b/config.py index 18ab0270..f444872a 100644 --- a/config.py +++ b/config.py @@ -1,19 +1,126 @@ -import os +from typing import List +from pydantic import Field +from pydantic_settings import BaseSettings, SettingsConfigDict -from dotenv import load_dotenv -load_dotenv() +class BrokerSettings(BaseSettings): + """MQTT Broker configuration for bot communication.""" + + host: str = Field(default="localhost", description="MQTT broker host") + port: int = Field(default=1883, description="MQTT broker port") + username: str = Field(default="admin", description="MQTT broker username") + password: str = Field(default="password", description="MQTT broker password") -CONTROLLERS_PATH = "bots/conf/controllers" -CONTROLLERS_MODULE = "bots.controllers" -CONFIG_PASSWORD = os.getenv("CONFIG_PASSWORD", "a") -BROKER_HOST = os.getenv("BROKER_HOST", "localhost") -BROKER_PORT = int(os.getenv("BROKER_PORT", 1883)) -BROKER_USERNAME = os.getenv("BROKER_USERNAME", "admin") -BROKER_PASSWORD = os.getenv("BROKER_PASSWORD", "password") -PASSWORD_VERIFICATION_PATH = "bots/credentials/master_account/.password_verification" -BANNED_TOKENS = os.getenv("BANNED_TOKENS", "NAV,ARS,ETHW,ETHF").split(",") -LOGFIRE_ENVIRONMENT = os.getenv("LOGFIRE_ENVIRONMENT", "dev") + model_config = SettingsConfigDict(env_prefix="BROKER_", extra="ignore") -# Database configuration -DATABASE_URL = os.getenv("DATABASE_URL", "postgresql+asyncpg://hbot:backend-api@localhost:5432/backend_api") + +class DatabaseSettings(BaseSettings): + """Database configuration.""" + + url: str = Field( + default="postgresql+asyncpg://hbot:backend-api@localhost:5432/backend_api", + description="Database connection 
URL" + ) + + model_config = SettingsConfigDict(env_prefix="DATABASE_", extra="ignore") + + +class MarketDataSettings(BaseSettings): + """Market data feed manager configuration.""" + + cleanup_interval: int = Field( + default=300, + description="How often to run feed cleanup in seconds" + ) + feed_timeout: int = Field( + default=600, + description="How long to keep unused feeds alive in seconds" + ) + + model_config = SettingsConfigDict(env_prefix="MARKET_DATA_", extra="ignore") + + +class SecuritySettings(BaseSettings): + """Security and authentication configuration.""" + + username: str = Field(default="admin", description="API basic auth username") + password: str = Field(default="admin", description="API basic auth password") + debug_mode: bool = Field(default=False, description="Enable debug mode (disables auth)") + config_password: str = Field(default="a", description="Bot configuration encryption password") + + model_config = SettingsConfigDict( + env_prefix="", + extra="ignore" # Ignore extra environment variables + ) + + +class AWSSettings(BaseSettings): + """AWS configuration for S3 archiving.""" + + api_key: str = Field(default="", description="AWS API key") + secret_key: str = Field(default="", description="AWS secret key") + s3_default_bucket_name: str = Field(default="", description="Default S3 bucket for archiving") + + model_config = SettingsConfigDict(env_prefix="AWS_", extra="ignore") + + +class AppSettings(BaseSettings): + """Main application settings.""" + + # Static paths + controllers_path: str = "bots/conf/controllers" + controllers_module: str = "bots.controllers" + password_verification_path: str = "bots/credentials/master_account/.password_verification" + + # Environment-configurable settings + banned_tokens: List[str] = Field( + default=["NAV", "ARS", "ETHW", "ETHF"], + description="List of banned trading tokens" + ) + logfire_environment: str = Field( + default="dev", + description="Logfire environment name" + ) + + model_config = SettingsConfigDict( + env_file=".env", + env_file_encoding="utf-8", + case_sensitive=False, + extra="ignore" + ) + + +class Settings(BaseSettings): + """Combined application settings.""" + + broker: BrokerSettings = Field(default_factory=BrokerSettings) + database: DatabaseSettings = Field(default_factory=DatabaseSettings) + market_data: MarketDataSettings = Field(default_factory=MarketDataSettings) + security: SecuritySettings = Field(default_factory=SecuritySettings) + aws: AWSSettings = Field(default_factory=AWSSettings) + app: AppSettings = Field(default_factory=AppSettings) + + model_config = SettingsConfigDict( + env_file=".env", + env_file_encoding="utf-8", + extra="ignore" + ) + + +# Create global settings instance +settings = Settings() + +# Legacy exports for backward compatibility (can be removed gradually) +CONTROLLERS_PATH = settings.app.controllers_path +CONTROLLERS_MODULE = settings.app.controllers_module +CONFIG_PASSWORD = settings.security.config_password +BROKER_HOST = settings.broker.host +BROKER_PORT = settings.broker.port +BROKER_USERNAME = settings.broker.username +BROKER_PASSWORD = settings.broker.password +PASSWORD_VERIFICATION_PATH = settings.app.password_verification_path +BANNED_TOKENS = settings.app.banned_tokens +LOGFIRE_ENVIRONMENT = settings.app.logfire_environment +DATABASE_URL = settings.database.url +MARKET_DATA_CLEANUP_INTERVAL = settings.market_data.cleanup_interval +MARKET_DATA_FEED_TIMEOUT = settings.market_data.feed_timeout From d9e1d0d6f877851a0d194eb9c3d3f23045fe34bd Mon Sep 17 00:00:00 2001 
From: cardosofede Date: Wed, 11 Jun 2025 15:03:09 +0800 Subject: [PATCH 049/244] (feat) add pydantic settings dependency --- environment.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/environment.yml b/environment.yml index e2c2634e..cce70501 100644 --- a/environment.yml +++ b/environment.yml @@ -24,3 +24,4 @@ dependencies: - asyncpg - psycopg2-binary - greenlet + - pydantic-settings From 60ae43a4aca4e67028b657c2d539465db5e247a5 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 11 Jun 2025 15:11:54 +0800 Subject: [PATCH 050/244] (feat) re-launch setup.sh --- set_environment.sh | 18 ----- setup.sh | 188 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 188 insertions(+), 18 deletions(-) delete mode 100644 set_environment.sh create mode 100755 setup.sh diff --git a/set_environment.sh b/set_environment.sh deleted file mode 100644 index 10b41336..00000000 --- a/set_environment.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -# Create or overwrite .env file -echo "Setting up .env file for the project... -By default, the current working directory will be used as the BOTS_PATH and the CONFIG_PASSWORD will be set to 'a'." - -# Asking for CONFIG_PASSWORD and BOTS_PATH -CONFIG_PASSWORD=a -USERNAME=admin -PASSWORD=admin -BOTS_PATH=$(pwd) - -# Write to .env file -echo "CONFIG_PASSWORD=$CONFIG_PASSWORD" > .env -echo "BOTS_PATH=$BOTS_PATH" >> .env -echo "USERNAME=$USERNAME" >> .env -echo "PASSWORD=$PASSWORD" >> .env -echo "LOGFIRE_ENVIRONMENT=dev" >> .env diff --git a/setup.sh b/setup.sh new file mode 100755 index 00000000..da99a7ca --- /dev/null +++ b/setup.sh @@ -0,0 +1,188 @@ +#!/bin/bash + +# Backend API Setup Script +# This script creates a comprehensive .env file with all configuration options +# following the Pydantic Settings structure established in config.py + +set -e # Exit on any error + +echo "🚀 Backend API Environment Setup" +echo "=================================" +echo "" + +# Colors for better output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +PURPLE='\033[0;35m' +CYAN='\033[0;36m' +NC='\033[0m' # No Color + +# Function to prompt for input with default value +prompt_with_default() { + local prompt="$1" + local default="$2" + local result + + echo -n -e "${CYAN}$prompt${NC} [${YELLOW}$default${NC}]: " + read -r result + echo "${result:-$default}" +} + +# Function to prompt for password (hidden input) +prompt_password() { + local prompt="$1" + local default="$2" + local result + + echo -n -e "${CYAN}$prompt${NC} [${YELLOW}$default${NC}]: " + read -r -s result + echo "" # New line after hidden input + echo "${result:-$default}" +} + +echo -e "${BLUE}📁 Project Configuration${NC}" +echo "========================" + +# Basic paths and project settings +BOTS_PATH=$(prompt_with_default "Bots directory path" "$(pwd)") +CONFIG_PASSWORD=$(prompt_password "Configuration encryption password" "a") + +echo "" +echo -e "${PURPLE}🔐 Security Configuration${NC}" +echo "=========================" + +# Security settings +USERNAME=$(prompt_with_default "API username" "admin") +PASSWORD=$(prompt_password "API password" "admin") +DEBUG_MODE=$(prompt_with_default "Enable debug mode (true/false)" "false") + +echo "" +echo -e "${GREEN}🔗 MQTT Broker Configuration${NC}" +echo "=============================" + +# Broker settings +BROKER_HOST=$(prompt_with_default "MQTT broker host" "localhost") +BROKER_PORT=$(prompt_with_default "MQTT broker port" "1883") +BROKER_USERNAME=$(prompt_with_default "MQTT broker username" "admin") 
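The BROKER_* values collected by these prompts are read back at startup by the env-prefixed Pydantic settings introduced in patch 048; a standalone sketch of that resolution, assuming pydantic-settings v2 semantics and a hypothetical host value:

import os

from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict


class BrokerSettings(BaseSettings):
    # Mirrors the BrokerSettings class in config.py.
    host: str = Field(default="localhost")
    port: int = Field(default=1883)

    model_config = SettingsConfigDict(env_prefix="BROKER_", extra="ignore")


# Values exported (or written to .env by this script) override the defaults:
os.environ["BROKER_HOST"] = "mqtt.internal"  # hypothetical host
os.environ["BROKER_PORT"] = "1884"
print(BrokerSettings())  # host='mqtt.internal' port=1884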
+BROKER_PASSWORD=$(prompt_password "MQTT broker password" "password") + +echo "" +echo -e "${YELLOW}💾 Database Configuration${NC}" +echo "=========================" + +# Database settings +DATABASE_URL=$(prompt_with_default "Database URL" "postgresql+asyncpg://hbot:backend-api@localhost:5432/backend_api") + +echo "" +echo -e "${CYAN}📊 Market Data Configuration${NC}" +echo "============================" + +# Market data settings +CLEANUP_INTERVAL=$(prompt_with_default "Feed cleanup interval (seconds)" "300") +FEED_TIMEOUT=$(prompt_with_default "Feed timeout (seconds)" "600") + +echo "" +echo -e "${PURPLE}☁️ AWS Configuration (Optional)${NC}" +echo "===============================" + +# AWS settings (optional) +AWS_API_KEY=$(prompt_with_default "AWS API Key (optional)" "") +AWS_SECRET_KEY=$(prompt_password "AWS Secret Key (optional)" "") +S3_BUCKET=$(prompt_with_default "S3 Default Bucket (optional)" "") + +echo "" +echo -e "${BLUE}⚙️ Application Settings${NC}" +echo "======================" + +# Application settings +LOGFIRE_ENV=$(prompt_with_default "Logfire environment" "dev") +BANNED_TOKENS=$(prompt_with_default "Banned tokens (comma-separated)" "NAV,ARS,ETHW,ETHF") + +echo "" +echo -e "${GREEN}📝 Creating .env file...${NC}" + +# Create .env file with proper structure and comments +cat > .env << EOF +# ================================================================= +# Backend API Environment Configuration +# Generated on: $(date) +# ================================================================= + +# ================================================================= +# 🔐 Security Configuration +# ================================================================= +USERNAME=$USERNAME +PASSWORD=$PASSWORD +DEBUG_MODE=$DEBUG_MODE +CONFIG_PASSWORD=$CONFIG_PASSWORD + +# ================================================================= +# 🔗 MQTT Broker Configuration (BROKER_*) +# ================================================================= +BROKER_HOST=$BROKER_HOST +BROKER_PORT=$BROKER_PORT +BROKER_USERNAME=$BROKER_USERNAME +BROKER_PASSWORD=$BROKER_PASSWORD + +# ================================================================= +# 💾 Database Configuration (DATABASE_*) +# ================================================================= +DATABASE_URL=$DATABASE_URL + +# ================================================================= +# 📊 Market Data Feed Manager Configuration (MARKET_DATA_*) +# ================================================================= +MARKET_DATA_CLEANUP_INTERVAL=$CLEANUP_INTERVAL +MARKET_DATA_FEED_TIMEOUT=$FEED_TIMEOUT + +# ================================================================= +# ☁️ AWS Configuration (AWS_*) - Optional +# ================================================================= +AWS_API_KEY=$AWS_API_KEY +AWS_SECRET_KEY=$AWS_SECRET_KEY +AWS_S3_DEFAULT_BUCKET_NAME=$S3_BUCKET + +# ================================================================= +# ⚙️ Application Settings +# ================================================================= +LOGFIRE_ENVIRONMENT=$LOGFIRE_ENV +BANNED_TOKENS=$BANNED_TOKENS + +# ================================================================= +# 📁 Legacy Settings (maintained for backward compatibility) +# ================================================================= +BOTS_PATH=$BOTS_PATH + +EOF + +echo -e "${GREEN}✅ .env file created successfully!${NC}" +echo "" + +# Display configuration summary +echo -e "${BLUE}📋 Configuration Summary${NC}" +echo "=======================" +echo -e "${CYAN}Security:${NC} 
Username: $USERNAME, Debug: $DEBUG_MODE" +echo -e "${CYAN}Broker:${NC} $BROKER_HOST:$BROKER_PORT" +echo -e "${CYAN}Database:${NC} ${DATABASE_URL%%@*}@[hidden]" +echo -e "${CYAN}Market Data:${NC} Cleanup: ${CLEANUP_INTERVAL}s, Timeout: ${FEED_TIMEOUT}s" +echo -e "${CYAN}Environment:${NC} $LOGFIRE_ENV" + +if [ -n "$AWS_API_KEY" ]; then + echo -e "${CYAN}AWS:${NC} Configured with S3 bucket: $S3_BUCKET" +else + echo -e "${CYAN}AWS:${NC} Not configured (optional)" +fi + +echo "" +echo -e "${GREEN}🎉 Setup Complete!${NC}" +echo "" +echo -e "${YELLOW}Next steps:${NC}" +echo "1. Review the .env file if needed: ${BLUE}cat .env${NC}" +echo "2. Install dependencies: ${BLUE}make install${NC}" +echo "3. Start the API: ${BLUE}make run${NC}" +echo "" +echo -e "${PURPLE}💡 Pro tip:${NC} You can modify environment variables in .env file anytime" +echo -e "${PURPLE}📚 Documentation:${NC} Check config.py for all available settings" +echo "" \ No newline at end of file From 3ff1f8dd22b86000b76e7f474e6f690242307e58 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 11 Jun 2025 15:12:03 +0800 Subject: [PATCH 051/244] (feat) update readme --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 7d23d29b..383b74a1 100644 --- a/README.md +++ b/README.md @@ -31,9 +31,9 @@ Run the API using uvicorn with the following command: For running the project using Docker, follow these steps: 1. **Set up Environment Variables**: - - Execute the `set_environment.sh` script to configure the necessary environment variables in the `.env` file: + - Execute the `setup.sh` script to configure the necessary environment variables in the `.env` file: ```bash - ./set_environment.sh + ./setup.sh ``` 2. **Build and Run with Docker Compose**: From f46311f5d004c79d5be9db7f8fda1758935c690a Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 11 Jun 2025 15:49:32 +0800 Subject: [PATCH 052/244] (feat) use settings everywhere --- config.py | 15 ---------- main.py | 53 ++++++++++++++++++++++++++---------- routers/backtesting.py | 8 +++--- services/accounts_service.py | 8 +++--- services/docker_service.py | 3 +- utils/security.py | 8 +++--- 6 files changed, 53 insertions(+), 42 deletions(-) diff --git a/config.py b/config.py index f444872a..e6e3d0b1 100644 --- a/config.py +++ b/config.py @@ -109,18 +109,3 @@ class Settings(BaseSettings): # Create global settings instance settings = Settings() - -# Legacy exports for backward compatibility (can be removed gradually) -CONTROLLERS_PATH = settings.app.controllers_path -CONTROLLERS_MODULE = settings.app.controllers_module -CONFIG_PASSWORD = settings.security.config_password -BROKER_HOST = settings.broker.host -BROKER_PORT = settings.broker.port -BROKER_USERNAME = settings.broker.username -BROKER_PASSWORD = settings.broker.password -PASSWORD_VERIFICATION_PATH = settings.app.password_verification_path -BANNED_TOKENS = settings.app.banned_tokens -LOGFIRE_ENVIRONMENT = settings.app.logfire_environment -DATABASE_URL = settings.database.url -MARKET_DATA_CLEANUP_INTERVAL = settings.market_data.cleanup_interval -MARKET_DATA_FEED_TIMEOUT = settings.market_data.feed_timeout diff --git a/main.py b/main.py index 629aa7fa..608e334b 100644 --- a/main.py +++ b/main.py @@ -1,4 +1,3 @@ -import os import secrets from contextlib import asynccontextmanager from typing import Annotated @@ -8,11 +7,15 @@ from fastapi import Depends, FastAPI, HTTPException, status from fastapi.security import HTTPBasic, HTTPBasicCredentials from fastapi.middleware.cors import 
CORSMiddleware +from hummingbot.data_feed.market_data_provider import MarketDataProvider +from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger -from config import LOGFIRE_ENVIRONMENT, BROKER_HOST, BROKER_PASSWORD, BROKER_PORT, BROKER_USERNAME +from config import settings +from utils.security import BackendAPISecurity from services.bots_orchestrator import BotsOrchestrator from services.accounts_service import AccountsService from services.docker_service import DockerService +from services.market_data_feed_manager import MarketDataFeedManager from utils.bot_archiver import BotArchiver from routers import ( accounts, @@ -41,10 +44,10 @@ # Load environment variables early load_dotenv() -# Environment variables -username = os.getenv("USERNAME", "admin") -password = os.getenv("PASSWORD", "admin") -debug_mode = os.getenv("DEBUG_MODE", "False").lower() in ("true", "1", "t") +# Get settings from Pydantic Settings +username = settings.security.username +password = settings.security.password +debug_mode = settings.security.debug_mode # Security setup security = HTTPBasic() @@ -58,32 +61,51 @@ async def lifespan(app: FastAPI): """ # Initialize services bots_orchestrator = BotsOrchestrator( - broker_host=BROKER_HOST, - broker_port=BROKER_PORT, - broker_username=BROKER_USERNAME, - broker_password=BROKER_PASSWORD + broker_host=settings.broker.host, + broker_port=settings.broker.port, + broker_username=settings.broker.username, + broker_password=settings.broker.password ) accounts_service = AccountsService() docker_service = DockerService() bot_archiver = BotArchiver( - os.environ.get("AWS_API_KEY"), - os.environ.get("AWS_SECRET_KEY"), - os.environ.get("S3_DEFAULT_BUCKET_NAME") + settings.aws.api_key, + settings.aws.secret_key, + settings.aws.s3_default_bucket_name ) # Initialize database await accounts_service.ensure_db_initialized() + # Ensure password verification file exists + if BackendAPISecurity.new_password_required(): + # Create secrets manager with CONFIG_PASSWORD + secrets_manager = ETHKeyFileSecretManger(password=settings.security.config_password) + BackendAPISecurity.store_password_verification(secrets_manager) + logging.info("Created password verification file for master_account") + + # Initialize MarketDataProvider with empty connectors (will use non-trading connectors) + market_data_provider = MarketDataProvider(connectors={}) + + # Initialize MarketDataFeedManager with lifecycle management + market_data_feed_manager = MarketDataFeedManager( + market_data_provider=market_data_provider, + cleanup_interval=settings.market_data.cleanup_interval, + feed_timeout=settings.market_data.feed_timeout + ) + # Store services in app state app.state.bots_orchestrator = bots_orchestrator app.state.accounts_service = accounts_service app.state.docker_service = docker_service app.state.bot_archiver = bot_archiver + app.state.market_data_feed_manager = market_data_feed_manager # Start services bots_orchestrator.start_update_active_bots_loop() accounts_service.start_update_account_state_loop() + market_data_feed_manager.start() yield @@ -91,6 +113,9 @@ async def lifespan(app: FastAPI): bots_orchestrator.stop_update_active_bots_loop() accounts_service.stop_update_account_state_loop() + # Stop market data feed manager (which will stop all feeds) + market_data_feed_manager.stop() + # Close database connections await accounts_service.db_manager.close() @@ -112,7 +137,7 @@ async def lifespan(app: FastAPI): allow_headers=["*"], ) -logfire.configure(send_to_logfire="if-token-present", 
environment=LOGFIRE_ENVIRONMENT, service_name="backend-api") +logfire.configure(send_to_logfire="if-token-present", environment=settings.app.logfire_environment, service_name="backend-api") logfire.instrument_fastapi(app) def auth_user( diff --git a/routers/backtesting.py b/routers/backtesting.py index 7dd0c7b3..71fb8cb4 100644 --- a/routers/backtesting.py +++ b/routers/backtesting.py @@ -5,7 +5,7 @@ from hummingbot.strategy_v2.backtesting.backtesting_engine_base import BacktestingEngineBase from pydantic import BaseModel -from config import CONTROLLERS_MODULE, CONTROLLERS_PATH +from config import settings router = APIRouter(tags=["Backtesting"], prefix="/backtesting") candles_factory = CandlesFactory() @@ -26,13 +26,13 @@ async def run_backtesting(backtesting_config: BacktestingConfig): if isinstance(backtesting_config.config, str): controller_config = backtesting_engine.get_controller_config_instance_from_yml( config_path=backtesting_config.config, - controllers_conf_dir_path=CONTROLLERS_PATH, - controllers_module=CONTROLLERS_MODULE + controllers_conf_dir_path=settings.app.controllers_path, + controllers_module=settings.app.controllers_module ) else: controller_config = backtesting_engine.get_controller_config_instance_from_dict( config_data=backtesting_config.config, - controllers_module=CONTROLLERS_MODULE + controllers_module=settings.app.controllers_module ) backtesting_results = await backtesting_engine.run_backtesting( controller_config=controller_config, trade_cost=backtesting_config.trade_cost, diff --git a/services/accounts_service.py b/services/accounts_service.py index 00abe8dd..80a1b102 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -8,7 +8,7 @@ from fastapi import HTTPException from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger -from config import BANNED_TOKENS, CONFIG_PASSWORD, DATABASE_URL +from config import settings from database import AsyncDatabaseManager, AccountRepository from utils.connector_manager import ConnectorManager from utils.file_system import FileSystemUtil @@ -32,7 +32,7 @@ def __init__(self, update_account_state_interval_minutes: int = 5, default_quote: str = "USDT", account_history_file: str = "account_state_history.json"): - self.secrets_manager = ETHKeyFileSecretManger(CONFIG_PASSWORD) + self.secrets_manager = ETHKeyFileSecretManger(settings.security.config_password) self.connector_manager = ConnectorManager(self.secrets_manager) self.accounts = {} self.accounts_state = {} @@ -44,7 +44,7 @@ def __init__(self, self._update_account_state_task: Optional[asyncio.Task] = None # Database setup - self.db_manager = AsyncDatabaseManager(DATABASE_URL) + self.db_manager = AsyncDatabaseManager(settings.database.url) self._db_initialized = False async def ensure_db_initialized(self): @@ -249,7 +249,7 @@ async def update_account_state(self): tokens_info = [] try: balances = [{"token": key, "units": value} for key, value in connector.get_all_balances().items() if - value != Decimal("0") and key not in BANNED_TOKENS] + value != Decimal("0") and key not in settings.app.banned_tokens] unique_tokens = [balance["token"] for balance in balances] trading_pairs = [self.get_default_market(token, connector_name) for token in unique_tokens if "USD" not in token] last_traded_prices = await self._safe_get_last_traded_prices(connector, trading_pairs) diff --git a/services/docker_service.py b/services/docker_service.py index 17701c31..c9718aed 100644 --- a/services/docker_service.py +++ b/services/docker_service.py @@ -6,6 +6,7 
@@ from docker.errors import DockerException from docker.types import LogConfig +from config import settings from models import HummingbotInstanceConfig from utils.file_system import FileSystemUtil @@ -164,7 +165,7 @@ def create_hummingbot_instance(self, config: HummingbotInstanceConfig): # Set up environment variables environment = {} - password = os.environ.get('CONFIG_PASSWORD', "a") + password = settings.security.config_password if password: environment["CONFIG_PASSWORD"] = password diff --git a/utils/security.py b/utils/security.py index c87e104d..ff697b42 100644 --- a/utils/security.py +++ b/utils/security.py @@ -10,7 +10,7 @@ ) from hummingbot.client.config.security import Security -from config import PASSWORD_VERIFICATION_PATH +from config import settings from utils.file_system import FileSystemUtil from utils.backend_api_config_adapter import BackendAPIConfigAdapter @@ -62,17 +62,17 @@ def update_connector_keys(cls, account_name: str, connector_config: ClientConfig @staticmethod def new_password_required() -> bool: - return not PASSWORD_VERIFICATION_PATH.exists() + return not Path(settings.app.password_verification_path).exists() @staticmethod def store_password_verification(secrets_manager: BaseSecretsManager): encrypted_word = secrets_manager.encrypt_secret_value(PASSWORD_VERIFICATION_WORD, PASSWORD_VERIFICATION_WORD) - FileSystemUtil.ensure_file_and_dump_text(PASSWORD_VERIFICATION_PATH, encrypted_word) + FileSystemUtil.ensure_file_and_dump_text(settings.app.password_verification_path, encrypted_word) @staticmethod def validate_password(secrets_manager: BaseSecretsManager) -> bool: valid = False - with open(PASSWORD_VERIFICATION_PATH, "r") as f: + with open(settings.app.password_verification_path, "r") as f: encrypted_word = f.read() try: decrypted_word = secrets_manager.decrypt_secret_value(PASSWORD_VERIFICATION_WORD, encrypted_word) From fcdf50f8e7c90b7100e6eeb4277e5db37367b0f9 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 11 Jun 2025 16:26:48 +0800 Subject: [PATCH 053/244] (feat) add market data feed --- services/market_data_feed_manager.py | 280 +++++++++++++++++++++++++++ 1 file changed, 280 insertions(+) create mode 100644 services/market_data_feed_manager.py diff --git a/services/market_data_feed_manager.py b/services/market_data_feed_manager.py new file mode 100644 index 00000000..01e9825b --- /dev/null +++ b/services/market_data_feed_manager.py @@ -0,0 +1,280 @@ +import asyncio +import time +from typing import Dict, Optional, Any, Callable +import logging +from enum import Enum + +from hummingbot.data_feed.candles_feed.data_types import CandlesConfig +from hummingbot.data_feed.market_data_provider import MarketDataProvider + + +class FeedType(Enum): + """Types of market data feeds that can be managed.""" + CANDLES = "candles" + ORDER_BOOK = "order_book" + TRADES = "trades" + TICKER = "ticker" + + +class MarketDataFeedManager: + """ + Generic manager for market data feeds lifecycle with automatic cleanup. + + This service wraps the MarketDataProvider and tracks when any type of market data feed + is last accessed. Feeds that haven't been accessed within the specified timeout period + are automatically stopped and cleaned up. + """ + + def __init__(self, market_data_provider: MarketDataProvider, cleanup_interval: int = 300, feed_timeout: int = 600): + """ + Initialize the MarketDataFeedManager. 
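At its core the manager is a TTL registry over MarketDataProvider: every accessor stamps time.time() into last_access_times under a key like candles_binance_BTC-USDT_1m, and the cleanup loop evicts anything older than feed_timeout. A compact standalone sketch of that eviction rule, with illustrative entries:

import time

feed_timeout = 600  # seconds, the default used here
last_access_times = {
    "candles_binance_BTC-USDT_1m": time.time() - 700,  # stale
    "candles_binance_ETH-USDT_1m": time.time() - 30,   # fresh
}

now = time.time()
expired = [key for key, ts in last_access_times.items() if now - ts > feed_timeout]
print(expired)  # ['candles_binance_BTC-USDT_1m'] -> stopped and dropped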
+ + Args: + market_data_provider: The underlying MarketDataProvider instance + cleanup_interval: How often to run cleanup (seconds, default: 5 minutes) + feed_timeout: How long to keep unused feeds alive (seconds, default: 10 minutes) + """ + self.market_data_provider = market_data_provider + self.cleanup_interval = cleanup_interval + self.feed_timeout = feed_timeout + self.last_access_times: Dict[str, float] = {} + self.feed_configs: Dict[str, tuple] = {} # Store feed configs for cleanup + self._cleanup_task: Optional[asyncio.Task] = None + self._is_running = False + self.logger = logging.getLogger(__name__) + + # Registry of cleanup functions for different feed types + self._cleanup_functions: Dict[FeedType, Callable] = { + FeedType.CANDLES: self._cleanup_candle_feed, + FeedType.ORDER_BOOK: self._cleanup_order_book_feed, + # Add more feed types as needed + } + + def start(self): + """Start the cleanup background task.""" + if not self._is_running: + self._is_running = True + self._cleanup_task = asyncio.create_task(self._cleanup_loop()) + self.logger.info(f"MarketDataFeedManager started with cleanup_interval={self.cleanup_interval}s, feed_timeout={self.feed_timeout}s") + + def stop(self): + """Stop the cleanup background task and all feeds.""" + self._is_running = False + if self._cleanup_task: + self._cleanup_task.cancel() + self._cleanup_task = None + + # Stop all feeds managed by the MarketDataProvider + self.market_data_provider.stop() + self.last_access_times.clear() + self.feed_configs.clear() + self.logger.info("MarketDataFeedManager stopped") + + def get_candles_feed(self, config: CandlesConfig): + """ + Get a candles feed and update its last access time. + + Args: + config: CandlesConfig for the desired feed + + Returns: + Candle feed instance + """ + feed_key = self._generate_feed_key(FeedType.CANDLES, config.connector, config.trading_pair, config.interval) + + # Update last access time and store config for cleanup + self.last_access_times[feed_key] = time.time() + self.feed_configs[feed_key] = (FeedType.CANDLES, config) + + # Get the feed from MarketDataProvider + feed = self.market_data_provider.get_candles_feed(config) + + self.logger.debug(f"Accessed candle feed: {feed_key}") + return feed + + def get_candles_df(self, connector_name: str, trading_pair: str, interval: str, max_records: int = 500): + """ + Get candles dataframe and update access time. + + Args: + connector_name: The connector name + trading_pair: The trading pair + interval: The candle interval + max_records: Maximum number of records + + Returns: + Candles dataframe + """ + config = CandlesConfig( + connector=connector_name, + trading_pair=trading_pair, + interval=interval, + max_records=max_records + ) + + feed_key = self._generate_feed_key(FeedType.CANDLES, connector_name, trading_pair, interval) + self.last_access_times[feed_key] = time.time() + self.feed_configs[feed_key] = (FeedType.CANDLES, config) + + # Use MarketDataProvider's convenience method + df = self.market_data_provider.get_candles_df(connector_name, trading_pair, interval, max_records) + + self.logger.debug(f"Accessed candle data: {feed_key}") + return df + + def get_order_book(self, connector_name: str, trading_pair: str): + """ + Get order book and update access time. 
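A hedged usage sketch for the candle accessors, wired the same way main.py does it (empty connector map, started inside a running event loop); the dataframe may be empty until the underlying feed warms up, so this is a sketch rather than a reliable fetch:

import asyncio

from hummingbot.data_feed.market_data_provider import MarketDataProvider
from services.market_data_feed_manager import MarketDataFeedManager


async def main():
    manager = MarketDataFeedManager(
        market_data_provider=MarketDataProvider(connectors={}),
        cleanup_interval=300,
        feed_timeout=600,
    )
    manager.start()  # the cleanup task needs the running loop
    try:
        manager.get_candles_df("binance", "BTC-USDT", "1m")  # starts the feed
        await asyncio.sleep(5)                               # let it warm up
        df = manager.get_candles_df("binance", "BTC-USDT", "1m", max_records=100)
        print(df.tail() if df is not None else "no data yet")
    finally:
        manager.stop()


asyncio.run(main())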
+ + Args: + connector_name: The connector name + trading_pair: The trading pair + + Returns: + Order book instance + """ + feed_key = self._generate_feed_key(FeedType.ORDER_BOOK, connector_name, trading_pair) + + # Update last access time + self.last_access_times[feed_key] = time.time() + self.feed_configs[feed_key] = (FeedType.ORDER_BOOK, (connector_name, trading_pair)) + + # Get order book from MarketDataProvider + order_book = self.market_data_provider.get_order_book(connector_name, trading_pair) + + self.logger.debug(f"Accessed order book: {feed_key}") + return order_book + + def get_order_book_snapshot(self, connector_name: str, trading_pair: str): + """ + Get order book snapshot and update access time. + + Args: + connector_name: The connector name + trading_pair: The trading pair + + Returns: + Tuple of bid and ask DataFrames + """ + feed_key = self._generate_feed_key(FeedType.ORDER_BOOK, connector_name, trading_pair) + + # Update last access time + self.last_access_times[feed_key] = time.time() + self.feed_configs[feed_key] = (FeedType.ORDER_BOOK, (connector_name, trading_pair)) + + # Get order book snapshot from MarketDataProvider + snapshot = self.market_data_provider.get_order_book_snapshot(connector_name, trading_pair) + + self.logger.debug(f"Accessed order book snapshot: {feed_key}") + return snapshot + + async def _cleanup_loop(self): + """Background task that periodically cleans up unused feeds.""" + while self._is_running: + try: + await self._cleanup_unused_feeds() + await asyncio.sleep(self.cleanup_interval) + except asyncio.CancelledError: + break + except Exception as e: + self.logger.error(f"Error in cleanup loop: {e}", exc_info=True) + await asyncio.sleep(self.cleanup_interval) + + async def _cleanup_unused_feeds(self): + """Clean up feeds that haven't been accessed within the timeout period.""" + current_time = time.time() + feeds_to_remove = [] + + for feed_key, last_access_time in self.last_access_times.items(): + if current_time - last_access_time > self.feed_timeout: + feeds_to_remove.append(feed_key) + + for feed_key in feeds_to_remove: + try: + # Get feed type and config + feed_type, config = self.feed_configs[feed_key] + + # Use appropriate cleanup function + cleanup_func = self._cleanup_functions.get(feed_type) + if cleanup_func: + cleanup_func(config) + + # Remove from tracking + del self.last_access_times[feed_key] + del self.feed_configs[feed_key] + + self.logger.info(f"Cleaned up unused {feed_type.value} feed: {feed_key}") + + except Exception as e: + self.logger.error(f"Error cleaning up feed {feed_key}: {e}", exc_info=True) + + if feeds_to_remove: + self.logger.info(f"Cleaned up {len(feeds_to_remove)} unused market data feeds") + + def _cleanup_candle_feed(self, config: CandlesConfig): + """Clean up a candle feed.""" + self.market_data_provider.stop_candle_feed(config) + + def _cleanup_order_book_feed(self, config: tuple): + """Clean up an order book feed.""" + # Order books are typically managed by connectors, so we might not need explicit cleanup + # This is a placeholder for future implementation if needed + pass + + def _generate_feed_key(self, feed_type: FeedType, connector: str, trading_pair: str, interval: str = None) -> str: + """Generate a unique key for a market data feed.""" + if interval: + return f"{feed_type.value}_{connector}_{trading_pair}_{interval}" + else: + return f"{feed_type.value}_{connector}_{trading_pair}" + + def get_active_feeds_info(self) -> Dict[str, dict]: + """ + Get information about currently active feeds. 
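Each entry in the returned mapping looks roughly like the sketch below (illustrative values); will_expire_in is simply feed_timeout minus the staleness, floored at zero:

example_entry = {
    "candles_binance_BTC-USDT_1m": {
        "feed_type": "candles",
        "last_access_time": 1749600000.0,   # epoch seconds, illustrative
        "seconds_since_access": 42.0,
        "will_expire_in": 558.0,            # 600s timeout - 42s staleness
        "config": "CandlesConfig(...)",
    }
}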
+ + Returns: + Dictionary with feed information including last access times and feed types + """ + current_time = time.time() + result = {} + + for feed_key, last_access in self.last_access_times.items(): + feed_type, config = self.feed_configs.get(feed_key, (None, None)) + result[feed_key] = { + "feed_type": feed_type.value if feed_type else "unknown", + "last_access_time": last_access, + "seconds_since_access": current_time - last_access, + "will_expire_in": max(0, self.feed_timeout - (current_time - last_access)), + "config": str(config) # String representation of config + } + + return result + + def manually_cleanup_feed(self, feed_type: FeedType, connector: str, trading_pair: str, interval: str = None): + """ + Manually cleanup a specific feed. + + Args: + feed_type: Type of feed to cleanup + connector: Connector name + trading_pair: Trading pair + interval: Interval (for candles only) + """ + feed_key = self._generate_feed_key(feed_type, connector, trading_pair, interval) + + if feed_key in self.feed_configs: + feed_type_obj, config = self.feed_configs[feed_key] + cleanup_func = self._cleanup_functions.get(feed_type_obj) + + if cleanup_func: + try: + cleanup_func(config) + del self.last_access_times[feed_key] + del self.feed_configs[feed_key] + self.logger.info(f"Manually cleaned up feed: {feed_key}") + except Exception as e: + self.logger.error(f"Error manually cleaning up feed {feed_key}: {e}", exc_info=True) + else: + self.logger.warning(f"No cleanup function for feed type: {feed_type}") + else: + self.logger.warning(f"Feed not found for cleanup: {feed_key}") \ No newline at end of file From 08c458403c1797ff827e48d4d36cb182991a1f81 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 11 Jun 2025 16:26:59 +0800 Subject: [PATCH 054/244] (feat) improve setup.sh script --- setup.sh | 120 +++++++++++++++++++------------------------------------ 1 file changed, 40 insertions(+), 80 deletions(-) diff --git a/setup.sh b/setup.sh index da99a7ca..f017c6c8 100755 --- a/setup.sh +++ b/setup.sh @@ -6,10 +6,6 @@ set -e # Exit on any error -echo "🚀 Backend API Environment Setup" -echo "=================================" -echo "" - # Colors for better output RED='\033[0;31m' GREEN='\033[0;32m' @@ -19,86 +15,41 @@ PURPLE='\033[0;35m' CYAN='\033[0;36m' NC='\033[0m' # No Color -# Function to prompt for input with default value -prompt_with_default() { - local prompt="$1" - local default="$2" - local result - - echo -n -e "${CYAN}$prompt${NC} [${YELLOW}$default${NC}]: " - read -r result - echo "${result:-$default}" -} - -# Function to prompt for password (hidden input) -prompt_password() { - local prompt="$1" - local default="$2" - local result - - echo -n -e "${CYAN}$prompt${NC} [${YELLOW}$default${NC}]: " - read -r -s result - echo "" # New line after hidden input - echo "${result:-$default}" -} - -echo -e "${BLUE}📁 Project Configuration${NC}" -echo "========================" - -# Basic paths and project settings -BOTS_PATH=$(prompt_with_default "Bots directory path" "$(pwd)") -CONFIG_PASSWORD=$(prompt_password "Configuration encryption password" "a") - -echo "" -echo -e "${PURPLE}🔐 Security Configuration${NC}" -echo "=========================" - -# Security settings -USERNAME=$(prompt_with_default "API username" "admin") -PASSWORD=$(prompt_password "API password" "admin") -DEBUG_MODE=$(prompt_with_default "Enable debug mode (true/false)" "false") - +echo "🚀 Backend API Setup" echo "" -echo -e "${GREEN}🔗 MQTT Broker Configuration${NC}" -echo "=============================" -# Broker settings 
-BROKER_HOST=$(prompt_with_default "MQTT broker host" "localhost") -BROKER_PORT=$(prompt_with_default "MQTT broker port" "1883") -BROKER_USERNAME=$(prompt_with_default "MQTT broker username" "admin") -BROKER_PASSWORD=$(prompt_password "MQTT broker password" "password") +echo -n "Config password [default: admin]: " +read CONFIG_PASSWORD +CONFIG_PASSWORD=${CONFIG_PASSWORD:-admin} + +echo -n "API username [default: admin]: " +read USERNAME +USERNAME=${USERNAME:-admin} + +echo -n "API password [default: admin]: " +read PASSWORD +PASSWORD=${PASSWORD:-admin} + +# Set paths and defaults +BOTS_PATH=$(pwd) + +# Use sensible defaults for everything else +DEBUG_MODE="false" +BROKER_HOST="localhost" +BROKER_PORT="1883" +BROKER_USERNAME="admin" +BROKER_PASSWORD="password" +DATABASE_URL="postgresql+asyncpg://hbot:backend-api@localhost:5432/backend_api" +CLEANUP_INTERVAL="300" +FEED_TIMEOUT="600" +AWS_API_KEY="" +AWS_SECRET_KEY="" +S3_BUCKET="" +LOGFIRE_ENV="dev" +BANNED_TOKENS="NAV,ARS,ETHW,ETHF" echo "" -echo -e "${YELLOW}💾 Database Configuration${NC}" -echo "=========================" - -# Database settings -DATABASE_URL=$(prompt_with_default "Database URL" "postgresql+asyncpg://hbot:backend-api@localhost:5432/backend_api") - -echo "" -echo -e "${CYAN}📊 Market Data Configuration${NC}" -echo "============================" - -# Market data settings -CLEANUP_INTERVAL=$(prompt_with_default "Feed cleanup interval (seconds)" "300") -FEED_TIMEOUT=$(prompt_with_default "Feed timeout (seconds)" "600") - -echo "" -echo -e "${PURPLE}☁️ AWS Configuration (Optional)${NC}" -echo "===============================" - -# AWS settings (optional) -AWS_API_KEY=$(prompt_with_default "AWS API Key (optional)" "") -AWS_SECRET_KEY=$(prompt_password "AWS Secret Key (optional)" "") -S3_BUCKET=$(prompt_with_default "S3 Default Bucket (optional)" "") - -echo "" -echo -e "${BLUE}⚙️ Application Settings${NC}" -echo "======================" - -# Application settings -LOGFIRE_ENV=$(prompt_with_default "Logfire environment" "dev") -BANNED_TOKENS=$(prompt_with_default "Banned tokens (comma-separated)" "NAV,ARS,ETHW,ETHF") +echo -e "${GREEN}✅ Using sensible defaults for MQTT, Database, and other settings${NC}" echo "" echo -e "${GREEN}📝 Creating .env file...${NC}" @@ -178,6 +129,14 @@ fi echo "" echo -e "${GREEN}🎉 Setup Complete!${NC}" echo "" + +# Check if password verification file exists +if [ ! -f "bots/credentials/master_account/.password_verification" ]; then + echo -e "${YELLOW}📌 Note:${NC} Password verification file will be created on first startup" + echo -e " Location: ${BLUE}bots/credentials/master_account/.password_verification${NC}" + echo "" +fi + echo -e "${YELLOW}Next steps:${NC}" echo "1. Review the .env file if needed: ${BLUE}cat .env${NC}" echo "2. Install dependencies: ${BLUE}make install${NC}" @@ -185,4 +144,5 @@ echo "3. 
Start the API: ${BLUE}make run${NC}" echo "" echo -e "${PURPLE}💡 Pro tip:${NC} You can modify environment variables in .env file anytime" echo -e "${PURPLE}📚 Documentation:${NC} Check config.py for all available settings" +echo -e "${PURPLE}🔒 Security:${NC} The password verification file secures bot credentials" echo "" \ No newline at end of file From a18aebcf28d2c49904f36b809fc660bbccc331c8 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 11 Jun 2025 16:56:11 +0800 Subject: [PATCH 055/244] (feat) remove default password verification --- bots/credentials/master_account/.password_verification | 1 - 1 file changed, 1 deletion(-) delete mode 100644 bots/credentials/master_account/.password_verification diff --git a/bots/credentials/master_account/.password_verification b/bots/credentials/master_account/.password_verification deleted file mode 100644 index b8c76184..00000000 --- a/bots/credentials/master_account/.password_verification +++ /dev/null @@ -1 +0,0 @@ -7b2263727970746f223a207b22636970686572223a20226165732d3132382d637472222c2022636970686572706172616d73223a207b226976223a20223864336365306436393461623131396334363135663935366464653839363063227d2c202263697068657274657874223a20223836333266323430613563306131623665353664222c20226b6466223a202270626b646632222c20226b6466706172616d73223a207b2263223a20313030303030302c2022646b6c656e223a2033322c2022707266223a2022686d61632d736861323536222c202273616c74223a20226566373330376531636464373964376132303338323534656139343433663930227d2c20226d6163223a202266393439383534613530633138363633386363353962336133363665633962353333386633613964373266636635343066313034333361353431636232306438227d2c202276657273696f6e223a20337d \ No newline at end of file From 4532508197970380062201cb7fa02dd5a6a0ff41 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 11 Jun 2025 16:56:24 +0800 Subject: [PATCH 056/244] (feat) move banned tokens to app --- config.py | 10 ++++++---- services/accounts_service.py | 2 +- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/config.py b/config.py index e6e3d0b1..725601d1 100644 --- a/config.py +++ b/config.py @@ -73,10 +73,6 @@ class AppSettings(BaseSettings): password_verification_path: str = "bots/credentials/master_account/.password_verification" # Environment-configurable settings - banned_tokens: List[str] = Field( - default=["NAV", "ARS", "ETHW", "ETHF"], - description="List of banned trading tokens" - ) logfire_environment: str = Field( default="dev", description="Logfire environment name" @@ -99,6 +95,12 @@ class Settings(BaseSettings): security: SecuritySettings = Field(default_factory=SecuritySettings) aws: AWSSettings = Field(default_factory=AWSSettings) app: AppSettings = Field(default_factory=AppSettings) + + # Direct banned_tokens field to handle env parsing + banned_tokens: List[str] = Field( + default=["NAV", "ARS", "ETHW", "ETHF"], + description="List of banned trading tokens" + ) model_config = SettingsConfigDict( env_file=".env", diff --git a/services/accounts_service.py b/services/accounts_service.py index 80a1b102..7bc9b22d 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -249,7 +249,7 @@ async def update_account_state(self): tokens_info = [] try: balances = [{"token": key, "units": value} for key, value in connector.get_all_balances().items() if - value != Decimal("0") and key not in settings.app.banned_tokens] + value != Decimal("0") and key not in settings.banned_tokens] unique_tokens = [balance["token"] for balance in balances] trading_pairs = [self.get_default_market(token, 
connector_name) for token in unique_tokens if "USD" not in token] last_traded_prices = await self._safe_get_last_traded_prices(connector, trading_pairs) From 737828e1e816116237ace7fb8d254525685eb9dc Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 11 Jun 2025 16:56:36 +0800 Subject: [PATCH 057/244] (feat) generate password verification as a first step --- main.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/main.py b/main.py index 608e334b..d2da49d0 100644 --- a/main.py +++ b/main.py @@ -59,6 +59,13 @@ async def lifespan(app: FastAPI): Lifespan context manager for the FastAPI application. Handles startup and shutdown events. """ + # Ensure password verification file exists FIRST + if BackendAPISecurity.new_password_required(): + # Create secrets manager with CONFIG_PASSWORD + secrets_manager = ETHKeyFileSecretManger(password=settings.security.config_password) + BackendAPISecurity.store_password_verification(secrets_manager) + logging.info("Created password verification file for master_account") + # Initialize services bots_orchestrator = BotsOrchestrator( broker_host=settings.broker.host, @@ -78,13 +85,6 @@ async def lifespan(app: FastAPI): # Initialize database await accounts_service.ensure_db_initialized() - # Ensure password verification file exists - if BackendAPISecurity.new_password_required(): - # Create secrets manager with CONFIG_PASSWORD - secrets_manager = ETHKeyFileSecretManger(password=settings.security.config_password) - BackendAPISecurity.store_password_verification(secrets_manager) - logging.info("Created password verification file for master_account") - # Initialize MarketDataProvider with empty connectors (will use non-trading connectors) market_data_provider = MarketDataProvider(connectors={}) From a1f9df86f47db7b9b9f899beb9b0635cc27c8e38 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 12 Jun 2025 10:47:01 +0800 Subject: [PATCH 058/244] (feat) add yml extension if not present --- routers/bot_orchestration.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py index deff7bf1..5f516ce5 100644 --- a/routers/bot_orchestration.py +++ b/routers/bot_orchestration.py @@ -242,12 +242,20 @@ async def deploy_v2_controllers( timestamp = datetime.now().strftime("%Y%m%d-%H%M%S") script_config_filename = f"{deployment.instance_name}-{timestamp}.yml" + # Ensure controller config names have .yml extension + controllers_with_extension = [] + for controller in deployment.controllers_config: + if not controller.endswith('.yml'): + controllers_with_extension.append(f"{controller}.yml") + else: + controllers_with_extension.append(controller) + # Create the script config content script_config_content = { "script_file_name": "v2_with_controllers.py", "candles_config": [], "markets": {}, - "controllers_config": deployment.controllers_config, + "controllers_config": controllers_with_extension, } # Add optional drawdown parameters if provided From b86bf5522931ab3886694a0eedba0dec5816fd79 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 12 Jun 2025 13:51:05 +0800 Subject: [PATCH 059/244] (feat) add quotes --- setup.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.sh b/setup.sh index f017c6c8..5bd6df9d 100755 --- a/setup.sh +++ b/setup.sh @@ -46,7 +46,7 @@ AWS_API_KEY="" AWS_SECRET_KEY="" S3_BUCKET="" LOGFIRE_ENV="dev" -BANNED_TOKENS="NAV,ARS,ETHW,ETHF" +BANNED_TOKENS='["NAV","ARS","ETHW","ETHF"]' echo "" echo -e "${GREEN}✅ Using sensible 
defaults for MQTT, Database, and other settings${NC}" From 2f7f5b754125502a6f4579757ca26b7692fcc896 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 12 Jun 2025 13:55:06 +0800 Subject: [PATCH 060/244] (feat) add default quote for kraken --- routers/accounts.py | 264 ++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 242 insertions(+), 22 deletions(-) diff --git a/routers/accounts.py b/routers/accounts.py index 5e1dc5b1..62533553 100644 --- a/routers/accounts.py +++ b/routers/accounts.py @@ -16,6 +16,12 @@ @router.get("/state", response_model=Dict[str, Dict[str, List[Dict]]]) async def get_all_accounts_state(accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get the current state of all accounts. + + Returns: + Dict containing account states with connector balances and token information + """ return accounts_service.get_accounts_state() @@ -53,16 +59,37 @@ async def get_account_state_history( @router.get("/connectors", response_model=List[str]) async def available_connectors(): + """ + Get a list of all available connectors. + + Returns: + List of connector names supported by the system + """ return list(AllConnectorSettings.get_connector_settings().keys()) @router.get("/connector-config-map/{connector_name}", response_model=List[str]) async def get_connector_config_map(connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get configuration fields required for a specific connector. + + Args: + connector_name: Name of the connector to get config map for + + Returns: + List of configuration field names required for the connector + """ return accounts_service.get_connector_config_map(connector_name) @router.get("/all-connectors-config-map", response_model=Dict[str, List[str]]) async def get_all_connectors_config_map(accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get configuration fields for all available connectors. + + Returns: + Dictionary mapping connector names to their required configuration fields + """ all_config_maps = {} for connector in list(AllConnectorSettings.get_connector_settings().keys()): all_config_maps[connector] = accounts_service.get_connector_config_map(connector) @@ -71,11 +98,29 @@ async def get_all_connectors_config_map(accounts_service: AccountsService = Depe @router.get("/", response_model=List[str]) async def list_accounts(accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get a list of all account names in the system. + + Returns: + List of account names + """ return accounts_service.list_accounts() @router.get("/{account_name}/credentials", response_model=List[str]) async def list_credentials(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get a list of all credentials (connectors) configured for a specific account. + + Args: + account_name: Name of the account to list credentials for + + Returns: + List of credential file names (connectors) configured for the account + + Raises: + HTTPException: 404 if account not found + """ try: return accounts_service.list_credentials(account_name) except FileNotFoundError as e: @@ -84,26 +129,63 @@ async def list_credentials(account_name: str, accounts_service: AccountsService @router.post("/add-account", status_code=status.HTTP_201_CREATED) async def add_account(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Create a new account with default configuration files. 
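A hedged client sketch for the account and credential flow in this router, assuming it is mounted under /accounts on a local deployment and that the requests library is available; the key names inside the keys dict are illustrative placeholders, not a documented schema:

import requests

BASE = "http://localhost:8000"  # hypothetical local deployment
AUTH = ("admin", "admin")

# Create an account, then attach connector credentials to it.
requests.post(f"{BASE}/accounts/add-account",
              params={"account_name": "demo"}, auth=AUTH, timeout=30).raise_for_status()

keys = {"binance_api_key": "...", "binance_api_secret": "..."}  # hypothetical fields
requests.post(f"{BASE}/accounts/add-connector-keys/demo/binance",
              json=keys, auth=AUTH, timeout=30).raise_for_status()

print(requests.get(f"{BASE}/accounts/", auth=AUTH, timeout=30).json())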
+ + Args: + account_name: Name of the new account to create + + Returns: + Success message when account is created + + Raises: + HTTPException: 400 if account already exists + """ try: accounts_service.add_account(account_name) - return {"message": "Credential added successfully."} + return {"message": "Account added successfully."} except FileExistsError as e: raise HTTPException(status_code=400, detail=str(e)) @router.post("/delete-account") async def delete_account(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Delete an account and all its associated credentials. + + Args: + account_name: Name of the account to delete + + Returns: + Success message when account is deleted + + Raises: + HTTPException: 400 if trying to delete master account, 404 if account not found + """ try: if account_name == "master_account": raise HTTPException(status_code=400, detail="Cannot delete master account.") accounts_service.delete_account(account_name) - return {"message": "Credential deleted successfully."} + return {"message": "Account deleted successfully."} except FileNotFoundError as e: raise HTTPException(status_code=404, detail=str(e)) @router.post("/delete-credential/{account_name}/{connector_name}") async def delete_credential(account_name: str, connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Delete a specific connector credential for an account. + + Args: + account_name: Name of the account + connector_name: Name of the connector to delete credentials for + + Returns: + Success message when credential is deleted + + Raises: + HTTPException: 404 if credential not found + """ try: accounts_service.delete_credentials(account_name, connector_name) return {"message": "Credential deleted successfully."} @@ -113,6 +195,20 @@ async def delete_credential(account_name: str, connector_name: str, accounts_ser @router.post("/add-connector-keys/{account_name}/{connector_name}", status_code=status.HTTP_201_CREATED) async def add_connector_keys(account_name: str, connector_name: str, keys: Dict, accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Add or update connector keys (API credentials) for a specific account and connector. + + Args: + account_name: Name of the account + connector_name: Name of the connector + keys: Dictionary containing the connector credentials + + Returns: + Success message when keys are added + + Raises: + HTTPException: 400 if there's an error adding the keys + """ try: await accounts_service.add_connector_keys(account_name, connector_name, keys) return {"message": "Connector keys added successfully."} @@ -122,16 +218,27 @@ async def add_connector_keys(account_name: str, connector_name: str, keys: Dict, # Account-specific routes -@router.get("/accounts/{account_name}/state", response_model=Dict[str, List[Dict]]) +@router.get("/{account_name}/state", response_model=Dict[str, List[Dict]]) async def get_account_state(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): - """Get current state of a specific account.""" + """ + Get current state of a specific account. 
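The history endpoints page by cursor rather than offset; a client-side sketch that drains all pages, assuming the response body exposes data plus a pagination object carrying has_more and next_cursor (field names assumed from this router's PaginatedResponse):

import requests

BASE = "http://localhost:8000"
AUTH = ("admin", "admin")

cursor, rows = None, []
while True:
    params = {"limit": 200}
    if cursor:
        params["cursor"] = cursor
    page = requests.get(f"{BASE}/accounts/demo/state/history",
                        params=params, auth=AUTH, timeout=30).json()
    rows.extend(page["data"])                  # assumed field name
    if not page["pagination"]["has_more"]:     # assumed shape
        break
    cursor = page["pagination"]["next_cursor"]

print(f"fetched {len(rows)} snapshots")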
+ + Args: + account_name: Name of the account to get state for + + Returns: + Dictionary mapping connector names to lists of token information + + Raises: + HTTPException: 404 if account not found + """ state = await accounts_service.get_account_current_state(account_name) if not state: raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") return state -@router.get("/accounts/{account_name}/state/history", response_model=PaginatedResponse) +@router.get("/{account_name}/state/history", response_model=PaginatedResponse) async def get_account_history( account_name: str, limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), @@ -140,7 +247,19 @@ async def get_account_history( end_time: datetime = Query(default=None, description="End time for filtering"), accounts_service: AccountsService = Depends(get_accounts_service) ): - """Get historical state of a specific account with pagination.""" + """ + Get historical state of a specific account with pagination. + + Args: + account_name: Name of the account to get history for + limit: Number of items per page (1-1000) + cursor: Cursor for pagination (ISO timestamp) + start_time: Start time for filtering results + end_time: End time for filtering results + + Returns: + Paginated response with historical account state data + """ data, next_cursor, has_more = await accounts_service.get_account_state_history( account_name=account_name, limit=limit, @@ -165,9 +284,20 @@ async def get_account_history( ) -@router.get("/accounts/{account_name}/value", response_model=Dict) +@router.get("/{account_name}/value", response_model=Dict) async def get_account_value(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): - """Get total portfolio value for a specific account.""" + """ + Get total portfolio value for a specific account. + + Args: + account_name: Name of the account to get value for + + Returns: + Dictionary with account name and total value + + Raises: + HTTPException: 404 if account not found + """ value_data = await accounts_service.get_portfolio_value(account_name) if account_name not in value_data["accounts"]: raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") @@ -177,9 +307,20 @@ async def get_account_value(account_name: str, accounts_service: AccountsService } -@router.get("/accounts/{account_name}/tokens", response_model=List[Dict]) +@router.get("/{account_name}/tokens", response_model=List[Dict]) async def get_account_tokens(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): - """Get all tokens held by a specific account.""" + """ + Get all tokens held by a specific account with aggregated information. 
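+
+    Example of the aggregated shape (illustrative values; field names follow
+    the description above, and the /accounts mount point is an assumption):
+
+        import requests
+        tokens = requests.get(
+            "http://localhost:8000/accounts/my_account/tokens",
+            auth=("admin", "admin"),
+        ).json()
+        # e.g. [{"token": "USDT", "total_units": 150.0, "total_value": 150.0, ...}]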
+ + Args: + account_name: Name of the account to get tokens for + + Returns: + List of token information with total units, value, and connector breakdown + + Raises: + HTTPException: 404 if account not found + """ state = await accounts_service.get_account_current_state(account_name) if not state: raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") @@ -213,16 +354,28 @@ async def get_account_tokens(account_name: str, accounts_service: AccountsServic # Connector-specific routes -@router.get("/accounts/{account_name}/connectors/{connector_name}/state", response_model=List[Dict]) +@router.get("/{account_name}/connectors/{connector_name}/state", response_model=List[Dict]) async def get_connector_state(account_name: str, connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): - """Get current state of a specific connector.""" + """ + Get current state of a specific connector. + + Args: + account_name: Name of the account + connector_name: Name of the connector + + Returns: + List of token information for the specific connector + + Raises: + HTTPException: 404 if connector not found for account + """ state = await accounts_service.get_connector_current_state(account_name, connector_name) if not state: raise HTTPException(status_code=404, detail=f"Connector '{connector_name}' not found for account '{account_name}'") return state -@router.get("/accounts/{account_name}/connectors/{connector_name}/state/history", response_model=PaginatedResponse) +@router.get("/{account_name}/connectors/{connector_name}/state/history", response_model=PaginatedResponse) async def get_connector_history( account_name: str, connector_name: str, @@ -232,7 +385,20 @@ async def get_connector_history( end_time: datetime = Query(default=None, description="End time for filtering"), accounts_service: AccountsService = Depends(get_accounts_service) ): - """Get historical state of a specific connector with pagination.""" + """ + Get historical state of a specific connector with pagination. + + Args: + account_name: Name of the account + connector_name: Name of the connector + limit: Number of items per page (1-1000) + cursor: Cursor for pagination (ISO timestamp) + start_time: Start time for filtering results + end_time: End time for filtering results + + Returns: + Paginated response with historical connector state data + """ data, next_cursor, has_more = await accounts_service.get_connector_state_history( account_name=account_name, connector_name=connector_name, @@ -262,13 +428,29 @@ async def get_connector_history( # Token-specific routes @router.get("/tokens", response_model=List[str]) async def get_all_tokens(accounts_service: AccountsService = Depends(get_accounts_service)): - """Get all unique tokens across all accounts and connectors.""" + """ + Get all unique tokens across all accounts and connectors. + + Returns: + List of unique token symbols held across all accounts + """ return await accounts_service.get_all_unique_tokens() @router.get("/tokens/{token}/state", response_model=List[Dict]) async def get_token_state(token: str, accounts_service: AccountsService = Depends(get_accounts_service)): - """Get current state of a specific token across all accounts.""" + """ + Get current state of a specific token across all accounts. 
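+
+    Example (illustrative token; the /accounts mount point is an assumption):
+
+        import requests
+        holdings = requests.get(
+            "http://localhost:8000/accounts/tokens/USDT/state",
+            auth=("admin", "admin"),
+        ).json()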
+ + Args: + token: Symbol of the token to get state for + + Returns: + List of token holdings across all accounts and connectors + + Raises: + HTTPException: 404 if token not found + """ state = await accounts_service.get_token_current_state(token) if not state: raise HTTPException(status_code=404, detail=f"Token '{token}' not found") @@ -277,7 +459,18 @@ async def get_token_state(token: str, accounts_service: AccountsService = Depend @router.get("/tokens/{token}/accounts", response_model=List[Dict]) async def get_token_accounts(token: str, accounts_service: AccountsService = Depends(get_accounts_service)): - """Get all accounts that hold a specific token.""" + """ + Get all accounts that hold a specific token with aggregated information. + + Args: + token: Symbol of the token to search for + + Returns: + List of accounts holding the token with total units, value, and connector breakdown + + Raises: + HTTPException: 404 if token not found + """ token_states = await accounts_service.get_token_current_state(token) if not token_states: raise HTTPException(status_code=404, detail=f"Token '{token}' not found") @@ -303,9 +496,21 @@ async def get_token_accounts(token: str, accounts_service: AccountsService = Dep return list(accounts.values()) -@router.get("/accounts/{account_name}/tokens/{token}", response_model=Dict) +@router.get("/{account_name}/tokens/{token}", response_model=Dict) async def get_account_token_state(account_name: str, token: str, accounts_service: AccountsService = Depends(get_accounts_service)): - """Get state of a specific token for a specific account.""" + """ + Get state of a specific token for a specific account. + + Args: + account_name: Name of the account + token: Symbol of the token to get state for + + Returns: + Token information including total units, value, and connector breakdown + + Raises: + HTTPException: 404 if account or token not found + """ state = await accounts_service.get_account_current_state(account_name) if not state: raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") @@ -340,13 +545,23 @@ async def get_account_token_state(account_name: str, token: str, accounts_servic # Portfolio aggregation routes @router.get("/portfolio/value", response_model=Dict) async def get_portfolio_value(accounts_service: AccountsService = Depends(get_accounts_service)): - """Get total portfolio value across all accounts.""" + """ + Get total portfolio value across all accounts. + + Returns: + Dictionary with total portfolio value and breakdown by account + """ return await accounts_service.get_portfolio_value() @router.get("/portfolio/tokens", response_model=List[Dict]) async def get_portfolio_tokens(accounts_service: AccountsService = Depends(get_accounts_service)): - """Get all tokens with aggregated holdings across all accounts.""" + """ + Get all tokens with aggregated holdings across all accounts. + + Returns: + List of tokens with total units, value, average price, and account breakdown + """ all_states = accounts_service.get_accounts_state() tokens = {} @@ -399,7 +614,12 @@ async def get_portfolio_tokens(accounts_service: AccountsService = Depends(get_a @router.get("/portfolio/distribution", response_model=Dict) async def get_portfolio_distribution(accounts_service: AccountsService = Depends(get_accounts_service)): - """Get portfolio distribution by token and exchange.""" + """ + Get portfolio distribution by token, exchange, and account. 
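+
+    Example (illustrative; the /accounts mount point and the exact breakdown
+    key names are assumptions):
+
+        import requests
+        dist = requests.get(
+            "http://localhost:8000/accounts/portfolio/distribution",
+            auth=("admin", "admin"),
+        ).json()
+        total = dist["total_value"]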
+ + Returns: + Dictionary with total value and percentage breakdowns by token, exchange, and account + """ all_states = accounts_service.get_accounts_state() portfolio_value = await accounts_service.get_portfolio_value() total_value = portfolio_value["total_value"] From c072820727738c71af66f538e2571cc90214d6f0 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 12 Jun 2025 13:55:41 +0800 Subject: [PATCH 061/244] (feat) add documentation to endpoints --- routers/backtesting.py | 13 +++ routers/bot_orchestration.py | 89 ++++++++++++++++++++- routers/controllers.py | 148 ++++++++++++++++++++++++++++++++--- routers/databases.py | 42 ++++++++++ routers/docker.py | 95 ++++++++++++++++++++++ routers/market_data.py | 5 +- routers/performance.py | 11 ++- routers/scripts.py | 106 ++++++++++++++++++++++--- 8 files changed, 482 insertions(+), 27 deletions(-) diff --git a/routers/backtesting.py b/routers/backtesting.py index 71fb8cb4..410bea14 100644 --- a/routers/backtesting.py +++ b/routers/backtesting.py @@ -22,6 +22,19 @@ class BacktestingConfig(BaseModel): @router.post("/run-backtesting") async def run_backtesting(backtesting_config: BacktestingConfig): + """ + Run a backtesting simulation with the provided configuration. + + Args: + backtesting_config: Configuration for the backtesting including start/end time, + resolution, trade cost, and controller config + + Returns: + Dictionary containing executors, processed data, and results from the backtest + + Raises: + Returns error dictionary if backtesting fails + """ try: if isinstance(backtesting_config.config, str): controller_config = backtesting_engine.get_controller_config_instance_from_yml( diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py index 5f516ce5..f7fac1d4 100644 --- a/routers/bot_orchestration.py +++ b/routers/bot_orchestration.py @@ -16,13 +16,29 @@ @router.get("/status") def get_active_bots_status(bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator)): - """Returns the cached status of all active bots.""" + """ + Get the status of all active bots. + + Args: + bots_manager: Bot orchestrator service dependency + + Returns: + Dictionary with status and data containing all active bot statuses + """ return {"status": "success", "data": bots_manager.get_all_bots_status()} @router.get("/mqtt") def get_mqtt_status(bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator)): - """Get MQTT connection status and discovered bots.""" + """ + Get MQTT connection status and discovered bots. + + Args: + bots_manager: Bot orchestrator service dependency + + Returns: + Dictionary with MQTT connection status, discovered bots, and broker information + """ mqtt_connected = bots_manager.mqtt_manager.is_connected discovered_bots = bots_manager.mqtt_manager.get_discovered_bots() active_bots = list(bots_manager.active_bots.keys()) @@ -46,6 +62,19 @@ def get_mqtt_status(bots_manager: BotsOrchestrator = Depends(get_bots_orchestrat @router.get("/{bot_name}/status") def get_bot_status(bot_name: str, bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator)): + """ + Get the status of a specific bot. 
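+
+    Example (illustrative bot name; the /bot-orchestration mount point is an
+    assumption):
+
+        import requests
+        status = requests.get(
+            "http://localhost:8000/bot-orchestration/my_bot/status",
+            auth=("admin", "admin"),
+        ).json()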
+ + Args: + bot_name: Name of the bot to get status for + bots_manager: Bot orchestrator service dependency + + Returns: + Dictionary with bot status information + + Raises: + HTTPException: 404 if bot not found + """ response = bots_manager.get_bot_status(bot_name) if not response: raise HTTPException(status_code=404, detail="Bot not found") @@ -64,7 +93,20 @@ async def get_bot_history( timeout: float = 30.0, bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator) ): - """Get trading history for a bot with optional parameters.""" + """ + Get trading history for a bot with optional parameters. + + Args: + bot_name: Name of the bot to get history for + days: Number of days of history to retrieve (0 for all) + verbose: Whether to include verbose output + precision: Decimal precision for numerical values + timeout: Timeout in seconds for the operation + bots_manager: Bot orchestrator service dependency + + Returns: + Dictionary with bot trading history + """ response = await bots_manager.get_bot_history( bot_name, days=days, @@ -77,6 +119,16 @@ async def get_bot_history( @router.post("/start-bot") async def start_bot(action: StartBotAction, bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator)): + """ + Start a bot with the specified configuration. + + Args: + action: StartBotAction containing bot configuration parameters + bots_manager: Bot orchestrator service dependency + + Returns: + Dictionary with status and response from bot start operation + """ response = await bots_manager.start_bot(action.bot_name, log_level=action.log_level, script=action.script, conf=action.conf, async_backend=action.async_backend) return {"status": "success", "response": response} @@ -84,6 +136,16 @@ async def start_bot(action: StartBotAction, bots_manager: BotsOrchestrator = Dep @router.post("/stop-bot") async def stop_bot(action: StopBotAction, bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator)): + """ + Stop a bot with the specified configuration. + + Args: + action: StopBotAction containing bot stop parameters + bots_manager: Bot orchestrator service dependency + + Returns: + Dictionary with status and response from bot stop operation + """ response = await bots_manager.stop_bot(action.bot_name, skip_order_cancellation=action.skip_order_cancellation, async_backend=action.async_backend) return {"status": "success", "response": response} @@ -222,7 +284,16 @@ async def create_hummingbot_instance( config: HummingbotInstanceConfig, docker_manager: DockerService = Depends(get_docker_service) ): - """Create a new Hummingbot instance with the specified configuration.""" + """ + Create a new Hummingbot instance with the specified configuration. + + Args: + config: Configuration for the new Hummingbot instance + docker_manager: Docker service dependency + + Returns: + Dictionary with creation response and instance details + """ logging.info(f"Creating hummingbot instance with config: {config}") response = docker_manager.create_hummingbot_instance(config) return response @@ -236,6 +307,16 @@ async def deploy_v2_controllers( """ Deploy a V2 strategy with controllers by generating the script config and creating the instance. This endpoint simplifies the deployment process for V2 controller strategies. 
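+
+    Example (a sketch only: the endpoint path, mount point, and payload
+    shown below are assumptions; see V2ControllerDeployment for the real
+    schema):
+
+        import requests
+        resp = requests.post(
+            "http://localhost:8000/bot-orchestration/deploy-v2-controllers",
+            json={},  # populate with V2ControllerDeployment fields
+            auth=("admin", "admin"),
+        )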
+ + Args: + deployment: V2ControllerDeployment configuration + docker_manager: Docker service dependency + + Returns: + Dictionary with deployment response and generated configuration details + + Raises: + HTTPException: 500 if deployment fails """ try: # Generate unique script config filename with timestamp diff --git a/routers/controllers.py b/routers/controllers.py index 3e77e95e..feb115ec 100644 --- a/routers/controllers.py +++ b/routers/controllers.py @@ -14,7 +14,12 @@ @router.get("/", response_model=Dict[str, List[str]]) async def list_controllers(): - """List all controllers organized by type.""" + """ + List all controllers organized by type. + + Returns: + Dictionary mapping controller types to lists of controller names + """ result = {} for controller_type in ControllerType: try: @@ -30,7 +35,15 @@ async def list_controllers(): @router.get("/{controller_type}", response_model=List[str]) async def list_controllers_by_type(controller_type: ControllerType): - """List controllers of a specific type.""" + """ + List controllers of a specific type. + + Args: + controller_type: Type of controllers to list + + Returns: + List of controller names for the specified type + """ try: files = file_system.list_files(f'controllers/{controller_type.value}') return [f.replace('.py', '') for f in files if f.endswith('.py') and f != "__init__.py"] @@ -40,7 +53,19 @@ async def list_controllers_by_type(controller_type: ControllerType): @router.get("/{controller_type}/{controller_name}", response_model=Dict[str, str]) async def get_controller(controller_type: ControllerType, controller_name: str): - """Get controller content by type and name.""" + """ + Get controller content by type and name. + + Args: + controller_type: Type of the controller + controller_name: Name of the controller + + Returns: + Dictionary with controller name, type, and content + + Raises: + HTTPException: 404 if controller not found + """ try: content = file_system.read_file(f"controllers/{controller_type.value}/{controller_name}.py") return { @@ -57,7 +82,19 @@ async def get_controller(controller_type: ControllerType, controller_name: str): @router.post("/{controller_type}", status_code=status.HTTP_201_CREATED) async def create_or_update_controller(controller_type: ControllerType, controller: Controller): - """Create or update a controller.""" + """ + Create or update a controller. + + Args: + controller_type: Type of controller to create/update + controller: Controller object with name, type, and content + + Returns: + Success message when controller is saved + + Raises: + HTTPException: 400 if controller type mismatch or save error + """ if controller.type != controller_type: raise HTTPException( status_code=400, @@ -78,7 +115,19 @@ async def create_or_update_controller(controller_type: ControllerType, controlle @router.delete("/{controller_type}/{controller_name}") async def delete_controller(controller_type: ControllerType, controller_name: str): - """Delete a controller.""" + """ + Delete a controller. 
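+
+    Example (illustrative controller name; the /controllers mount point is
+    an assumption):
+
+        import requests
+        requests.delete(
+            "http://localhost:8000/controllers/generic/my_controller",
+            auth=("admin", "admin"),
+        )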
+ + Args: + controller_type: Type of the controller + controller_name: Name of the controller to delete + + Returns: + Success message when controller is deleted + + Raises: + HTTPException: 404 if controller not found + """ try: file_system.delete_file(f'controllers/{controller_type.value}', f"{controller_name}.py") return {"message": f"Controller '{controller_name}' deleted successfully from '{controller_type.value}'"} @@ -92,7 +141,18 @@ async def delete_controller(controller_type: ControllerType, controller_name: st # Controller Configuration endpoints @router.get("/{controller_name}/config", response_model=Dict) async def get_controller_config(controller_name: str): - """Get controller configuration.""" + """ + Get controller configuration. + + Args: + controller_name: Name of the controller to get config for + + Returns: + Dictionary with controller configuration + + Raises: + HTTPException: 404 if configuration not found + """ try: config = file_system.read_yaml_file(f"bots/conf/controllers/{controller_name}.yml") return config @@ -102,7 +162,19 @@ async def get_controller_config(controller_name: str): @router.get("/{controller_type}/{controller_name}/config/template", response_model=Dict) async def get_controller_config_template(controller_type: ControllerType, controller_name: str): - """Get controller configuration template with default values.""" + """ + Get controller configuration template with default values. + + Args: + controller_type: Type of the controller + controller_name: Name of the controller + + Returns: + Dictionary with configuration template and default values + + Raises: + HTTPException: 404 if controller configuration class not found + """ config_class = file_system.load_controller_config_class(controller_type.value, controller_name) if config_class is None: raise HTTPException( @@ -117,7 +189,19 @@ async def get_controller_config_template(controller_type: ControllerType, contro @router.post("/{controller_name}/config", status_code=status.HTTP_201_CREATED) async def create_or_update_controller_config(controller_name: str, config: Dict): - """Create or update controller configuration.""" + """ + Create or update controller configuration. + + Args: + controller_name: Name of the controller + config: Configuration dictionary to save + + Returns: + Success message when configuration is saved + + Raises: + HTTPException: 400 if save error occurs + """ try: yaml_content = yaml.dump(config, default_flow_style=False) file_system.add_file('conf/controllers', f"{controller_name}.yml", yaml_content, override=True) @@ -128,7 +212,18 @@ async def create_or_update_controller_config(controller_name: str, config: Dict) @router.delete("/{controller_name}/config") async def delete_controller_config(controller_name: str): - """Delete controller configuration.""" + """ + Delete controller configuration. + + Args: + controller_name: Name of the controller to delete config for + + Returns: + Success message when configuration is deleted + + Raises: + HTTPException: 404 if configuration not found + """ try: file_system.delete_file('conf/controllers', f"{controller_name}.yml") return {"message": f"Configuration for controller '{controller_name}' deleted successfully"} @@ -138,14 +233,30 @@ async def delete_controller_config(controller_name: str): @router.get("/configs/", response_model=List[str]) async def list_controller_configs(): - """List all controller configurations.""" + """ + List all controller configurations. 
+ + Returns: + List of controller configuration names + """ return [f.replace('.yml', '') for f in file_system.list_files('conf/controllers') if f.endswith('.yml')] # Bot-specific controller config endpoints @router.get("/bots/{bot_name}/configs", response_model=List[Dict]) async def get_bot_controller_configs(bot_name: str): - """Get all controller configurations for a specific bot.""" + """ + Get all controller configurations for a specific bot. + + Args: + bot_name: Name of the bot to get configurations for + + Returns: + List of controller configurations for the bot + + Raises: + HTTPException: 404 if bot not found + """ bots_config_path = f"instances/{bot_name}/conf/controllers" if not file_system.path_exists(bots_config_path): raise HTTPException(status_code=404, detail=f"Bot '{bot_name}' not found") @@ -161,7 +272,20 @@ async def get_bot_controller_configs(bot_name: str): @router.post("/bots/{bot_name}/{controller_name}/config") async def update_bot_controller_config(bot_name: str, controller_name: str, config: Dict): - """Update controller configuration for a specific bot.""" + """ + Update controller configuration for a specific bot. + + Args: + bot_name: Name of the bot + controller_name: Name of the controller to update + config: Configuration dictionary to update with + + Returns: + Success message when configuration is updated + + Raises: + HTTPException: 404 if bot or controller not found, 400 if update error + """ bots_config_path = f"instances/{bot_name}/conf/controllers" if not file_system.path_exists(bots_config_path): raise HTTPException(status_code=404, detail=f"Bot '{bot_name}' not found") diff --git a/routers/databases.py b/routers/databases.py index 5973dcc8..40f253fc 100644 --- a/routers/databases.py +++ b/routers/databases.py @@ -16,11 +16,26 @@ @router.get("/", response_model=List[str]) async def list_databases(): + """ + List all available database files in the system. + + Returns: + List of database file paths + """ return file_system.list_databases() @router.post("/read", response_model=List[Dict[str, Any]]) async def read_databases(db_paths: List[str] = None): + """ + Read and extract data from multiple database files. + + Args: + db_paths: List of database file paths to read + + Returns: + List of database contents with tables and status information + """ dbs = [] for db_path in db_paths: db = HummingbotDatabase(db_path) @@ -53,6 +68,15 @@ async def read_databases(db_paths: List[str] = None): @router.post("/checkpoint", response_model=Dict[str, Any]) async def create_checkpoint(db_paths: List[str]): + """ + Create a checkpoint by consolidating data from multiple databases. + + Args: + db_paths: List of database paths to include in checkpoint + + Returns: + Dictionary with checkpoint creation status + """ try: dbs = await read_databases(db_paths) @@ -78,11 +102,29 @@ async def create_checkpoint(db_paths: List[str]): @router.get("/checkpoints", response_model=List[str]) async def list_checkpoints(full_path: bool): + """ + List all available checkpoint files. + + Args: + full_path: Whether to return full file paths or just filenames + + Returns: + List of checkpoint file paths or names + """ return file_system.list_checkpoints(full_path) @router.post("/checkpoints/load") async def load_checkpoint(checkpoint_path: str): + """ + Load data from a checkpoint file. 
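+
+    Example (illustrative checkpoint path; the /databases mount point is an
+    assumption):
+
+        import requests
+        data = requests.post(
+            "http://localhost:8000/databases/checkpoints/load",
+            params={"checkpoint_path": "bots/data/checkpoint_demo.sqlite"},
+            auth=("admin", "admin"),
+        ).json()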
+ + Args: + checkpoint_path: Path to the checkpoint file to load + + Returns: + Dictionary with checkpoint data including executors, orders, trades, and controllers + """ try: etl = ETLPerformance(checkpoint_path) executor = etl.load_executors() diff --git a/routers/docker.py b/routers/docker.py index 3a75f926..6c0f03fb 100644 --- a/routers/docker.py +++ b/routers/docker.py @@ -13,11 +13,30 @@ @router.get("/running") async def is_docker_running(docker_manager: DockerService = Depends(get_docker_service)): + """ + Check if Docker daemon is running. + + Args: + docker_manager: Docker service dependency + + Returns: + Dictionary indicating if Docker is running + """ return {"is_docker_running": docker_manager.is_docker_running()} @router.get("/available-images/{image_name}") async def available_images(image_name: str, docker_manager: DockerService = Depends(get_docker_service)): + """ + Get available Docker images matching the specified name. + + Args: + image_name: Name pattern to search for in image tags + docker_manager: Docker service dependency + + Returns: + Dictionary with list of available image tags + """ available_images = docker_manager.get_available_images() image_tags = [tag for image in available_images["images"] for tag in image.tags if image_name in tag] return {"available_images": image_tags} @@ -25,21 +44,64 @@ async def available_images(image_name: str, docker_manager: DockerService = Depe @router.get("/active-containers") async def active_containers(docker_manager: DockerService = Depends(get_docker_service)): + """ + Get all currently active (running) Docker containers. + + Args: + docker_manager: Docker service dependency + + Returns: + List of active container information + """ return docker_manager.get_active_containers() @router.get("/exited-containers") async def exited_containers(docker_manager: DockerService = Depends(get_docker_service)): + """ + Get all exited (stopped) Docker containers. + + Args: + docker_manager: Docker service dependency + + Returns: + List of exited container information + """ return docker_manager.get_exited_containers() @router.post("/clean-exited-containers") async def clean_exited_containers(docker_manager: DockerService = Depends(get_docker_service)): + """ + Remove all exited Docker containers to free up space. + + Args: + docker_manager: Docker service dependency + + Returns: + Response from cleanup operation + """ return docker_manager.clean_exited_containers() @router.post("/remove-container/{container_name}") async def remove_container(container_name: str, archive_locally: bool = True, s3_bucket: str = None, docker_manager: DockerService = Depends(get_docker_service), bot_archiver: BotArchiver = Depends(get_bot_archiver)): + """ + Remove a Docker container and optionally archive its data. 
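+
+    Example (illustrative container name; the /docker mount point is an
+    assumption):
+
+        import requests
+        requests.post(
+            "http://localhost:8000/docker/remove-container/hummingbot-demo",
+            params={"archive_locally": True},
+            auth=("admin", "admin"),
+        )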
+ + Args: + container_name: Name of the container to remove + archive_locally: Whether to archive data locally (default: True) + s3_bucket: S3 bucket name for cloud archiving (optional) + docker_manager: Docker service dependency + bot_archiver: Bot archiver service dependency + + Returns: + Response from container removal operation + + Raises: + HTTPException: 500 if archiving fails + """ # Remove the container response = docker_manager.remove_container(container_name) # Form the instance directory path correctly @@ -58,16 +120,49 @@ async def remove_container(container_name: str, archive_locally: bool = True, s3 @router.post("/stop-container/{container_name}") async def stop_container(container_name: str, docker_manager: DockerService = Depends(get_docker_service)): + """ + Stop a running Docker container. + + Args: + container_name: Name of the container to stop + docker_manager: Docker service dependency + + Returns: + Response from container stop operation + """ return docker_manager.stop_container(container_name) @router.post("/start-container/{container_name}") async def start_container(container_name: str, docker_manager: DockerService = Depends(get_docker_service)): + """ + Start a stopped Docker container. + + Args: + container_name: Name of the container to start + docker_manager: Docker service dependency + + Returns: + Response from container start operation + """ return docker_manager.start_container(container_name) @router.post("/pull-image/") async def pull_image(image: ImageName, docker_manager: DockerService = Depends(get_docker_service)): + """ + Pull a Docker image from a registry. + + Args: + image: ImageName object containing the image name to pull + docker_manager: Docker service dependency + + Returns: + Result of the image pull operation + + Raises: + HTTPException: 400 if pull operation fails + """ try: result = docker_manager.pull_image(image.image_name) return result diff --git a/routers/market_data.py b/routers/market_data.py index 6686c781..ce2adeff 100644 --- a/routers/market_data.py +++ b/routers/market_data.py @@ -91,6 +91,9 @@ async def get_active_feeds(request: Request): """ Get information about currently active market data feeds. + Args: + request: FastAPI request object to access application state + Returns: Dictionary with active feeds information including last access times and expiration """ @@ -107,7 +110,7 @@ async def get_market_data_settings(): Get current market data settings for debugging. Returns: - Current market data configuration + Dictionary with current market data configuration including cleanup and timeout settings """ from config import settings return { diff --git a/routers/performance.py b/routers/performance.py index 81ae9ad9..4469458d 100644 --- a/routers/performance.py +++ b/routers/performance.py @@ -10,13 +10,22 @@ @router.post("/results") async def get_performance_results(payload: Dict[str, Any]): + """ + Calculate performance results from executor data. 
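+
+    Example (a sketch; the /performance mount point is an assumption, and
+    the executors list must contain serialized executor dicts):
+
+        import requests
+        results = requests.post(
+            "http://localhost:8000/performance/results",
+            json={"executors": []},  # fill with serialized executor dicts
+            auth=("admin", "admin"),
+        ).json()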
+ + Args: + payload: Dictionary containing executors data for performance analysis + + Returns: + Dictionary with executors and calculated performance results + """ executors = payload.get("executors") data_source = PerformanceDataSource(executors) performance_results = {} try: backtesting_engine = BacktestingEngineBase() executor_info_list = data_source.executor_info_list - performance_results["results"] = backtesting_engine.summarize_results(executor_info_list ) + performance_results["results"] = backtesting_engine.summarize_results(executor_info_list) results = performance_results["results"] results["sharpe_ratio"] = results["sharpe_ratio"] if results["sharpe_ratio"] is not None else 0 return { diff --git a/routers/scripts.py b/routers/scripts.py index c312c168..75356cb4 100644 --- a/routers/scripts.py +++ b/routers/scripts.py @@ -14,13 +14,29 @@ @router.get("/", response_model=List[str]) async def list_scripts(): - """List all available scripts.""" + """ + List all available scripts. + + Returns: + List of script names (without .py extension) + """ return [f.replace('.py', '') for f in file_system.list_files('scripts') if f.endswith('.py')] @router.get("/{script_name}", response_model=Dict[str, str]) async def get_script(script_name: str): - """Get script content by name.""" + """ + Get script content by name. + + Args: + script_name: Name of the script to retrieve + + Returns: + Dictionary with script name and content + + Raises: + HTTPException: 404 if script not found + """ try: content = file_system.read_file(f"scripts/{script_name}.py") return { @@ -33,7 +49,18 @@ async def get_script(script_name: str): @router.post("/", status_code=status.HTTP_201_CREATED) async def create_or_update_script(script: Script): - """Create or update a script.""" + """ + Create or update a script. + + Args: + script: Script object with name and content + + Returns: + Success message when script is saved + + Raises: + HTTPException: 400 if save error occurs + """ try: file_system.add_file('scripts', f"{script.name}.py", script.content, override=True) return {"message": f"Script '{script.name}' saved successfully"} @@ -43,7 +70,18 @@ async def create_or_update_script(script: Script): @router.delete("/{script_name}") async def delete_script(script_name: str): - """Delete a script.""" + """ + Delete a script. + + Args: + script_name: Name of the script to delete + + Returns: + Success message when script is deleted + + Raises: + HTTPException: 404 if script not found + """ try: file_system.delete_file('scripts', f"{script_name}.py") return {"message": f"Script '{script_name}' deleted successfully"} @@ -54,7 +92,18 @@ async def delete_script(script_name: str): # Script Configuration endpoints @router.get("/{script_name}/config", response_model=Dict) async def get_script_config(script_name: str): - """Get script configuration.""" + """ + Get script configuration. + + Args: + script_name: Name of the script to get config for + + Returns: + Dictionary with script configuration + + Raises: + HTTPException: 404 if configuration not found + """ try: config = file_system.read_yaml_file(f"bots/conf/scripts/{script_name}.yml") return config @@ -64,7 +113,18 @@ async def get_script_config(script_name: str): @router.get("/{script_name}/config/template", response_model=Dict) async def get_script_config_template(script_name: str): - """Get script configuration template with default values.""" + """ + Get script configuration template with default values. 
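+
+    Example (illustrative script name; the /scripts mount point is an
+    assumption):
+
+        import requests
+        template = requests.get(
+            "http://localhost:8000/scripts/my_script/config/template",
+            auth=("admin", "admin"),
+        ).json()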
+ + Args: + script_name: Name of the script to get template for + + Returns: + Dictionary with configuration template and default values + + Raises: + HTTPException: 404 if script configuration class not found + """ config_class = file_system.load_script_config_class(script_name) if config_class is None: raise HTTPException(status_code=404, detail=f"Script configuration class for '{script_name}' not found") @@ -76,7 +136,19 @@ async def get_script_config_template(script_name: str): @router.post("/{script_name}/config", status_code=status.HTTP_201_CREATED) async def create_or_update_script_config(script_name: str, config: Dict): - """Create or update script configuration.""" + """ + Create or update script configuration. + + Args: + script_name: Name of the script + config: Configuration dictionary to save + + Returns: + Success message when configuration is saved + + Raises: + HTTPException: 400 if save error occurs + """ try: yaml_content = yaml.dump(config, default_flow_style=False) file_system.add_file('conf/scripts', f"{script_name}.yml", yaml_content, override=True) @@ -87,7 +159,18 @@ async def create_or_update_script_config(script_name: str, config: Dict): @router.delete("/{script_name}/config") async def delete_script_config(script_name: str): - """Delete script configuration.""" + """ + Delete script configuration. + + Args: + script_name: Name of the script to delete config for + + Returns: + Success message when configuration is deleted + + Raises: + HTTPException: 404 if configuration not found + """ try: file_system.delete_file('conf/scripts', f"{script_name}.yml") return {"message": f"Configuration for script '{script_name}' deleted successfully"} @@ -97,5 +180,10 @@ async def delete_script_config(script_name: str): @router.get("/configs/", response_model=List[str]) async def list_script_configs(): - """List all script configurations.""" + """ + List all script configurations. 
+ + Returns: + List of script configuration names + """ return [f.replace('.yml', '') for f in file_system.list_files('conf/scripts') if f.endswith('.yml')] \ No newline at end of file From 4ae18a1dbce9ba00de486dd653569ac9c2ed084b Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 12 Jun 2025 13:55:50 +0800 Subject: [PATCH 062/244] (feat) add usd as default kraken --- services/accounts_service.py | 1 + 1 file changed, 1 insertion(+) diff --git a/services/accounts_service.py b/services/accounts_service.py index 7bc9b22d..75af40cb 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -26,6 +26,7 @@ class AccountsService: "hyperliquid": "USD", "hyperliquid_perpetual": "USDC", "xrpl": "RLUSD", + "kraken": "USD", } def __init__(self, From f1b252aaa36fd3d28895570c9124af3594ec8cd6 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 12 Jun 2025 15:21:07 +0800 Subject: [PATCH 063/244] (feat) normalize credentials route --- routers/accounts.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/routers/accounts.py b/routers/accounts.py index 62533553..a945538e 100644 --- a/routers/accounts.py +++ b/routers/accounts.py @@ -193,25 +193,25 @@ async def delete_credential(account_name: str, connector_name: str, accounts_ser raise HTTPException(status_code=404, detail=str(e)) -@router.post("/add-connector-keys/{account_name}/{connector_name}", status_code=status.HTTP_201_CREATED) -async def add_connector_keys(account_name: str, connector_name: str, keys: Dict, accounts_service: AccountsService = Depends(get_accounts_service)): +@router.post("/add-credential/{account_name}/{connector_name}", status_code=status.HTTP_201_CREATED) +async def add_credential(account_name: str, connector_name: str, credentials: Dict, accounts_service: AccountsService = Depends(get_accounts_service)): """ - Add or update connector keys (API credentials) for a specific account and connector. + Add or update connector credentials (API keys) for a specific account and connector. 
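+
+    Example (illustrative; the credential key names vary per connector and,
+    like the /accounts mount point, are assumptions here):
+
+        import requests
+        requests.post(
+            "http://localhost:8000/accounts/add-credential/my_account/binance",
+            json={"binance_api_key": "...", "binance_api_secret": "..."},
+            auth=("admin", "admin"),
+        )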
Args: account_name: Name of the account connector_name: Name of the connector - keys: Dictionary containing the connector credentials + credentials: Dictionary containing the connector credentials Returns: - Success message when keys are added + Success message when credentials are added Raises: - HTTPException: 400 if there's an error adding the keys + HTTPException: 400 if there's an error adding the credentials """ try: - await accounts_service.add_connector_keys(account_name, connector_name, keys) - return {"message": "Connector keys added successfully."} + await accounts_service.add_credentials(account_name, connector_name, credentials) + return {"message": "Connector credentials added successfully."} except Exception as e: accounts_service.delete_credentials(account_name, connector_name) raise HTTPException(status_code=400, detail=str(e)) From 7de4f96ebd7a4b16882ad3c3f6410c0588283e58 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 12 Jun 2025 15:21:16 +0800 Subject: [PATCH 064/244] (feat) normalize credentials name --- services/accounts_service.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/services/accounts_service.py b/services/accounts_service.py index 75af40cb..02cc516c 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -293,8 +293,8 @@ def get_connector_config_map(self, connector_name: str): """ return self.connector_manager.get_connector_config_map(connector_name) - async def add_connector_keys(self, account_name: str, connector_name: str, keys: dict): - new_connector = await self.connector_manager.update_connector_keys(account_name, connector_name, keys) + async def add_credentials(self, account_name: str, connector_name: str, credentials: dict): + new_connector = await self.connector_manager.update_connector_keys(account_name, connector_name, credentials) self.accounts[account_name][connector_name] = new_connector await self.update_account_state() await self.dump_account_state() From f56cc9e702e630220ad31c4fd5a8014e379c84d6 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 12 Jun 2025 15:21:22 +0800 Subject: [PATCH 065/244] (feat) adapt readme --- README.md | 31 ++++++++++++++++++++++++++++--- 1 file changed, 28 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 383b74a1..04651c1c 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,32 @@ Backend-api is a dedicated solution for managing Hummingbot instances. It offers ## Getting Started -### Conda Installation +### Development Setup + +1. **Initial Setup**: + - Run the setup script to configure environment variables and start required containers (EMQX and PostgreSQL): + ```bash + ./setup.sh + ``` + - This script will set up the `.env` file and start the necessary Docker containers for the message broker and database. + +2. **Development Mode**: + - Use the run script with the `--dev` flag to run the API from source: + ```bash + ./run.sh --dev + ``` + - This will activate the conda environment and run the API with uvicorn for development with hot reload. + +3. **Production Mode**: + - Use the run script without flags to run with Docker Compose: + ```bash + ./run.sh + ``` + - This will start all services using Docker Compose in detached mode. + +### Manual Setup (Alternative) + +#### Conda Installation 1. Install the environment using Conda: ```bash conda env create -f environment.yml @@ -21,13 +46,13 @@ Backend-api is a dedicated solution for managing Hummingbot instances. 
It offers conda activate backend-api ``` -### Running the API with Conda +#### Running the API with Conda Run the API using uvicorn with the following command: ```bash uvicorn main:app --reload ``` -### Docker Installation and Running the API +#### Docker Installation and Running the API For running the project using Docker, follow these steps: 1. **Set up Environment Variables**: From 1a9476c84496dc9c9e752442c929a2c7ac1d1ac8 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 12 Jun 2025 15:21:30 +0800 Subject: [PATCH 066/244] (feat) --- run.sh | 16 ++++++++++++++++ setup.sh | 12 +++++++++++- 2 files changed, 27 insertions(+), 1 deletion(-) create mode 100755 run.sh diff --git a/run.sh b/run.sh new file mode 100755 index 00000000..d0b049a0 --- /dev/null +++ b/run.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +# Run script for Backend API +# Usage: ./run.sh [--dev] +# --dev: Run API from source using uvicorn +# Without --dev: Run using docker compose + +if [[ "$1" == "--dev" ]]; then + echo "Running API from source..." + # Activate conda environment and run with uvicorn + conda activate backend-api + uvicorn main:app --reload +else + echo "Running with Docker Compose..." + docker compose up -d +fi \ No newline at end of file diff --git a/setup.sh b/setup.sh index 5bd6df9d..53d869e2 100755 --- a/setup.sh +++ b/setup.sh @@ -145,4 +145,14 @@ echo "" echo -e "${PURPLE}💡 Pro tip:${NC} You can modify environment variables in .env file anytime" echo -e "${PURPLE}📚 Documentation:${NC} Check config.py for all available settings" echo -e "${PURPLE}🔒 Security:${NC} The password verification file secures bot credentials" -echo "" \ No newline at end of file +echo "" +echo -e "${GREEN}🐳 Starting required Docker containers and pulling Hummingbot image...${NC}" + +# Run docker operations in parallel +docker compose up emqx postgres -d & +docker pull hummingbot/hummingbot:latest & + +# Wait for both operations to complete +wait + +echo -e "${GREEN}✅ All Docker operations completed!${NC}" From d7ac8018f2084010cc8ebe97dc0973f09d458960 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 12 Jun 2025 16:19:00 +0800 Subject: [PATCH 067/244] (feat) fix dockerfile --- Dockerfile | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 206f8616..81dc0eff 100644 --- a/Dockerfile +++ b/Dockerfile @@ -18,7 +18,7 @@ RUN conda env create -f environment.yml && \ rm -rf /root/.cache/pip/* # Stage 2: Runtime stage -FROM continuumio/miniconda3-slim +FROM continuumio/miniconda3 # Install only runtime dependencies RUN apt-get update && \ @@ -33,7 +33,8 @@ COPY --from=builder /opt/conda/envs/backend-api /opt/conda/envs/backend-api WORKDIR /backend-api # Copy only necessary application files -COPY main.py config.py deps.py models.py ./ +COPY main.py config.py deps.py ./ +COPY models ./models COPY routers ./routers COPY services ./services COPY utils ./utils @@ -44,6 +45,9 @@ COPY bots/scripts ./bots/scripts # Create necessary directories RUN mkdir -p bots/instances bots/conf bots/credentials bots/data +# Expose port +EXPOSE 8000 + # Set environment variables to ensure conda env is used ENV PATH="/opt/conda/envs/backend-api/bin:$PATH" ENV CONDA_DEFAULT_ENV=backend-api From 8ca034dedda473c5e13d26afa1376c4ff16e2acb Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 12 Jun 2025 16:59:13 +0800 Subject: [PATCH 068/244] (feat) improve files module --- models/bot.py | 41 ++++++++++++++++++++++++++++------------- 1 file changed, 28 insertions(+), 13 deletions(-) diff --git a/models/bot.py 
b/models/bot.py index 3a447c1e..df66aefb 100644 --- a/models/bot.py +++ b/models/bot.py @@ -9,25 +9,40 @@ class ControllerType(str, Enum): GENERIC = "generic" -class Script(BaseModel): - name: str = Field(description="Script name (without .py extension)") - content: str = Field(description="Python script content") +class FileContent(BaseModel): + """Base model for file content""" + content: str = Field(description="File content") -class ScriptConfig(BaseModel): - name: str = Field(description="Config name (without .yml extension)") - content: Dict[str, Any] = Field(description="YAML content as dictionary") +class ConfigContent(BaseModel): + """Base model for configuration content""" + content: Dict[str, Any] = Field(description="Configuration content as dictionary") -class Controller(BaseModel): - name: str = Field(description="Controller name (without .py extension)") - type: ControllerType = Field(description="Controller category") - content: str = Field(description="Python controller content") +class TypedFileContent(FileContent): + """File content with a type classification""" + type: Optional[ControllerType] = Field(default=None, description="Content category") -class ControllerConfig(BaseModel): - name: str = Field(description="Config name (without .yml extension)") - content: Dict[str, Any] = Field(description="YAML content as dictionary") +# Specific models using base classes +class Script(FileContent): + """Python script content""" + pass + + +class ScriptConfig(ConfigContent): + """Script configuration content""" + pass + + +class Controller(TypedFileContent): + """Controller content with optional type (type can come from URL path)""" + pass + + +class ControllerConfig(ConfigContent): + """Controller configuration content""" + pass class BotAction(BaseModel): From e8c57349a9f803dd8b03f310bf4a3e66ae1c64a6 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 12 Jun 2025 16:59:28 +0800 Subject: [PATCH 069/244] (feat) improve file system utils --- utils/file_system.py | 302 +++++++++++++++++++++++++++++++------------ 1 file changed, 217 insertions(+), 85 deletions(-) diff --git a/utils/file_system.py b/utils/file_system.py index da108d51..cfbc4d61 100644 --- a/utils/file_system.py +++ b/utils/file_system.py @@ -5,7 +5,7 @@ import shutil import sys from pathlib import Path -from typing import List, Optional +from typing import List, Optional, Type import yaml from hummingbot.client.config.config_data_types import BaseClientModel @@ -19,6 +19,8 @@ class FileSystemUtil: """ FileSystemUtil provides utility functions for file and directory management, as well as dynamic loading of script configurations. + + All file operations are performed relative to the base_path unless an absolute path is provided. """ base_path: str = "bots" # Default base path @@ -29,15 +31,29 @@ def __init__(self, base_path: Optional[str] = None): """ if base_path: self.base_path = base_path + + def _get_full_path(self, path: str) -> str: + """ + Get the full path by combining base_path with relative path. + :param path: Relative or absolute path. + :return: Full absolute path. + """ + return path if os.path.isabs(path) else os.path.join(self.base_path, path) def list_files(self, directory: str) -> List[str]: """ Lists all files in a given directory. :param directory: The directory to list files from. :return: List of file names in the directory. + :raises FileNotFoundError: If the directory does not exist. + :raises PermissionError: If access is denied to the directory. 
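+
+        Example (illustrative; assumes a "scripts" folder exists under the
+        configured base path):
+
+            fs = FileSystemUtil()
+            names = fs.list_files("scripts")  # e.g. ["my_script.py"]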
""" excluded_files = ["__init__.py", "__pycache__", ".DS_Store", ".dockerignore", ".gitignore"] - dir_path = os.path.join(self.base_path, directory) + dir_path = self._get_full_path(directory) + if not os.path.exists(dir_path): + raise FileNotFoundError(f"Directory '{directory}' not found") + if not os.path.isdir(dir_path): + raise NotADirectoryError(f"Path '{directory}' is not a directory") return [f for f in os.listdir(dir_path) if os.path.isfile(os.path.join(dir_path, f)) and f not in excluded_files] def list_folders(self, directory: str) -> List[str]: @@ -45,62 +61,97 @@ def list_folders(self, directory: str) -> List[str]: Lists all folders in a given directory. :param directory: The directory to list folders from. :return: List of folder names in the directory. - """ - dir_path = os.path.join(self.base_path, directory) + :raises FileNotFoundError: If the directory does not exist. + :raises PermissionError: If access is denied to the directory. + """ + dir_path = self._get_full_path(directory) + if not os.path.exists(dir_path): + raise FileNotFoundError(f"Directory '{directory}' not found") + if not os.path.isdir(dir_path): + raise NotADirectoryError(f"Path '{directory}' is not a directory") return [d for d in os.listdir(dir_path) if os.path.isdir(os.path.join(dir_path, d))] - def create_folder(self, directory: str, folder_name: str): + def create_folder(self, directory: str, folder_name: str) -> None: """ Creates a folder in a specified directory. :param directory: The directory to create the folder in. :param folder_name: The name of the folder to be created. + :raises PermissionError: If permission is denied to create the folder. + :raises OSError: If there's an OS-level error creating the folder. """ - folder_path = os.path.join(self.base_path, directory, folder_name) + if not folder_name or '/' in folder_name or '\\' in folder_name: + raise ValueError(f"Invalid folder name: '{folder_name}'") + folder_path = self._get_full_path(os.path.join(directory, folder_name)) os.makedirs(folder_path, exist_ok=True) - def copy_folder(self, src: str, dest: str): + def copy_folder(self, src: str, dest: str) -> None: """ Copies a folder to a new location. :param src: The source folder to copy. :param dest: The destination folder to copy to. + :raises FileNotFoundError: If source folder doesn't exist. + :raises PermissionError: If permission is denied. """ - src_path = os.path.join(self.base_path, src) - dest_path = os.path.join(self.base_path, dest) - os.makedirs(dest_path, exist_ok=True) - for item in os.listdir(src_path): - s = os.path.join(src_path, item) - d = os.path.join(dest_path, item) - if os.path.isdir(s): - self.copy_folder(s, d) - else: - shutil.copy2(s, d) + src_path = self._get_full_path(src) + dest_path = self._get_full_path(dest) + + if not os.path.exists(src_path): + raise FileNotFoundError(f"Source folder '{src}' not found") + if not os.path.isdir(src_path): + raise NotADirectoryError(f"Source path '{src}' is not a directory") + + shutil.copytree(src_path, dest_path, dirs_exist_ok=True) - def copy_file(self, src: str, dest: str): + def copy_file(self, src: str, dest: str) -> None: """ Copies a file to a new location. :param src: The source file to copy. :param dest: The destination file to copy to. + :raises FileNotFoundError: If source file doesn't exist. + :raises PermissionError: If permission is denied. 
""" - src_path = os.path.join(self.base_path, src) - dest_path = os.path.join(self.base_path, dest) + src_path = self._get_full_path(src) + dest_path = self._get_full_path(dest) + + if not os.path.exists(src_path): + raise FileNotFoundError(f"Source file '{src}' not found") + if os.path.isdir(src_path): + raise IsADirectoryError(f"Source path '{src}' is a directory, not a file") + + # Ensure destination directory exists + dest_dir = os.path.dirname(dest_path) + os.makedirs(dest_dir, exist_ok=True) + shutil.copy2(src_path, dest_path) - def delete_folder(self, directory: str, folder_name: str): + def delete_folder(self, directory: str, folder_name: str) -> None: """ Deletes a folder in a specified directory. :param directory: The directory to delete the folder from. :param folder_name: The name of the folder to be deleted. - """ - folder_path = os.path.join(self.base_path, directory, folder_name) + :raises FileNotFoundError: If folder doesn't exist. + :raises PermissionError: If permission is denied. + """ + folder_path = self._get_full_path(os.path.join(directory, folder_name)) + if not os.path.exists(folder_path): + raise FileNotFoundError(f"Folder '{folder_name}' not found in '{directory}'") + if not os.path.isdir(folder_path): + raise NotADirectoryError(f"Path '{folder_name}' is not a directory") shutil.rmtree(folder_path) - def delete_file(self, directory: str, file_name: str): + def delete_file(self, directory: str, file_name: str) -> None: """ Deletes a file in a specified directory. :param directory: The directory to delete the file from. :param file_name: The name of the file to be deleted. - """ - file_path = os.path.join(self.base_path, directory, file_name) + :raises FileNotFoundError: If file doesn't exist. + :raises PermissionError: If permission is denied. + """ + file_path = self._get_full_path(os.path.join(directory, file_name)) + if not os.path.exists(file_path): + raise FileNotFoundError(f"File '{file_name}' not found in '{directory}'") + if os.path.isdir(file_path): + raise IsADirectoryError(f"Path '{file_name}' is a directory, not a file") os.remove(file_path) def path_exists(self, path: str) -> bool: @@ -109,56 +160,101 @@ def path_exists(self, path: str) -> bool: :param path: The path to check. :return: True if the path exists, False otherwise. """ - return os.path.exists(os.path.join(self.base_path, path)) + return os.path.exists(self._get_full_path(path)) - def add_file(self, directory: str, file_name: str, content: str, override: bool = False): + def add_file(self, directory: str, file_name: str, content: str, override: bool = False) -> None: """ Adds a file to a specified directory. :param directory: The directory to add the file to. :param file_name: The name of the file to be added. :param content: The content to be written to the file. :param override: If True, override the file if it exists. + :raises ValueError: If file_name is invalid. + :raises FileExistsError: If file exists and override is False. + :raises PermissionError: If permission is denied to write the file. 
""" - file_path = os.path.join(self.base_path, directory, file_name) + if not file_name or '/' in file_name or '\\' in file_name: + raise ValueError(f"Invalid file name: '{file_name}'") + + dir_path = self._get_full_path(directory) + os.makedirs(dir_path, exist_ok=True) + + file_path = os.path.join(dir_path, file_name) if not override and os.path.exists(file_path): raise FileExistsError(f"File '{file_name}' already exists in '{directory}'.") - with open(file_path, 'w') as file: + + with open(file_path, 'w', encoding='utf-8') as file: file.write(content) - def append_to_file(self, directory: str, file_name: str, content: str): + def append_to_file(self, directory: str, file_name: str, content: str) -> None: """ Appends content to a specified file. :param directory: The directory containing the file. :param file_name: The name of the file to append to. :param content: The content to append to the file. - """ - file_path = os.path.join(self.base_path, directory, file_name) - with open(file_path, 'a') as file: + :raises FileNotFoundError: If file doesn't exist. + :raises PermissionError: If permission is denied. + """ + file_path = self._get_full_path(os.path.join(directory, file_name)) + if not os.path.exists(file_path): + raise FileNotFoundError(f"File '{file_name}' not found in '{directory}'") + if os.path.isdir(file_path): + raise IsADirectoryError(f"Path '{file_name}' is a directory, not a file") + + with open(file_path, 'a', encoding='utf-8') as file: file.write(content) - @staticmethod - def dump_dict_to_yaml(filename, data_dict): + def read_file(self, file_path: str) -> str: + """ + Reads the content of a file. + :param file_path: The relative path to the file from base_path. + :return: The content of the file as a string. + :raises FileNotFoundError: If the file does not exist. + :raises PermissionError: If access is denied to the file. + :raises IsADirectoryError: If the path points to a directory. + """ + full_path = self._get_full_path(file_path) + if not os.path.exists(full_path): + raise FileNotFoundError(f"File '{file_path}' not found") + if os.path.isdir(full_path): + raise IsADirectoryError(f"Path '{file_path}' is a directory, not a file") + + with open(full_path, 'r', encoding='utf-8') as file: + return file.read() + + def dump_dict_to_yaml(self, filename: str, data_dict: dict) -> None: """ Dumps a dictionary to a YAML file. + :param filename: The file to dump the dictionary into (relative to base_path). :param data_dict: The dictionary to dump. - :param filename: The file to dump the dictionary into. + :raises PermissionError: If permission is denied to write the file. """ - with open(filename, 'w') as file: - yaml.dump(data_dict, file) + file_path = self._get_full_path(filename) + os.makedirs(os.path.dirname(file_path), exist_ok=True) + with open(file_path, 'w', encoding='utf-8') as file: + yaml.dump(data_dict, file, default_flow_style=False, allow_unicode=True) - @staticmethod - def read_yaml_file(file_path): + def read_yaml_file(self, file_path: str) -> dict: """ Reads a YAML file and returns the data as a dictionary. - :param file_path: The path to the YAML file. + :param file_path: The path to the YAML file (relative to base_path or absolute). :return: Dictionary containing the YAML file data. + :raises FileNotFoundError: If the file doesn't exist. + :raises yaml.YAMLError: If the YAML is invalid. 
""" - with open(file_path, 'r') as file: - data = yaml.safe_load(file) - return data + full_path = self._get_full_path(file_path) if not os.path.isabs(file_path) else file_path + if not os.path.exists(full_path): + raise FileNotFoundError(f"YAML file '{file_path}' not found") + + with open(full_path, 'r', encoding='utf-8') as file: + try: + data = yaml.safe_load(file) + return data if data is not None else {} + except yaml.YAMLError as e: + raise yaml.YAMLError(f"Invalid YAML in file '{file_path}': {e}") @staticmethod - def load_script_config_class(script_name): + def load_script_config_class(script_name: str) -> Optional[Type[BaseClientModel]]: """ Dynamically loads a script's configuration class. :param script_name: The name of the script file (without the '.py' extension). @@ -176,14 +272,15 @@ def load_script_config_class(script_name): for _, cls in inspect.getmembers(script_module, inspect.isclass): if issubclass(cls, BaseClientModel) and cls is not BaseClientModel: return cls - except Exception as e: - print(f"Error loading script class: {e}") # Handle or log the error appropriately + except (ImportError, AttributeError, ModuleNotFoundError) as e: + logging.warning(f"Error loading script class for '{script_name}': {e}") return None @staticmethod - def load_controller_config_class(controller_type: str, controller_name: str): + def load_controller_config_class(controller_type: str, controller_name: str) -> Optional[Type]: """ Dynamically loads a controller's configuration class. + :param controller_type: The type of the controller. :param controller_name: The name of the controller file (without the '.py' extension). :return: The configuration class from the controller, or None if not found. """ @@ -201,35 +298,49 @@ def load_controller_config_class(controller_type: str, controller_name: str): or (issubclass(cls, MarketMakingControllerConfigBase) and cls is not MarketMakingControllerConfigBase)\ or (issubclass(cls, ControllerConfigBase) and cls is not ControllerConfigBase): return cls - except Exception as e: - print(f"Error loading controller class: {e}") + except (ImportError, AttributeError, ModuleNotFoundError) as e: + logging.warning(f"Error loading controller class for '{controller_type}.{controller_name}': {e}") + return None - @staticmethod - def ensure_file_and_dump_text(file_path, text): + def ensure_file_and_dump_text(self, file_path: str, text: str) -> None: """ - Ensures that the directory for the file exists, then dumps the dictionary to a YAML file. - :param file_path: The file path to dump the dictionary into. - :param text: The text to dump. + Ensures that the directory for the file exists, then writes text to a file. + :param file_path: The file path to write to (relative to base_path or absolute). + :param text: The text to write. + :raises PermissionError: If permission is denied. """ - os.makedirs(os.path.dirname(file_path), exist_ok=True) - with open(file_path, "w") as f: + full_path = self._get_full_path(file_path) if not os.path.isabs(file_path) else file_path + os.makedirs(os.path.dirname(full_path), exist_ok=True) + with open(full_path, "w", encoding='utf-8') as f: f.write(text) - @staticmethod - # TODO: make paths relative - def get_connector_keys_path(account_name: str, connector_name: str) -> Path: - return Path(f"bots/credentials/{account_name}/connectors/{connector_name}.yml") + def get_connector_keys_path(self, account_name: str, connector_name: str) -> Path: + """ + Get the path to connector credentials file. + :param account_name: Name of the account. 
+ :param connector_name: Name of the connector. + :return: Path to the connector credentials file. + """ + return Path(self.base_path) / "credentials" / account_name / "connectors" / f"{connector_name}.yml" - @staticmethod - def save_model_to_yml(yml_path: Path, cm: ClientConfigAdapter): + def save_model_to_yml(self, yml_path: str, cm: ClientConfigAdapter) -> None: + """ + Save a ClientConfigAdapter model to a YAML file. + :param yml_path: Path to the YAML file (relative to base_path or absolute). + :param cm: The ClientConfigAdapter to save. + :raises PermissionError: If permission is denied to write the file. + """ try: + full_path = self._get_full_path(yml_path) cm_yml_str = cm.generate_yml_output_str_with_comments() - with open(yml_path, "w", encoding="utf-8") as outfile: + os.makedirs(os.path.dirname(full_path), exist_ok=True) + with open(full_path, "w", encoding="utf-8") as outfile: outfile.write(cm_yml_str) except Exception as e: - logging.error("Error writing configs: %s" % (str(e),), exc_info=True) + logging.error(f"Error writing configs to '{yml_path}': {e}", exc_info=True) + raise - def get_base_path(self): + def get_base_path(self) -> str: """ Returns the base path for file operations :return: The base path string @@ -245,7 +356,7 @@ def get_directory_creation_time(self, path): import os import datetime - full_path = os.path.join(self.base_path, path) + full_path = self._get_full_path(path) if not os.path.exists(full_path): return None @@ -267,7 +378,7 @@ def list_directories(self, path): """ import os - full_path = os.path.join(self.base_path, path) + full_path = self._get_full_path(path) if not os.path.exists(full_path): return [] @@ -277,32 +388,53 @@ def list_directories(self, path): except Exception: return [] - def list_databases(self): + def list_databases(self) -> List[str]: """ Lists all database files in archived instances :return: List of database file paths """ - archived_path = os.path.join(self.base_path, "archived") - archived_instances = self.list_folders("archived") + try: + archived_instances = self.list_folders("archived") + except FileNotFoundError: + return [] + archived_databases = [] for archived_instance in archived_instances: - db_path = os.path.join(archived_path, archived_instance, "data") - archived_databases += [os.path.join(db_path, db_file) for db_file in os.listdir(db_path) - if db_file.endswith(".sqlite")] + db_path = self._get_full_path(os.path.join("archived", archived_instance, "data")) + try: + if os.path.exists(db_path): + archived_databases.extend([ + os.path.join(db_path, db_file) + for db_file in os.listdir(db_path) + if db_file.endswith(".sqlite") + ]) + except (OSError, PermissionError) as e: + logging.warning(f"Error accessing database path '{db_path}': {e}") return archived_databases - def list_checkpoints(self, full_path: bool): + def list_checkpoints(self, full_path: bool = False) -> List[str]: """ Lists all checkpoint database files :param full_path: If True, return full paths, otherwise just filenames :return: List of checkpoint database files """ - dir_path = os.path.join(self.base_path, "data") - if full_path: - checkpoints = [os.path.join(dir_path, f) for f in os.listdir(dir_path) if - os.path.isfile(os.path.join(dir_path, f)) - and f.startswith("checkpoint") and f.endswith(".sqlite")] - else: - checkpoints = [f for f in os.listdir(dir_path) if os.path.isfile(os.path.join(dir_path, f)) - and f.startswith("checkpoint") and f.endswith(".sqlite")] - return checkpoints + dir_path = self._get_full_path("data") + if not 
os.path.exists(dir_path): + return [] + + try: + files = os.listdir(dir_path) + checkpoint_files = [ + f for f in files + if (os.path.isfile(os.path.join(dir_path, f)) + and f.startswith("checkpoint") + and f.endswith(".sqlite")) + ] + + if full_path: + return [os.path.join(dir_path, f) for f in checkpoint_files] + else: + return checkpoint_files + except (OSError, PermissionError) as e: + logging.warning(f"Error listing checkpoints in '{dir_path}': {e}") + return [] From c40a2af8e5ad6bf9684c7a9a3434f3489c1ac9ba Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 12 Jun 2025 16:59:38 +0800 Subject: [PATCH 070/244] (feat) std controllers routes --- routers/controllers.py | 197 ++++++++++++++++++++++------------------- 1 file changed, 105 insertions(+), 92 deletions(-) diff --git a/routers/controllers.py b/routers/controllers.py index feb115ec..49b9a310 100644 --- a/routers/controllers.py +++ b/routers/controllers.py @@ -33,22 +33,108 @@ async def list_controllers(): return result -@router.get("/{controller_type}", response_model=List[str]) -async def list_controllers_by_type(controller_type: ControllerType): +# Controller Configuration endpoints (must come before controller type routes) +@router.get("/configs/", response_model=List[Dict]) +async def list_controller_configs(): + """ + List all controller configurations with metadata. + + Returns: + List of controller configuration objects with name, controller_name, controller_type, and other metadata + """ + try: + config_files = [f for f in file_system.list_files('conf/controllers') if f.endswith('.yml')] + configs = [] + + for config_file in config_files: + config_name = config_file.replace('.yml', '') + try: + config = file_system.read_yaml_file(f"conf/controllers/{config_file}") + configs.append({ + "config_name": config_name, + "controller_name": config.get("controller_name", "unknown"), + "controller_type": config.get("controller_type", "unknown"), + "connector_name": config.get("connector_name", "unknown"), + "trading_pair": config.get("trading_pair", "unknown"), + "total_amount_quote": config.get("total_amount_quote", 0) + }) + except Exception as e: + # If config is malformed, still include it with basic info + configs.append({ + "config_name": config_name, + "controller_name": "error", + "controller_type": "error", + "error": str(e) + }) + + return configs + except FileNotFoundError: + return [] + + +@router.get("/configs/{config_name}", response_model=Dict) +async def get_controller_config(config_name: str): """ - List controllers of a specific type. + Get controller configuration by config name. Args: - controller_type: Type of controllers to list + config_name: Name of the configuration file to retrieve Returns: - List of controller names for the specified type + Dictionary with controller configuration + + Raises: + HTTPException: 404 if configuration not found """ try: - files = file_system.list_files(f'controllers/{controller_type.value}') - return [f.replace('.py', '') for f in files if f.endswith('.py') and f != "__init__.py"] + config = file_system.read_yaml_file(f"conf/controllers/{config_name}.yml") + return config except FileNotFoundError: - return [] + raise HTTPException(status_code=404, detail=f"Configuration '{config_name}' not found") + + +@router.post("/configs/{config_name}", status_code=status.HTTP_201_CREATED) +async def create_or_update_controller_config(config_name: str, config: Dict): + """ + Create or update controller configuration. 
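Review note: the "(must come before controller type routes)" comment above matters because FastAPI matches routes in registration order. Assuming the router is mounted at /controllers, if the literal /configs/ paths were registered after the dynamic ones, a request like the one below would be captured by /{controller_type}/{controller_name} and fail ControllerType validation:

    # GET /controllers/configs/my_config
    # registered late  -> controller_type="configs", controller_name="my_config"
    # registered first -> handled by get_controller_config("my_config")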
+ + Args: + config_name: Name of the configuration file + config: Configuration dictionary to save + + Returns: + Success message when configuration is saved + + Raises: + HTTPException: 400 if save error occurs + """ + try: + yaml_content = yaml.dump(config, default_flow_style=False) + file_system.add_file('conf/controllers', f"{config_name}.yml", yaml_content, override=True) + return {"message": f"Configuration '{config_name}' saved successfully"} + except Exception as e: + raise HTTPException(status_code=400, detail=str(e)) + + +@router.delete("/configs/{config_name}") +async def delete_controller_config(config_name: str): + """ + Delete controller configuration. + + Args: + config_name: Name of the configuration file to delete + + Returns: + Success message when configuration is deleted + + Raises: + HTTPException: 404 if configuration not found + """ + try: + file_system.delete_file('conf/controllers', f"{config_name}.yml") + return {"message": f"Configuration '{config_name}' deleted successfully"} + except FileNotFoundError: + raise HTTPException(status_code=404, detail=f"Configuration '{config_name}' not found") @router.get("/{controller_type}/{controller_name}", response_model=Dict[str, str]) @@ -80,14 +166,15 @@ async def get_controller(controller_type: ControllerType, controller_name: str): ) -@router.post("/{controller_type}", status_code=status.HTTP_201_CREATED) -async def create_or_update_controller(controller_type: ControllerType, controller: Controller): +@router.post("/{controller_type}/{controller_name}", status_code=status.HTTP_201_CREATED) +async def create_or_update_controller(controller_type: ControllerType, controller_name: str, controller: Controller): """ Create or update a controller. Args: controller_type: Type of controller to create/update - controller: Controller object with name, type, and content + controller_name: Name of the controller (from URL path) + controller: Controller object with content (and optional type for validation) Returns: Success message when controller is saved @@ -95,7 +182,8 @@ async def create_or_update_controller(controller_type: ControllerType, controlle Raises: HTTPException: 400 if controller type mismatch or save error """ - if controller.type != controller_type: + # If type is provided in body, validate it matches URL + if controller.type is not None and controller.type != controller_type: raise HTTPException( status_code=400, detail=f"Controller type mismatch: URL has '{controller_type}', body has '{controller.type}'" @@ -104,11 +192,11 @@ async def create_or_update_controller(controller_type: ControllerType, controlle try: file_system.add_file( f'controllers/{controller_type.value}', - f"{controller.name}.py", + f"{controller_name}.py", controller.content, override=True ) - return {"message": f"Controller '{controller.name}' saved successfully in '{controller_type.value}'"} + return {"message": f"Controller '{controller_name}' saved successfully in '{controller_type.value}'"} except Exception as e: raise HTTPException(status_code=400, detail=str(e)) @@ -138,28 +226,6 @@ async def delete_controller(controller_type: ControllerType, controller_name: st ) -# Controller Configuration endpoints -@router.get("/{controller_name}/config", response_model=Dict) -async def get_controller_config(controller_name: str): - """ - Get controller configuration. 
- - Args: - controller_name: Name of the controller to get config for - - Returns: - Dictionary with controller configuration - - Raises: - HTTPException: 404 if configuration not found - """ - try: - config = file_system.read_yaml_file(f"bots/conf/controllers/{controller_name}.yml") - return config - except FileNotFoundError: - raise HTTPException(status_code=404, detail=f"Configuration for controller '{controller_name}' not found") - - @router.get("/{controller_type}/{controller_name}/config/template", response_model=Dict) async def get_controller_config_template(controller_type: ControllerType, controller_name: str): """ @@ -187,59 +253,6 @@ async def get_controller_config_template(controller_type: ControllerType, contro return json.loads(json.dumps(config_fields, default=str)) -@router.post("/{controller_name}/config", status_code=status.HTTP_201_CREATED) -async def create_or_update_controller_config(controller_name: str, config: Dict): - """ - Create or update controller configuration. - - Args: - controller_name: Name of the controller - config: Configuration dictionary to save - - Returns: - Success message when configuration is saved - - Raises: - HTTPException: 400 if save error occurs - """ - try: - yaml_content = yaml.dump(config, default_flow_style=False) - file_system.add_file('conf/controllers', f"{controller_name}.yml", yaml_content, override=True) - return {"message": f"Configuration for controller '{controller_name}' saved successfully"} - except Exception as e: - raise HTTPException(status_code=400, detail=str(e)) - - -@router.delete("/{controller_name}/config") -async def delete_controller_config(controller_name: str): - """ - Delete controller configuration. - - Args: - controller_name: Name of the controller to delete config for - - Returns: - Success message when configuration is deleted - - Raises: - HTTPException: 404 if configuration not found - """ - try: - file_system.delete_file('conf/controllers', f"{controller_name}.yml") - return {"message": f"Configuration for controller '{controller_name}' deleted successfully"} - except FileNotFoundError: - raise HTTPException(status_code=404, detail=f"Configuration for controller '{controller_name}' not found") - - -@router.get("/configs/", response_model=List[str]) -async def list_controller_configs(): - """ - List all controller configurations. 
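Review note: get_controller_config_template (unchanged context above) still extracts defaults with the pydantic v1 idiom (config_class.__fields__ with field.name), while patch 073 later in this series switches the scripts router to the v2 idiom. If the project is on pydantic v2, this endpoint presumably needs the same change, along the lines of:

    # pydantic v2: model_fields maps field name -> FieldInfo
    config_fields = {name: field.default for name, field in config_class.model_fields.items()}
    return json.loads(json.dumps(config_fields, default=str))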
- - Returns: - List of controller configuration names - """ - return [f.replace('.yml', '') for f in file_system.list_files('conf/controllers') if f.endswith('.yml')] # Bot-specific controller config endpoints @@ -264,7 +277,7 @@ async def get_bot_controller_configs(bot_name: str): configs = [] for controller_file in file_system.list_files(bots_config_path): if controller_file.endswith('.yml'): - config = file_system.read_yaml_file(f"bots/{bots_config_path}/{controller_file}") + config = file_system.read_yaml_file(f"{bots_config_path}/{controller_file}") config['_config_name'] = controller_file.replace('.yml', '') configs.append(config) return configs @@ -291,9 +304,9 @@ async def update_bot_controller_config(bot_name: str, controller_name: str, conf raise HTTPException(status_code=404, detail=f"Bot '{bot_name}' not found") try: - current_config = file_system.read_yaml_file(f"bots/{bots_config_path}/{controller_name}.yml") + current_config = file_system.read_yaml_file(f"{bots_config_path}/{controller_name}.yml") current_config.update(config) - file_system.dump_dict_to_yaml(f"bots/{bots_config_path}/{controller_name}.yml", current_config) + file_system.dump_dict_to_yaml(f"{bots_config_path}/{controller_name}.yml", current_config) return {"message": f"Controller configuration for bot '{bot_name}' updated successfully"} except FileNotFoundError: raise HTTPException( From e9a452eaf65e57c0ae9bec5b0d307f0bd94244a2 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 12 Jun 2025 16:59:58 +0800 Subject: [PATCH 071/244] (feat) remove basic controllers --- .../directional_trading/ai_livestream.py | 86 ------------------- .../generic/basic_order_example.py | 48 ----------- .../generic/basic_order_open_close_example.py | 83 ------------------ 3 files changed, 217 deletions(-) delete mode 100644 bots/controllers/directional_trading/ai_livestream.py delete mode 100644 bots/controllers/generic/basic_order_example.py delete mode 100644 bots/controllers/generic/basic_order_open_close_example.py diff --git a/bots/controllers/directional_trading/ai_livestream.py b/bots/controllers/directional_trading/ai_livestream.py deleted file mode 100644 index 6cef9cfa..00000000 --- a/bots/controllers/directional_trading/ai_livestream.py +++ /dev/null @@ -1,86 +0,0 @@ -from decimal import Decimal -from typing import List - -import pandas_ta as ta # noqa: F401 -from pydantic import Field - -from hummingbot.core.data_type.common import TradeType -from hummingbot.data_feed.candles_feed.data_types import CandlesConfig -from hummingbot.remote_iface.mqtt import ExternalTopicFactory -from hummingbot.strategy_v2.controllers.directional_trading_controller_base import ( - DirectionalTradingControllerBase, - DirectionalTradingControllerConfigBase, -) -from hummingbot.strategy_v2.executors.position_executor.data_types import PositionExecutorConfig - - -class AILivestreamControllerConfig(DirectionalTradingControllerConfigBase): - controller_name: str = "ai_livestream" - candles_config: List[CandlesConfig] = [] - long_threshold: float = Field(default=0.5, json_schema_extra={"is_updatable": True}) - short_threshold: float = Field(default=0.5, json_schema_extra={"is_updatable": True}) - topic: str = "hbot/predictions" - - -class AILivestreamController(DirectionalTradingControllerBase): - def __init__(self, config: AILivestreamControllerConfig, *args, **kwargs): - self.config = config - super().__init__(config, *args, **kwargs) - # Start ML signal listener - self._init_ml_signal_listener() - - def _init_ml_signal_listener(self): - 
"""Initialize a listener for ML signals from the MQTT broker""" - try: - normalized_pair = self.config.trading_pair.replace("-", "_").lower() - topic = f"{self.config.topic}/{normalized_pair}/ML_SIGNALS" - self._ml_signal_listener = ExternalTopicFactory.create_async( - topic=topic, - callback=self._handle_ml_signal, - use_bot_prefix=False, - ) - self.logger().info("ML signal listener initialized successfully") - except Exception as e: - self.logger().error(f"Failed to initialize ML signal listener: {str(e)}") - self._ml_signal_listener = None - - def _handle_ml_signal(self, signal: dict, topic: str): - """Handle incoming ML signal""" - # self.logger().info(f"Received ML signal: {signal}") - short, neutral, long = signal["probabilities"] - if short > self.config.short_threshold: - self.processed_data["signal"] = -1 - elif long > self.config.long_threshold: - self.processed_data["signal"] = 1 - else: - self.processed_data["signal"] = 0 - self.processed_data["features"] = signal - - async def update_processed_data(self): - pass - - def get_executor_config(self, trade_type: TradeType, price: Decimal, amount: Decimal): - """ - Get the executor config based on the trade_type, price and amount. This method can be overridden by the - subclasses if required. - """ - return PositionExecutorConfig( - timestamp=self.market_data_provider.time(), - connector_name=self.config.connector_name, - trading_pair=self.config.trading_pair, - side=trade_type, - entry_price=price, - amount=amount, - triple_barrier_config=self.config.triple_barrier_config.new_instance_with_adjusted_volatility( - volatility_factor=self.processed_data["features"].get("target_pct", 0.01)), - leverage=self.config.leverage, - ) - - def to_format_status(self) -> List[str]: - lines = [] - features = self.processed_data.get("features", {}) - lines.append(f"Signal: {self.processed_data.get('signal', 'N/A')}") - lines.append(f"Timestamp: {features.get('timestamp', 'N/A')}") - lines.append(f"Probabilities: {features.get('probabilities', 'N/A')}") - lines.append(f"Target Pct: {features.get('target_pct', 'N/A')}") - return lines diff --git a/bots/controllers/generic/basic_order_example.py b/bots/controllers/generic/basic_order_example.py deleted file mode 100644 index b1cb4e04..00000000 --- a/bots/controllers/generic/basic_order_example.py +++ /dev/null @@ -1,48 +0,0 @@ -from decimal import Decimal - -from hummingbot.core.data_type.common import MarketDict, PositionMode, PriceType, TradeType -from hummingbot.strategy_v2.controllers import ControllerBase, ControllerConfigBase -from hummingbot.strategy_v2.executors.order_executor.data_types import ExecutionStrategy, OrderExecutorConfig -from hummingbot.strategy_v2.models.executor_actions import CreateExecutorAction, ExecutorAction - - -class BasicOrderExampleConfig(ControllerConfigBase): - controller_name: str = "basic_order_example" - connector_name: str = "binance_perpetual" - trading_pair: str = "WLD-USDT" - side: TradeType = TradeType.BUY - position_mode: PositionMode = PositionMode.HEDGE - leverage: int = 20 - amount_quote: Decimal = Decimal("10") - order_frequency: int = 10 - - def update_markets(self, markets: MarketDict) -> MarketDict: - return markets.add_or_update(self.connector_name, self.trading_pair) - - -class BasicOrderExample(ControllerBase): - def __init__(self, config: BasicOrderExampleConfig, *args, **kwargs): - super().__init__(config, *args, **kwargs) - self.config = config - self.last_timestamp = 0 - - async def update_processed_data(self): - mid_price = 
self.market_data_provider.get_price_by_type(self.config.connector_name, self.config.trading_pair, PriceType.MidPrice) - n_active_executors = len([executor for executor in self.executors_info if executor.is_active]) - self.processed_data = {"mid_price": mid_price, "n_active_executors": n_active_executors} - - def determine_executor_actions(self) -> list[ExecutorAction]: - if (self.processed_data["n_active_executors"] == 0 and - self.market_data_provider.time() - self.last_timestamp > self.config.order_frequency): - self.last_timestamp = self.market_data_provider.time() - config = OrderExecutorConfig( - timestamp=self.market_data_provider.time(), - connector_name=self.config.connector_name, - trading_pair=self.config.trading_pair, - side=self.config.side, - amount=self.config.amount_quote / self.processed_data["mid_price"], - execution_strategy=ExecutionStrategy.MARKET, - price=self.processed_data["mid_price"], - ) - return [CreateExecutorAction(controller_id=self.config.id, executor_config=config)] - return [] diff --git a/bots/controllers/generic/basic_order_open_close_example.py b/bots/controllers/generic/basic_order_open_close_example.py deleted file mode 100644 index 1cea9bbd..00000000 --- a/bots/controllers/generic/basic_order_open_close_example.py +++ /dev/null @@ -1,83 +0,0 @@ -from decimal import Decimal - -from hummingbot.core.data_type.common import MarketDict, PositionAction, PositionMode, PriceType, TradeType -from hummingbot.strategy_v2.controllers import ControllerBase, ControllerConfigBase -from hummingbot.strategy_v2.executors.order_executor.data_types import ExecutionStrategy, OrderExecutorConfig -from hummingbot.strategy_v2.models.executor_actions import CreateExecutorAction, ExecutorAction - - -class BasicOrderOpenCloseExampleConfig(ControllerConfigBase): - controller_name: str = "basic_order_open_close_example" - controller_type: str = "generic" - connector_name: str = "binance_perpetual" - trading_pair: str = "WLD-USDT" - side: TradeType = TradeType.BUY - position_mode: PositionMode = PositionMode.HEDGE - leverage: int = 50 - close_order_delay: int = 10 - open_short_to_close_long: bool = False - close_partial_position: bool = False - amount_quote: Decimal = Decimal("20") - - def update_markets(self, markets: MarketDict) -> MarketDict: - return markets.add_or_update(self.connector_name, self.trading_pair) - - -class BasicOrderOpenClose(ControllerBase): - def __init__(self, config: BasicOrderOpenCloseExampleConfig, *args, **kwargs): - super().__init__(config, *args, **kwargs) - self.config = config - self.open_order_placed = False - self.closed_order_placed = False - self.last_timestamp = 0 - self.open_side = self.config.side - self.close_side = TradeType.SELL if self.config.side == TradeType.BUY else TradeType.BUY - - def get_position(self, connector_name, trading_pair): - for position in self.positions_held: - if position.connector_name == connector_name and position.trading_pair == trading_pair: - return position - - def determine_executor_actions(self) -> list[ExecutorAction]: - mid_price = self.market_data_provider.get_price_by_type(self.config.connector_name, self.config.trading_pair, PriceType.MidPrice) - if not self.open_order_placed: - config = OrderExecutorConfig( - timestamp=self.market_data_provider.time(), - connector_name=self.config.connector_name, - trading_pair=self.config.trading_pair, - side=self.config.side, - amount=self.config.amount_quote / mid_price, - execution_strategy=ExecutionStrategy.MARKET, - position_action=PositionAction.OPEN, - 
price=mid_price,
-            )
-            self.open_order_placed = True
-            self.last_timestamp = self.market_data_provider.time()
-            return [CreateExecutorAction(
-                controller_id=self.config.id,
-                executor_config=config)]
-        else:
-            if self.market_data_provider.time() - self.last_timestamp > self.config.close_order_delay and not self.closed_order_placed:
-                current_position = self.get_position(self.config.connector_name, self.config.trading_pair)
-                if current_position is None:
-                    self.logger().info("The original position is not found, can close the position")
-                else:
-                    amount = current_position.amount / 2 if self.config.close_partial_position else current_position.amount
-                    config = OrderExecutorConfig(
-                        timestamp=self.market_data_provider.time(),
-                        connector_name=self.config.connector_name,
-                        trading_pair=self.config.trading_pair,
-                        side=self.close_side,
-                        amount=amount,
-                        execution_strategy=ExecutionStrategy.MARKET,
-                        position_action=PositionAction.OPEN if self.config.open_short_to_close_long else PositionAction.CLOSE,
-                        price=mid_price,
-                    )
-                    self.closed_order_placed = True
-                    return [CreateExecutorAction(
-                        controller_id=self.config.id,
-                        executor_config=config)]
-        return []
-
-    async def update_processed_data(self):
-        pass

From 907e18e27384e79b0162b53ec0418db49021ea34 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Thu, 12 Jun 2025 17:43:18 +0800
Subject: [PATCH 072/244] (feat) remove account state JSON fallback

---
 services/accounts_service.py | 39 +++++++++++++-----------------------
 1 file changed, 14 insertions(+), 25 deletions(-)

diff --git a/services/accounts_service.py b/services/accounts_service.py
index 02cc516c..9e362933 100644
--- a/services/accounts_service.py
+++ b/services/accounts_service.py
@@ -1,5 +1,4 @@
 import asyncio
-import json
 import logging
 from datetime import datetime
 from decimal import Decimal
@@ -30,18 +29,23 @@ class AccountsService:
     }
 
     def __init__(self,
-                 update_account_state_interval_minutes: int = 5,
-                 default_quote: str = "USDT",
-                 account_history_file: str = "account_state_history.json"):
+                 account_update_interval: int = 5,
+                 default_quote: str = "USDT"):
+        """
+        Initialize the AccountsService.
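Review note on the constructor change in progress here: the interval argument is now supplied in minutes and converted to seconds internally (the `* 60` just below), so a caller sketch looks like:

    # 5-minute cadence; the service stores update_account_state_interval = 300 seconds
    accounts_service = AccountsService(account_update_interval=5, default_quote="USDT")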
+ + Args: + account_update_interval: How often to update account states in minutes (default: 5) + default_quote: Default quote currency for trading pairs (default: "USDT") + """ self.secrets_manager = ETHKeyFileSecretManger(settings.security.config_password) self.connector_manager = ConnectorManager(self.secrets_manager) self.accounts = {} self.accounts_state = {} self.account_state_update_event = asyncio.Event() self.initialize_accounts() - self.update_account_state_interval = update_account_state_interval_minutes * 60 + self.update_account_state_interval = account_update_interval * 60 self.default_quote = default_quote - self.history_file = account_history_file self._update_account_state_task: Optional[asyncio.Task] = None # Database setup @@ -116,13 +120,8 @@ async def dump_account_state(self): except Exception as e: logging.error(f"Error saving account state to database: {e}") - # Fallback to JSON file - timestamp = datetime.now().isoformat() - state_to_dump = {"timestamp": timestamp, "state": self.accounts_state} - if not file_system.path_exists(path=f"data/{self.history_file}"): - file_system.add_file(directory="data", file_name=self.history_file, content=json.dumps(state_to_dump) + "\n") - else: - file_system.append_to_file(directory="data", file_name=self.history_file, content=json.dumps(state_to_dump) + "\n") + # Re-raise the exception since we no longer have a fallback + raise async def load_account_state_history(self, limit: Optional[int] = None, @@ -146,18 +145,8 @@ async def load_account_state_history(self, ) except Exception as e: logging.error(f"Error loading account state history from database: {e}") - # Fallback to JSON file (simplified, no pagination) - history = [] - try: - with open("bots/data/" + self.history_file, "r") as file: - for line in file: - if line.strip(): # Check if the line is not empty - history.append(json.loads(line)) - if limit and len(history) >= limit: - break - except FileNotFoundError: - logging.warning("No account state history file found.") - return history, None, False + # Return empty result since we no longer have a fallback + return [], None, False async def check_all_connectors(self): """ From e8f78a50119694d4d5e858d775dd1d5d0c61ed48 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 12 Jun 2025 17:43:31 +0800 Subject: [PATCH 073/244] (feat) improve scripts routes --- routers/scripts.py | 188 ++++++++++++++++++++++++++------------------- 1 file changed, 107 insertions(+), 81 deletions(-) diff --git a/routers/scripts.py b/routers/scripts.py index 75356cb4..8a87ee34 100644 --- a/routers/scripts.py +++ b/routers/scripts.py @@ -23,167 +23,193 @@ async def list_scripts(): return [f.replace('.py', '') for f in file_system.list_files('scripts') if f.endswith('.py')] -@router.get("/{script_name}", response_model=Dict[str, str]) -async def get_script(script_name: str): +# Script Configuration endpoints (must come before script name routes) +@router.get("/configs/", response_model=List[Dict]) +async def list_script_configs(): """ - Get script content by name. + List all script configurations with metadata. 
- Args: - script_name: Name of the script to retrieve - Returns: - Dictionary with script name and content - - Raises: - HTTPException: 404 if script not found + List of script configuration objects with name, script_file_name, and other metadata """ try: - content = file_system.read_file(f"scripts/{script_name}.py") - return { - "name": script_name, - "content": content - } + config_files = [f for f in file_system.list_files('conf/scripts') if f.endswith('.yml')] + configs = [] + + for config_file in config_files: + config_name = config_file.replace('.yml', '') + try: + config = file_system.read_yaml_file(f"conf/scripts/{config_file}") + configs.append({ + "config_name": config_name, + "script_file_name": config.get("script_file_name", "unknown"), + "controllers_config": config.get("controllers_config", []), + "candles_config": config.get("candles_config", []), + "markets": config.get("markets", {}) + }) + except Exception as e: + # If config is malformed, still include it with basic info + configs.append({ + "config_name": config_name, + "script_file_name": "error", + "error": str(e) + }) + + return configs except FileNotFoundError: - raise HTTPException(status_code=404, detail=f"Script '{script_name}' not found") + return [] -@router.post("/", status_code=status.HTTP_201_CREATED) -async def create_or_update_script(script: Script): +@router.get("/configs/{config_name}", response_model=Dict) +async def get_script_config(config_name: str): """ - Create or update a script. + Get script configuration by config name. Args: - script: Script object with name and content + config_name: Name of the configuration file to retrieve Returns: - Success message when script is saved + Dictionary with script configuration Raises: - HTTPException: 400 if save error occurs + HTTPException: 404 if configuration not found """ try: - file_system.add_file('scripts', f"{script.name}.py", script.content, override=True) - return {"message": f"Script '{script.name}' saved successfully"} - except Exception as e: - raise HTTPException(status_code=400, detail=str(e)) + config = file_system.read_yaml_file(f"conf/scripts/{config_name}.yml") + return config + except FileNotFoundError: + raise HTTPException(status_code=404, detail=f"Configuration '{config_name}' not found") -@router.delete("/{script_name}") -async def delete_script(script_name: str): +@router.post("/configs/{config_name}", status_code=status.HTTP_201_CREATED) +async def create_or_update_script_config(config_name: str, config: Dict): """ - Delete a script. + Create or update script configuration. 
Args: - script_name: Name of the script to delete + config_name: Name of the configuration file + config: Configuration dictionary to save Returns: - Success message when script is deleted + Success message when configuration is saved Raises: - HTTPException: 404 if script not found + HTTPException: 400 if save error occurs """ try: - file_system.delete_file('scripts', f"{script_name}.py") - return {"message": f"Script '{script_name}' deleted successfully"} - except FileNotFoundError: - raise HTTPException(status_code=404, detail=f"Script '{script_name}' not found") + yaml_content = yaml.dump(config, default_flow_style=False) + file_system.add_file('conf/scripts', f"{config_name}.yml", yaml_content, override=True) + return {"message": f"Configuration '{config_name}' saved successfully"} + except Exception as e: + raise HTTPException(status_code=400, detail=str(e)) -# Script Configuration endpoints -@router.get("/{script_name}/config", response_model=Dict) -async def get_script_config(script_name: str): +@router.delete("/configs/{config_name}") +async def delete_script_config(config_name: str): """ - Get script configuration. + Delete script configuration. Args: - script_name: Name of the script to get config for + config_name: Name of the configuration file to delete Returns: - Dictionary with script configuration + Success message when configuration is deleted Raises: HTTPException: 404 if configuration not found """ try: - config = file_system.read_yaml_file(f"bots/conf/scripts/{script_name}.yml") - return config + file_system.delete_file('conf/scripts', f"{config_name}.yml") + return {"message": f"Configuration '{config_name}' deleted successfully"} except FileNotFoundError: - raise HTTPException(status_code=404, detail=f"Configuration for script '{script_name}' not found") + raise HTTPException(status_code=404, detail=f"Configuration '{config_name}' not found") -@router.get("/{script_name}/config/template", response_model=Dict) -async def get_script_config_template(script_name: str): +@router.get("/{script_name}", response_model=Dict[str, str]) +async def get_script(script_name: str): """ - Get script configuration template with default values. + Get script content by name. Args: - script_name: Name of the script to get template for + script_name: Name of the script to retrieve Returns: - Dictionary with configuration template and default values + Dictionary with script name and content Raises: - HTTPException: 404 if script configuration class not found + HTTPException: 404 if script not found """ - config_class = file_system.load_script_config_class(script_name) - if config_class is None: - raise HTTPException(status_code=404, detail=f"Script configuration class for '{script_name}' not found") - - # Extract fields and default values - config_fields = {field.name: field.default for field in config_class.__fields__.values()} - return json.loads(json.dumps(config_fields, default=str)) + try: + content = file_system.read_file(f"scripts/{script_name}.py") + return { + "name": script_name, + "content": content + } + except FileNotFoundError: + raise HTTPException(status_code=404, detail=f"Script '{script_name}' not found") -@router.post("/{script_name}/config", status_code=status.HTTP_201_CREATED) -async def create_or_update_script_config(script_name: str, config: Dict): +@router.post("/{script_name}", status_code=status.HTTP_201_CREATED) +async def create_or_update_script(script_name: str, script: Script): """ - Create or update script configuration. + Create or update a script. 
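Review note: after this change the effective script name comes from the URL path rather than the request body. A hypothetical client call, assuming the router is mounted at /scripts, the HTTP Basic defaults from main.py, and that the Script body still carries a name field:

    import httpx

    resp = httpx.post(
        "http://localhost:8000/scripts/my_script",
        json={"name": "my_script", "content": "print('hello')\n"},
        auth=("admin", "admin"),
    )
    print(resp.status_code, resp.json())
    # 201 {"message": "Script 'my_script' saved successfully"}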
Args: - script_name: Name of the script - config: Configuration dictionary to save + script_name: Name of the script (from URL path) + script: Script object with content Returns: - Success message when configuration is saved + Success message when script is saved Raises: HTTPException: 400 if save error occurs """ try: - yaml_content = yaml.dump(config, default_flow_style=False) - file_system.add_file('conf/scripts', f"{script_name}.yml", yaml_content, override=True) - return {"message": f"Configuration for script '{script_name}' saved successfully"} + file_system.add_file('scripts', f"{script_name}.py", script.content, override=True) + return {"message": f"Script '{script_name}' saved successfully"} except Exception as e: raise HTTPException(status_code=400, detail=str(e)) -@router.delete("/{script_name}/config") -async def delete_script_config(script_name: str): +@router.delete("/{script_name}") +async def delete_script(script_name: str): """ - Delete script configuration. + Delete a script. Args: - script_name: Name of the script to delete config for + script_name: Name of the script to delete Returns: - Success message when configuration is deleted + Success message when script is deleted Raises: - HTTPException: 404 if configuration not found + HTTPException: 404 if script not found """ try: - file_system.delete_file('conf/scripts', f"{script_name}.yml") - return {"message": f"Configuration for script '{script_name}' deleted successfully"} + file_system.delete_file('scripts', f"{script_name}.py") + return {"message": f"Script '{script_name}' deleted successfully"} except FileNotFoundError: - raise HTTPException(status_code=404, detail=f"Configuration for script '{script_name}' not found") + raise HTTPException(status_code=404, detail=f"Script '{script_name}' not found") -@router.get("/configs/", response_model=List[str]) -async def list_script_configs(): +@router.get("/{script_name}/config/template", response_model=Dict) +async def get_script_config_template(script_name: str): """ - List all script configurations. + Get script configuration template with default values. 
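Review note: the template endpoint below relies on pydantic v2's model_fields. An illustration with a stand-in config class (plain BaseModel used here in place of the pydantic-based BaseClientModel):

    from pydantic import BaseModel, Field

    class DemoScriptConfig(BaseModel):  # stand-in for a real script config class
        script_file_name: str = "demo.py"
        order_amount: float = Field(default=10.0)

    print({name: f.default for name, f in DemoScriptConfig.model_fields.items()})
    # {'script_file_name': 'demo.py', 'order_amount': 10.0}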
+ Args: + script_name: Name of the script to get template for + Returns: - List of script configuration names + Dictionary with configuration template and default values + + Raises: + HTTPException: 404 if script configuration class not found """ - return [f.replace('.yml', '') for f in file_system.list_files('conf/scripts') if f.endswith('.yml')] \ No newline at end of file + config_class = file_system.load_script_config_class(script_name) + if config_class is None: + raise HTTPException(status_code=404, detail=f"Script configuration class for '{script_name}' not found") + + # Extract fields and default values + config_fields = {name: field.default for name, field in config_class.model_fields.items()} + return json.loads(json.dumps(config_fields, default=str)) \ No newline at end of file From 389f1cfa4547867cb92319d7b80b0df48697ba82 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 12 Jun 2025 17:43:46 +0800 Subject: [PATCH 074/244] (feat) add config for account state interval --- config.py | 6 ++++++ main.py | 4 +++- 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/config.py b/config.py index 725601d1..17ac11a7 100644 --- a/config.py +++ b/config.py @@ -77,6 +77,12 @@ class AppSettings(BaseSettings): default="dev", description="Logfire environment name" ) + + # Account state update interval + account_update_interval: int = Field( + default=5, + description="How often to update account states in minutes" + ) model_config = SettingsConfigDict( env_file=".env", diff --git a/main.py b/main.py index d2da49d0..3f0ee6ad 100644 --- a/main.py +++ b/main.py @@ -74,7 +74,9 @@ async def lifespan(app: FastAPI): broker_password=settings.broker.password ) - accounts_service = AccountsService() + accounts_service = AccountsService( + account_update_interval=settings.app.account_update_interval + ) docker_service = DockerService() bot_archiver = BotArchiver( settings.aws.api_key, From b783c36f85d2fbec25ff0eceb19ea60907d08cf9 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 13 Jun 2025 22:06:15 +0800 Subject: [PATCH 075/244] (feat) add orders and trades models --- database/models.py | 70 +++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 69 insertions(+), 1 deletion(-) diff --git a/database/models.py b/database/models.py index afc5aa12..3c682459 100644 --- a/database/models.py +++ b/database/models.py @@ -5,6 +5,7 @@ Integer, Numeric, String, + Text, func, ) from sqlalchemy.ext.declarative import declarative_base @@ -35,4 +36,71 @@ class TokenState(Base): value = Column(Numeric(precision=30, scale=18), nullable=False) available_units = Column(Numeric(precision=30, scale=18), nullable=False) - account_state = relationship("AccountState", back_populates="token_states") \ No newline at end of file + account_state = relationship("AccountState", back_populates="token_states") + + +class Order(Base): + __tablename__ = "orders" + + id = Column(Integer, primary_key=True, index=True) + # Order identification + client_order_id = Column(String, nullable=False, unique=True, index=True) + exchange_order_id = Column(String, nullable=True, index=True) + + # Timestamps + created_at = Column(TIMESTAMP(timezone=True), server_default=func.now(), nullable=False, index=True) + updated_at = Column(TIMESTAMP(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False) + + # Account and connector info + account_name = Column(String, nullable=False, index=True) + connector_name = Column(String, nullable=False, index=True) + + # Order details + trading_pair = Column(String, 
nullable=False, index=True) + trade_type = Column(String, nullable=False) # BUY, SELL + order_type = Column(String, nullable=False) # LIMIT, MARKET, LIMIT_MAKER + amount = Column(Numeric(precision=30, scale=18), nullable=False) + price = Column(Numeric(precision=30, scale=18), nullable=True) # Null for market orders + + # Order status and execution + status = Column(String, nullable=False, default="SUBMITTED", index=True) # SUBMITTED, OPEN, FILLED, CANCELLED, FAILED + filled_amount = Column(Numeric(precision=30, scale=18), nullable=False, default=0) + average_fill_price = Column(Numeric(precision=30, scale=18), nullable=True) + + # Fee information + fee_paid = Column(Numeric(precision=30, scale=18), nullable=True) + fee_currency = Column(String, nullable=True) + + # Additional metadata + error_message = Column(Text, nullable=True) + + # Relationships for future enhancements + trades = relationship("Trade", back_populates="order", cascade="all, delete-orphan") + + +class Trade(Base): + __tablename__ = "trades" + + id = Column(Integer, primary_key=True, index=True) + order_id = Column(Integer, ForeignKey("orders.id"), nullable=False) + + # Trade identification + trade_id = Column(String, nullable=False, unique=True, index=True) + + # Timestamps + timestamp = Column(TIMESTAMP(timezone=True), nullable=False, index=True) + + # Trade details + trading_pair = Column(String, nullable=False, index=True) + trade_type = Column(String, nullable=False) # BUY, SELL + amount = Column(Numeric(precision=30, scale=18), nullable=False) + price = Column(Numeric(precision=30, scale=18), nullable=False) + + # Fee information + fee_paid = Column(Numeric(precision=30, scale=18), nullable=False, default=0) + fee_currency = Column(String, nullable=True) + + # Relationship + order = relationship("Order", back_populates="trades") + + From 4016bedda9d7a6867f11c54a8d304b59b93a01ea Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 13 Jun 2025 22:06:27 +0800 Subject: [PATCH 076/244] (feat) add to connections the new models --- database/__init__.py | 4 ++-- database/connection.py | 19 +++++++++++++++++++ 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/database/__init__.py b/database/__init__.py index 6a7e0c88..70527e63 100644 --- a/database/__init__.py +++ b/database/__init__.py @@ -1,5 +1,5 @@ -from .models import AccountState, TokenState, Base +from .models import AccountState, TokenState, Order, Trade, Base from .connection import AsyncDatabaseManager from .repositories import AccountRepository -__all__ = ["AccountState", "TokenState", "Base", "AsyncDatabaseManager", "AccountRepository"] \ No newline at end of file +__all__ = ["AccountState", "TokenState", "Order", "Trade", "Base", "AsyncDatabaseManager", "AccountRepository"] \ No newline at end of file diff --git a/database/connection.py b/database/connection.py index cb4c9521..172e221e 100644 --- a/database/connection.py +++ b/database/connection.py @@ -44,10 +44,29 @@ async def create_tables(self): try: async with self.engine.begin() as conn: await conn.run_sync(Base.metadata.create_all) + + # Drop Hummingbot's native tables since we use our custom orders/trades tables + await self._drop_hummingbot_tables(conn) + logger.info("Database tables created successfully") except Exception as e: logger.error(f"Failed to create database tables: {e}") raise + + async def _drop_hummingbot_tables(self, conn): + """Drop Hummingbot's native database tables since we use custom ones.""" + hummingbot_tables = [ + "hummingbot_orders", + "hummingbot_trade_fills", + 
"hummingbot_order_status" + ] + + for table_name in hummingbot_tables: + try: + await conn.execute(text(f"DROP TABLE IF EXISTS {table_name}")) + logger.info(f"Dropped Hummingbot table: {table_name}") + except Exception as e: + logger.debug(f"Could not drop table {table_name}: {e}") # Use debug since table might not exist async def close(self): """Close all database connections.""" From 790ff14b65d962e9189fc0a747eb80bad80a2c4d Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 13 Jun 2025 22:06:57 +0800 Subject: [PATCH 077/244] (feat) simplify account routes and add trade functionality --- routers/accounts.py | 700 ++++++++++++++++++++++++-------------------- 1 file changed, 376 insertions(+), 324 deletions(-) diff --git a/routers/accounts.py b/routers/accounts.py index a945538e..93f8b09f 100644 --- a/routers/accounts.py +++ b/routers/accounts.py @@ -1,4 +1,4 @@ -from typing import Dict, List +from typing import Dict, List, Optional from datetime import datetime from fastapi import APIRouter, HTTPException, Depends, Query @@ -7,8 +7,9 @@ from services.accounts_service import AccountsService from utils.file_system import FileSystemUtil -from deps import get_accounts_service +from deps import get_accounts_service, get_market_data_feed_manager from models import PaginatedResponse +from models.bot import TradeRequest, TradeResponse router = APIRouter(tags=["Accounts"], prefix="/accounts") file_system = FileSystemUtil(base_path="bots/credentials") @@ -82,18 +83,6 @@ async def get_connector_config_map(connector_name: str, accounts_service: Accoun return accounts_service.get_connector_config_map(connector_name) -@router.get("/all-connectors-config-map", response_model=Dict[str, List[str]]) -async def get_all_connectors_config_map(accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Get configuration fields for all available connectors. - - Returns: - Dictionary mapping connector names to their required configuration fields - """ - all_config_maps = {} - for connector in list(AllConnectorSettings.get_connector_settings().keys()): - all_config_maps[connector] = accounts_service.get_connector_config_map(connector) - return all_config_maps @router.get("/", response_model=List[str]) @@ -107,24 +96,6 @@ async def list_accounts(accounts_service: AccountsService = Depends(get_accounts return accounts_service.list_accounts() -@router.get("/{account_name}/credentials", response_model=List[str]) -async def list_credentials(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Get a list of all credentials (connectors) configured for a specific account. 
- - Args: - account_name: Name of the account to list credentials for - - Returns: - List of credential file names (connectors) configured for the account - - Raises: - HTTPException: 404 if account not found - """ - try: - return accounts_service.list_credentials(account_name) - except FileNotFoundError as e: - raise HTTPException(status_code=404, detail=str(e)) @router.post("/add-account", status_code=status.HTTP_201_CREATED) @@ -165,7 +136,7 @@ async def delete_account(account_name: str, accounts_service: AccountsService = try: if account_name == "master_account": raise HTTPException(status_code=400, detail="Cannot delete master account.") - accounts_service.delete_account(account_name) + await accounts_service.delete_account(account_name) return {"message": "Account deleted successfully."} except FileNotFoundError as e: raise HTTPException(status_code=404, detail=str(e)) @@ -187,7 +158,7 @@ async def delete_credential(account_name: str, connector_name: str, accounts_ser HTTPException: 404 if credential not found """ try: - accounts_service.delete_credentials(account_name, connector_name) + await accounts_service.delete_credentials(account_name, connector_name) return {"message": "Credential deleted successfully."} except FileNotFoundError as e: raise HTTPException(status_code=404, detail=str(e)) @@ -213,7 +184,7 @@ async def add_credential(account_name: str, connector_name: str, credentials: Di await accounts_service.add_credentials(account_name, connector_name, credentials) return {"message": "Connector credentials added successfully."} except Exception as e: - accounts_service.delete_credentials(account_name, connector_name) + await accounts_service.delete_credentials(account_name, connector_name) raise HTTPException(status_code=400, detail=str(e)) @@ -283,392 +254,473 @@ async def get_account_history( } ) - -@router.get("/{account_name}/value", response_model=Dict) -async def get_account_value(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): +# Trading endpoints +@router.post("/trade", response_model=TradeResponse, status_code=status.HTTP_201_CREATED) +async def place_trade(trade_request: TradeRequest, + accounts_service: AccountsService = Depends(get_accounts_service), + market_data_manager = Depends(get_market_data_feed_manager)): """ - Get total portfolio value for a specific account. + Place a buy or sell order using a specific account and connector. Args: - account_name: Name of the account to get value for + trade_request: Trading request with account, connector, trading pair, type, amount, etc. 
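Review note: a hypothetical request against the new trade endpoint (field names taken from the TradeRequest usage here; the exact types and enums live in models/bot.py, so the string values below are assumptions):

    import httpx

    payload = {
        "account_name": "master_account",
        "connector_name": "binance",
        "trading_pair": "BTC-USDT",
        "trade_type": "BUY",
        "amount": "0.001",
        "order_type": "LIMIT",
        "price": "60000",
    }
    resp = httpx.post("http://localhost:8000/accounts/trade",
                      json=payload, auth=("admin", "admin"))
    print(resp.json()["order_id"])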
+ accounts_service: Injected accounts service Returns: - Dictionary with account name and total value + TradeResponse with order ID and trading details Raises: - HTTPException: 404 if account not found + HTTPException: 400 for invalid parameters, 404 for account/connector not found, 500 for trade execution errors """ - value_data = await accounts_service.get_portfolio_value(account_name) - if account_name not in value_data["accounts"]: - raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - return { - "account_name": account_name, - "total_value": value_data["accounts"].get(account_name, 0) - } + try: + order_id = await accounts_service.place_trade( + account_name=trade_request.account_name, + connector_name=trade_request.connector_name, + trading_pair=trade_request.trading_pair, + trade_type=trade_request.trade_type, + amount=trade_request.amount, + order_type=trade_request.order_type, + price=trade_request.price, + market_data_manager=market_data_manager + ) + + return TradeResponse( + order_id=order_id, + account_name=trade_request.account_name, + connector_name=trade_request.connector_name, + trading_pair=trade_request.trading_pair, + trade_type=trade_request.trade_type, + amount=trade_request.amount, + order_type=trade_request.order_type, + price=trade_request.price, + status="submitted" + ) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Unexpected error placing trade: {str(e)}") -@router.get("/{account_name}/tokens", response_model=List[Dict]) -async def get_account_tokens(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): +@router.get("/{account_name}/connectors/{connector_name}/orders", response_model=Dict[str, Dict]) +async def get_active_orders(account_name: str, connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): """ - Get all tokens held by a specific account with aggregated information. + Get all active orders for a specific account and connector. Args: - account_name: Name of the account to get tokens for + account_name: Name of the account + connector_name: Name of the connector + accounts_service: Injected accounts service Returns: - List of token information with total units, value, and connector breakdown + Dictionary mapping order IDs to order details Raises: - HTTPException: 404 if account not found + HTTPException: 404 if account or connector not found """ - state = await accounts_service.get_account_current_state(account_name) - if not state: - raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") + try: + return accounts_service.get_active_orders(account_name, connector_name) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error retrieving orders: {str(e)}") + + +@router.post("/{account_name}/connectors/{connector_name}/orders/{client_order_id}/cancel") +async def cancel_order(account_name: str, connector_name: str, client_order_id: str, + trading_pair: str = Query(..., description="Trading pair for the order to cancel"), + accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Cancel a specific order by its client order ID. 
- tokens = {} - for connector_name, token_list in state.items(): - for token_info in token_list: - token = token_info["token"] - if token not in tokens: - tokens[token] = { - "token": token, - "total_units": 0, - "total_value": 0, - "average_price": 0, - "connectors": [] - } - tokens[token]["total_units"] += token_info["units"] - tokens[token]["total_value"] += token_info["value"] - tokens[token]["connectors"].append({ - "connector": connector_name, - "units": token_info["units"], - "value": token_info["value"] - }) - - # Calculate average price - for token_data in tokens.values(): - if token_data["total_units"] > 0: - token_data["average_price"] = token_data["total_value"] / token_data["total_units"] - - return list(tokens.values()) - - -# Connector-specific routes -@router.get("/{account_name}/connectors/{connector_name}/state", response_model=List[Dict]) -async def get_connector_state(account_name: str, connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Get current state of a specific connector. + Args: + account_name: Name of the account + connector_name: Name of the connector + client_order_id: Client order ID to cancel + trading_pair: Trading pair for the order + accounts_service: Injected accounts service + + Returns: + Success message with cancelled order ID + + Raises: + HTTPException: 404 if account/connector not found, 500 for cancellation errors + """ + try: + cancelled_order_id = await accounts_service.cancel_order( + account_name=account_name, + connector_name=connector_name, + trading_pair=trading_pair, + client_order_id=client_order_id + ) + return {"message": f"Order {cancelled_order_id} cancelled successfully"} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error cancelling order: {str(e)}") + + +@router.get("/{account_name}/connectors/{connector_name}/trading-rules/{trading_pair}") +async def get_trading_rules(account_name: str, connector_name: str, trading_pair: str, + accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get trading rules for a specific trading pair on a connector. Args: account_name: Name of the account connector_name: Name of the connector + trading_pair: Trading pair to get rules for + accounts_service: Injected accounts service Returns: - List of token information for the specific connector + Trading rules including minimum order size, price increment, etc. 
Raises: - HTTPException: 404 if connector not found for account + HTTPException: 404 if account/connector/trading pair not found """ - state = await accounts_service.get_connector_current_state(account_name, connector_name) - if not state: - raise HTTPException(status_code=404, detail=f"Connector '{connector_name}' not found for account '{account_name}'") - return state + try: + connector = accounts_service.get_connector_instance(account_name, connector_name) + + if trading_pair not in connector.trading_rules: + raise HTTPException(status_code=404, detail=f"Trading pair '{trading_pair}' not found") + + trading_rule = connector.trading_rules[trading_pair] + return { + "trading_pair": trading_pair, + "min_order_size": float(trading_rule.min_order_size), + "max_order_size": float(trading_rule.max_order_size) if trading_rule.max_order_size else None, + "min_price_increment": float(trading_rule.min_price_increment), + "min_base_amount_increment": float(trading_rule.min_base_amount_increment), + "min_notional_size": float(trading_rule.min_notional_size), + "max_price_significant_digits": trading_rule.max_price_significant_digits, + "max_quantity_significant_digits": trading_rule.max_quantity_significant_digits, + "supports_limit_orders": trading_rule.supports_limit_orders, + "supports_market_orders": trading_rule.supports_market_orders, + } + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error retrieving trading rules: {str(e)}") -@router.get("/{account_name}/connectors/{connector_name}/state/history", response_model=PaginatedResponse) -async def get_connector_history( - account_name: str, - connector_name: str, - limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), - cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), - start_time: datetime = Query(default=None, description="Start time for filtering"), - end_time: datetime = Query(default=None, description="End time for filtering"), - accounts_service: AccountsService = Depends(get_accounts_service) -): +@router.get("/{account_name}/connectors/{connector_name}/supported-order-types") +async def get_supported_order_types(account_name: str, connector_name: str, + accounts_service: AccountsService = Depends(get_accounts_service)): """ - Get historical state of a specific connector with pagination. + Get order types supported by a specific connector. 
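+
+    Example (illustrative; router prefix omitted):
+        GET /my_account/connectors/binance/supported-order-types
+        returns the names from connector.supported_order_types(), e.g. ["LIMIT", "MARKET"]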
Args: account_name: Name of the account connector_name: Name of the connector - limit: Number of items per page (1-1000) - cursor: Cursor for pagination (ISO timestamp) - start_time: Start time for filtering results - end_time: End time for filtering results + accounts_service: Injected accounts service + + Returns: + List of supported order types (LIMIT, MARKET, LIMIT_MAKER) + + Raises: + HTTPException: 404 if account or connector not found + """ + try: + connector = accounts_service.get_connector_instance(account_name, connector_name) + return [order_type.name for order_type in connector.supported_order_types()] + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error retrieving order types: {str(e)}") + + +# Global order/trade endpoints for all accounts +@router.get("/orders", response_model=List[Dict]) +async def get_all_orders( + market: Optional[str] = Query(None, description="Filter by market/connector"), + symbol: Optional[str] = Query(None, description="Filter by trading pair"), + status: Optional[str] = Query(None, description="Filter by order status"), + start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), + end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), + limit: int = Query(100, ge=1, le=1000, description="Maximum number of orders to return"), + offset: int = Query(0, ge=0, description="Number of orders to skip"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get order history across all accounts. + + Args: + market: Optional filter by market/connector + symbol: Optional filter by trading pair + status: Optional filter by order status + start_time: Optional start timestamp + end_time: Optional end timestamp + limit: Maximum number of orders to return + offset: Number of orders to skip Returns: - Paginated response with historical connector state data + List of orders across all accounts """ - data, next_cursor, has_more = await accounts_service.get_connector_state_history( - account_name=account_name, - connector_name=connector_name, - limit=limit, - cursor=cursor, + return await accounts_service.get_orders( + account_name=None, # Query all accounts + market=market, + symbol=symbol, + status=status, start_time=start_time, - end_time=end_time + end_time=end_time, + limit=limit, + offset=offset, ) + + +@router.get("/orders/active", response_model=List[Dict]) +async def get_all_active_orders( + market: Optional[str] = Query(None, description="Filter by market/connector"), + symbol: Optional[str] = Query(None, description="Filter by trading pair"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get active orders across all accounts. 
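+
+    Example (illustrative): GET /orders/active?market=binance&symbol=BTC-USDT
+    returns only orders in the SUBMITTED, OPEN, or PARTIALLY_FILLED states.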
- return PaginatedResponse( - data=data, - pagination={ - "limit": limit, - "has_more": has_more, - "next_cursor": next_cursor, - "current_cursor": cursor, - "filters": { - "account_name": account_name, - "connector_name": connector_name, - "start_time": start_time.isoformat() if start_time else None, - "end_time": end_time.isoformat() if end_time else None - } - } + Args: + market: Optional filter by market/connector + symbol: Optional filter by trading pair + + Returns: + List of active orders across all accounts + """ + return await accounts_service.get_active_orders_history( + account_name=None, # Query all accounts + market=market, + symbol=symbol, ) -# Token-specific routes -@router.get("/tokens", response_model=List[str]) -async def get_all_tokens(accounts_service: AccountsService = Depends(get_accounts_service)): +@router.get("/orders/summary", response_model=Dict) +async def get_all_orders_summary( + start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), + end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), + accounts_service: AccountsService = Depends(get_accounts_service) +): """ - Get all unique tokens across all accounts and connectors. + Get order summary statistics across all accounts. + Args: + start_time: Optional start timestamp + end_time: Optional end timestamp + Returns: - List of unique token symbols held across all accounts + Order summary statistics including fill rate, volumes, etc. """ - return await accounts_service.get_all_unique_tokens() + return await accounts_service.get_orders_summary( + account_name=None, # Query all accounts + start_time=start_time, + end_time=end_time, + ) -@router.get("/tokens/{token}/state", response_model=List[Dict]) -async def get_token_state(token: str, accounts_service: AccountsService = Depends(get_accounts_service)): +@router.get("/trades", response_model=List[Dict]) +async def get_all_trades( + market: Optional[str] = Query(None, description="Filter by market/connector"), + symbol: Optional[str] = Query(None, description="Filter by trading pair"), + trade_type: Optional[str] = Query(None, description="Filter by trade type (BUY/SELL)"), + start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), + end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), + limit: int = Query(100, ge=1, le=1000, description="Maximum number of trades to return"), + offset: int = Query(0, ge=0, description="Number of trades to skip"), + accounts_service: AccountsService = Depends(get_accounts_service) +): """ - Get current state of a specific token across all accounts. + Get trade history across all accounts. 
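+
+    Example (illustrative): GET /trades?trade_type=BUY&start_time=1718000000000&limit=50
+    (timestamps are epoch milliseconds, matching the parameters below).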
Args: - token: Symbol of the token to get state for + market: Optional filter by market/connector + symbol: Optional filter by trading pair + trade_type: Optional filter by trade type + start_time: Optional start timestamp + end_time: Optional end timestamp + limit: Maximum number of trades to return + offset: Number of trades to skip Returns: - List of token holdings across all accounts and connectors - - Raises: - HTTPException: 404 if token not found + List of trades across all accounts """ - state = await accounts_service.get_token_current_state(token) - if not state: - raise HTTPException(status_code=404, detail=f"Token '{token}' not found") - return state + return await accounts_service.get_trades( + account_name=None, # Query all accounts + market=market, + symbol=symbol, + trade_type=trade_type, + start_time=start_time, + end_time=end_time, + limit=limit, + offset=offset, + ) -@router.get("/tokens/{token}/accounts", response_model=List[Dict]) -async def get_token_accounts(token: str, accounts_service: AccountsService = Depends(get_accounts_service)): +# Order history endpoints integrated with accounts +@router.get("/{account_name}/orders", response_model=List[Dict]) +async def get_account_orders( + account_name: str, + connector_name: Optional[str] = Query(None, description="Filter by connector"), + trading_pair: Optional[str] = Query(None, description="Filter by trading pair"), + status: Optional[str] = Query(None, description="Filter by order status"), + start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), + end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), + limit: int = Query(100, ge=1, le=1000, description="Maximum number of orders to return"), + offset: int = Query(0, ge=0, description="Number of orders to skip"), + accounts_service: AccountsService = Depends(get_accounts_service) +): """ - Get all accounts that hold a specific token with aggregated information. + Get order history for a specific account. 
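+
+    Example (illustrative; router prefix omitted):
+        GET /my_account/orders?status=FILLED&limit=20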
Args: - token: Symbol of the token to search for + account_name: Name of the account + connector_name: Optional filter by connector + trading_pair: Optional filter by trading pair + status: Optional filter by order status + start_time: Optional start timestamp + end_time: Optional end timestamp + limit: Maximum number of orders to return + offset: Number of orders to skip Returns: - List of accounts holding the token with total units, value, and connector breakdown + List of orders for the account Raises: - HTTPException: 404 if token not found + HTTPException: 404 if account not found """ - token_states = await accounts_service.get_token_current_state(token) - if not token_states: - raise HTTPException(status_code=404, detail=f"Token '{token}' not found") + # Verify account exists + state = await accounts_service.get_account_current_state(account_name) + if not state: + raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - accounts = {} - for state in token_states: - account_name = state["account_name"] - if account_name not in accounts: - accounts[account_name] = { - "account_name": account_name, - "total_units": 0, - "total_value": 0, - "connectors": [] - } - accounts[account_name]["total_units"] += state["units"] - accounts[account_name]["total_value"] += state["value"] - accounts[account_name]["connectors"].append({ - "connector_name": state["connector_name"], - "units": state["units"], - "value": state["value"] - }) + # Get orders from accounts service (will be implemented) + orders = await accounts_service.get_orders( + account_name=account_name, + market=connector_name, + symbol=trading_pair, + status=status, + start_time=start_time, + end_time=end_time, + limit=limit, + offset=offset, + ) - return list(accounts.values()) + return orders -@router.get("/{account_name}/tokens/{token}", response_model=Dict) -async def get_account_token_state(account_name: str, token: str, accounts_service: AccountsService = Depends(get_accounts_service)): +@router.get("/{account_name}/orders/active", response_model=List[Dict]) +async def get_account_active_orders( + account_name: str, + connector_name: Optional[str] = Query(None, description="Filter by connector"), + trading_pair: Optional[str] = Query(None, description="Filter by trading pair"), + accounts_service: AccountsService = Depends(get_accounts_service) +): """ - Get state of a specific token for a specific account. + Get active orders for a specific account. 
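+
+    Example (illustrative; router prefix omitted):
+        GET /my_account/orders/active?connector_name=binance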
Args: account_name: Name of the account - token: Symbol of the token to get state for + connector_name: Optional filter by connector + trading_pair: Optional filter by trading pair Returns: - Token information including total units, value, and connector breakdown + List of active orders Raises: - HTTPException: 404 if account or token not found + HTTPException: 404 if account not found """ + # Verify account exists state = await accounts_service.get_account_current_state(account_name) if not state: raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - token_data = { - "token": token, - "account_name": account_name, - "total_units": 0, - "total_value": 0, - "connectors": [] - } - - for connector_name, token_list in state.items(): - for token_info in token_list: - if token_info["token"] == token: - token_data["total_units"] += token_info["units"] - token_data["total_value"] += token_info["value"] - token_data["connectors"].append({ - "connector_name": connector_name, - "units": token_info["units"], - "value": token_info["value"], - "price": token_info["price"], - "available_units": token_info["available_units"] - }) - - if not token_data["connectors"]: - raise HTTPException(status_code=404, detail=f"Token '{token}' not found for account '{account_name}'") + # Get active orders from accounts service (will be implemented) + orders = await accounts_service.get_active_orders_history( + account_name=account_name, + market=connector_name, + symbol=trading_pair, + ) - return token_data + return orders -# Portfolio aggregation routes -@router.get("/portfolio/value", response_model=Dict) -async def get_portfolio_value(accounts_service: AccountsService = Depends(get_accounts_service)): +@router.get("/{account_name}/orders/summary", response_model=Dict) +async def get_account_orders_summary( + account_name: str, + start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), + end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), + accounts_service: AccountsService = Depends(get_accounts_service) +): """ - Get total portfolio value across all accounts. + Get order summary statistics for a specific account. + Args: + account_name: Name of the account + start_time: Optional start timestamp + end_time: Optional end timestamp + Returns: - Dictionary with total portfolio value and breakdown by account - """ - return await accounts_service.get_portfolio_value() - - -@router.get("/portfolio/tokens", response_model=List[Dict]) -async def get_portfolio_tokens(accounts_service: AccountsService = Depends(get_accounts_service)): + Order summary statistics including fill rate, volumes, etc. + + Raises: + HTTPException: 404 if account not found """ - Get all tokens with aggregated holdings across all accounts. 
- - Returns: - List of tokens with total units, value, average price, and account breakdown - """ - all_states = accounts_service.get_accounts_state() - - tokens = {} - for account_name, connectors in all_states.items(): - for connector_name, token_list in connectors.items(): - for token_info in token_list: - token = token_info["token"] - if token not in tokens: - tokens[token] = { - "token": token, - "total_units": 0, - "total_value": 0, - "accounts": {} - } - tokens[token]["total_units"] += token_info["units"] - tokens[token]["total_value"] += token_info["value"] - - if account_name not in tokens[token]["accounts"]: - tokens[token]["accounts"][account_name] = { - "units": 0, - "value": 0 - } - tokens[token]["accounts"][account_name]["units"] += token_info["units"] - tokens[token]["accounts"][account_name]["value"] += token_info["value"] - - # Convert accounts dict to list for response - result = [] - for token, data in tokens.items(): - token_data = { - "token": token, - "total_units": data["total_units"], - "total_value": data["total_value"], - "average_price": data["total_value"] / data["total_units"] if data["total_units"] > 0 else 0, - "accounts": [ - { - "account_name": acc_name, - "units": acc_data["units"], - "value": acc_data["value"] - } - for acc_name, acc_data in data["accounts"].items() - ] - } - result.append(token_data) + # Verify account exists + state = await accounts_service.get_account_current_state(account_name) + if not state: + raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - # Sort by total value descending - result.sort(key=lambda x: x["total_value"], reverse=True) + # Get summary from accounts service (will be implemented) + summary = await accounts_service.get_orders_summary( + account_name=account_name, + start_time=start_time, + end_time=end_time, + ) - return result + return summary -@router.get("/portfolio/distribution", response_model=Dict) -async def get_portfolio_distribution(accounts_service: AccountsService = Depends(get_accounts_service)): +@router.get("/{account_name}/trades", response_model=List[Dict]) +async def get_account_trades( + account_name: str, + connector_name: Optional[str] = Query(None, description="Filter by connector"), + trading_pair: Optional[str] = Query(None, description="Filter by trading pair"), + trade_type: Optional[str] = Query(None, description="Filter by trade type (BUY/SELL)"), + start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), + end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), + limit: int = Query(100, ge=1, le=1000, description="Maximum number of trades to return"), + offset: int = Query(0, ge=0, description="Number of trades to skip"), + accounts_service: AccountsService = Depends(get_accounts_service) +): """ - Get portfolio distribution by token, exchange, and account. + Get trade history for a specific account. 
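+
+    Example response element (keys match get_trades; values are illustrative):
+        {"trade_id": "HBOT-1_1718280000", "trading_pair": "BTC-USDT", "trade_type": "SELL",
+         "amount": 0.01, "price": 67000.0, "fee_paid": 0.67, "fee_currency": "USDT", ...}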
+ Args: + account_name: Name of the account + connector_name: Optional filter by connector + trading_pair: Optional filter by trading pair + trade_type: Optional filter by trade type + start_time: Optional start timestamp + end_time: Optional end timestamp + limit: Maximum number of trades to return + offset: Number of trades to skip + Returns: - Dictionary with total value and percentage breakdowns by token, exchange, and account + List of trades for the account + + Raises: + HTTPException: 404 if account not found """ - all_states = accounts_service.get_accounts_state() - portfolio_value = await accounts_service.get_portfolio_value() - total_value = portfolio_value["total_value"] - - if total_value == 0: - return { - "total_value": 0, - "by_token": {}, - "by_exchange": {}, - "by_account": {} - } + # Verify account exists + state = await accounts_service.get_account_current_state(account_name) + if not state: + raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - # Distribution by token - by_token = {} - by_exchange = {} - - for account_name, connectors in all_states.items(): - for connector_name, token_list in connectors.items(): - if connector_name not in by_exchange: - by_exchange[connector_name] = {"value": 0, "percentage": 0} - - for token_info in token_list: - token = token_info["token"] - value = token_info["value"] - - if token not in by_token: - by_token[token] = {"value": 0, "percentage": 0} - - by_token[token]["value"] += value - by_exchange[connector_name]["value"] += value - - # Calculate percentages - for token_data in by_token.values(): - token_data["percentage"] = (token_data["value"] / total_value) * 100 - - for exchange_data in by_exchange.values(): - exchange_data["percentage"] = (exchange_data["value"] / total_value) * 100 - - # Account distribution from portfolio value - by_account = {} - for account_name, value in portfolio_value["accounts"].items(): - by_account[account_name] = { - "value": value, - "percentage": (value / total_value) * 100 if total_value > 0 else 0 - } + # Get trades from accounts service (will be implemented) + trades = await accounts_service.get_trades( + account_name=account_name, + market=connector_name, + symbol=trading_pair, + trade_type=trade_type, + start_time=start_time, + end_time=end_time, + limit=limit, + offset=offset, + ) - return { - "total_value": total_value, - "by_token": by_token, - "by_exchange": by_exchange, - "by_account": by_account - } + return trades From a9749b929290f05338e3886e250f923a2397b642 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 13 Jun 2025 22:07:28 +0800 Subject: [PATCH 078/244] (feat) bump accounts service to use native connector loops and order updates --- services/accounts_service.py | 581 +++++++++++++++++++++++++++++------ 1 file changed, 485 insertions(+), 96 deletions(-) diff --git a/services/accounts_service.py b/services/accounts_service.py index 9e362933..98920b66 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -6,9 +6,10 @@ from fastapi import HTTPException from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger +from hummingbot.core.data_type.common import OrderType, TradeType from config import settings -from database import AsyncDatabaseManager, AccountRepository +from database import AsyncDatabaseManager, AccountRepository, Order, Trade from utils.connector_manager import ConnectorManager from utils.file_system import FileSystemUtil @@ -40,15 +41,12 @@ def __init__(self, """ self.secrets_manager = 
ETHKeyFileSecretManger(settings.security.config_password) self.connector_manager = ConnectorManager(self.secrets_manager) - self.accounts = {} self.accounts_state = {} - self.account_state_update_event = asyncio.Event() - self.initialize_accounts() self.update_account_state_interval = account_update_interval * 60 self.default_quote = default_quote self._update_account_state_task: Optional[asyncio.Task] = None - # Database setup + # Database setup for account states and orders self.db_manager = AsyncDatabaseManager(settings.database.url) self._db_initialized = False @@ -68,32 +66,42 @@ def get_default_market(self, token: str, connector_name: str) -> str: quote = self.default_quotes.get(connector_name, self.default_quote) return f"{token}-{quote}" - def start_update_account_state_loop(self): + def start(self): """ - Start the loop that updates the balances of all the accounts at a fixed interval. + Start the loop that updates the account state at a fixed interval. + Note: Balance updates are now handled automatically by connector.start_network() :return: """ + # Start the update loop which will call check_all_connectors self._update_account_state_task = asyncio.create_task(self.update_account_state_loop()) - - def stop_update_account_state_loop(self): + + async def stop(self): """ - Stop the loop that updates the balances of all the accounts at a fixed interval. - :return: + Stop all accounts service tasks and cleanup resources. + This is the main cleanup method that should be called during application shutdown. """ + logging.info("Stopping AccountsService...") + + # Stop the account state update loop if self._update_account_state_task: self._update_account_state_task.cancel() - self._update_account_state_task = None + self._update_account_state_task = None + logging.info("Stopped account state update loop") + + # Stop all connectors through the ConnectorManager + await self.connector_manager.stop_all_connectors() + + logging.info("AccountsService stopped successfully") async def update_account_state_loop(self): """ - The loop that updates the balances of all the accounts at a fixed interval. + The loop that updates the account state at a fixed interval. + Note: Balance updates are now handled automatically by connector.start_network() :return: """ while True: try: await self.check_all_connectors() - await self.update_balances() - await self.update_trading_rules() await self.update_account_state() await self.dump_account_state() except Exception as e: @@ -150,89 +158,83 @@ async def load_account_state_history(self, async def check_all_connectors(self): """ - Check all avaialble credentials for all accounts and see if the connectors are created. - :return: + Check all available credentials for all accounts and ensure connectors are initialized. + This method is idempotent - it only initializes missing connectors. """ for account_name in self.list_accounts(): - for connector_name in self.list_credentials(account_name): - try: - connector_name = connector_name.split(".")[0] - if account_name not in self.accounts or connector_name not in self.accounts[account_name]: - self.initialize_connector(account_name, connector_name) - except Exception as e: - logging.error(f"Error initializing connector {connector_name}: {e}") + await self._ensure_account_connectors_initialized(account_name) - def initialize_accounts(self): + async def _ensure_account_connectors_initialized(self, account_name: str): """ - Initialize all the connectors that are connected to each account. 
- :return: - """ - for account_name in self.list_accounts(): - self.accounts[account_name] = {} - for connector_name in self.list_credentials(account_name): - try: - connector_name = connector_name.split(".")[0] - connector = self.connector_manager.get_connector(account_name, connector_name) - self.accounts[account_name][connector_name] = connector - except Exception as e: - logging.error(f"Error initializing connector {connector_name}: {e}") - - def initialize_account(self, account_name: str): - """ - Initialize all the connectors that are connected to the specified account. - :param account_name: The name of the account. - :return: + Ensure all connectors for a specific account are initialized. + This delegates to ConnectorManager for actual initialization. + + :param account_name: The name of the account to initialize connectors for. """ - for connector_name in self.list_credentials(account_name): + # Initialize missing connectors + for connector_name in self.connector_manager.list_available_credentials(account_name): try: - connector_name = connector_name.split(".")[0] - self.initialize_connector(account_name, connector_name) + # Only initialize if connector doesn't exist + if not self.connector_manager.is_connector_initialized(account_name, connector_name): + await self.connector_manager.initialize_connector_with_tracking( + account_name, connector_name, self.db_manager + ) + await self._update_connector_balance(account_name, connector_name) + except Exception as e: - logging.error(f"Error initializing connector {connector_name}: {e}") + logging.error(f"Error initializing connector {connector_name} for account {account_name}: {e}") - def initialize_connector(self, account_name: str, connector_name: str): + + async def _update_connector_balance(self, account_name: str, connector_name: str): """ - Initialize the specified connector for the specified account. - :param account_name: The name of the account. - :param connector_name: The name of the connector. - :return: + Update balance for a specific connector and store in accounts_state. + This is called after connector initialization to get initial balance data. 
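+
+        Resulting accounts_state entry (illustrative account, connector, and values):
+            self.accounts_state["master_account"]["binance"] = [
+                {"token": "BTC", "units": 0.5, "price": 67000.0,
+                 "value": 33500.0, "available_units": 0.4}]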
""" - if account_name not in self.accounts: - self.accounts[account_name] = {} try: + tokens_info = [] connector = self.connector_manager.get_connector(account_name, connector_name) - self.accounts[account_name][connector_name] = connector - except Exception as e: - logging.error(f"Error initializing connector {connector_name}: {e}") - - async def update_balances(self): - tasks = [] - for account_name, connectors in self.accounts.items(): - for connector_instance in connectors.values(): - tasks.append(self._safe_update_balances(connector_instance)) - await asyncio.gather(*tasks) - - async def _safe_update_balances(self, connector_instance): - try: - await connector_instance._update_balances() - except Exception as e: - logging.error(f"Error updating balances for connector {connector_instance}: {e}") - - async def update_trading_rules(self): - tasks = [] - for account_name, connectors in self.accounts.items(): - for connector_instance in connectors.values(): - tasks.append(self._safe_update_trading_rules(connector_instance)) - await asyncio.gather(*tasks) + await connector._update_balances() + balances = [{"token": key, "units": value} for key, value in connector.get_all_balances().items() if + value != Decimal("0") and key not in settings.banned_tokens] + unique_tokens = [balance["token"] for balance in balances] + trading_pairs = [self.get_default_market(token, connector_name) for token in unique_tokens if "USD" not in token] + last_traded_prices = await self._safe_get_last_traded_prices(connector, trading_pairs) + + for balance in balances: + token = balance["token"] + if "USD" in token: + price = Decimal("1") + else: + market = self.get_default_market(balance["token"], connector_name) + price = Decimal(last_traded_prices.get(market, 0)) + tokens_info.append({ + "token": balance["token"], + "units": float(balance["units"]), + "price": float(price), + "value": float(price * balance["units"]), + "available_units": float(connector.get_available_balance(balance["token"])) + }) + + # Ensure account exists in accounts_state before assignment + if account_name not in self.accounts_state: + self.accounts_state[account_name] = {} + + self.accounts_state[account_name][connector_name] = tokens_info - async def _safe_update_trading_rules(self, connector_instance): - try: - await connector_instance._update_trading_rules() + logging.info(f"Updated balance for {account_name}/{connector_name}: {len(tokens_info)} tokens") + except Exception as e: - logging.error(f"Error updating trading rules for connector {connector_instance}: {e}") + logging.error(f"Error updating balance for connector {connector_name} in account {account_name}: {e}") + # Set empty state if update fails + if account_name not in self.accounts_state: + self.accounts_state[account_name] = {} + self.accounts_state[account_name][connector_name] = [] async def update_account_state(self): - for account_name, connectors in self.accounts.items(): + # Get all connectors from ConnectorManager + all_connectors = self.connector_manager.get_all_connectors() + + for account_name, connectors in all_connectors.items(): if account_name not in self.accounts_state: self.accounts_state[account_name] = {} for connector_name, connector in connectors.items(): @@ -257,7 +259,6 @@ async def update_account_state(self): "value": float(price * balance["units"]), "available_units": float(connector.get_available_balance(balance["token"])) }) - self.account_state_update_event.set() except Exception as e: logging.error( f"Error updating balances for connector 
{connector_name} in account {account_name}: {e}") @@ -283,10 +284,22 @@ def get_connector_config_map(self, connector_name: str): return self.connector_manager.get_connector_config_map(connector_name) async def add_credentials(self, account_name: str, connector_name: str, credentials: dict): - new_connector = await self.connector_manager.update_connector_keys(account_name, connector_name, credentials) - self.accounts[account_name][connector_name] = new_connector - await self.update_account_state() - await self.dump_account_state() + """ + Add or update connector credentials and initialize the connector. + + :param account_name: The name of the account. + :param connector_name: The name of the connector. + :param credentials: Dictionary containing the connector credentials. + """ + # Update the connector keys (this saves the credentials to file) + await self.connector_manager.update_connector_keys(account_name, connector_name, credentials) + + # Initialize the connector with tracking + await self.connector_manager.initialize_connector_with_tracking( + account_name, connector_name, self.db_manager + ) + await self._update_connector_balance(account_name, connector_name) + @staticmethod @@ -309,7 +322,7 @@ def list_credentials(self, account_name: str): except FileNotFoundError as e: raise HTTPException(status_code=404, detail=str(e)) - def delete_credentials(self, account_name: str, connector_name: str): + async def delete_credentials(self, account_name: str, connector_name: str): """ Delete the credentials of the specified connector for the specified account. :param account_name: @@ -318,10 +331,14 @@ def delete_credentials(self, account_name: str, connector_name: str): """ if file_system.path_exists(f"credentials/{account_name}/connectors/{connector_name}.yml"): file_system.delete_file(directory=f"credentials/{account_name}/connectors", file_name=f"{connector_name}.yml") - if connector_name in self.accounts[account_name]: - self.accounts[account_name].pop(connector_name) - if connector_name in self.accounts_state[account_name]: + + # Stop the connector if it's running + await self.connector_manager.stop_connector(account_name, connector_name) + + # Remove from account state + if account_name in self.accounts_state and connector_name in self.accounts_state[account_name]: self.accounts_state[account_name].pop(connector_name) + # Clear the connector from cache self.connector_manager.clear_cache(account_name, connector_name) @@ -331,25 +348,36 @@ def add_account(self, account_name: str): :param account_name: :return: """ - if account_name in self.accounts: + # Check if account already exists by looking at folders + if account_name in self.list_accounts(): raise HTTPException(status_code=400, detail="Account already exists.") + files_to_copy = ["conf_client.yml", "conf_fee_overrides.yml", "hummingbot_logs.yml", ".password_verification"] file_system.create_folder('credentials', account_name) file_system.create_folder(f'credentials/{account_name}', "connectors") for file in files_to_copy: file_system.copy_file(f"credentials/master_account/{file}", f"credentials/{account_name}/{file}") - self.accounts[account_name] = {} + + # Initialize account state self.accounts_state[account_name] = {} - def delete_account(self, account_name: str): + async def delete_account(self, account_name: str): """ Delete the specified account. 
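+        Stops any running connectors for the account before removing its
+        credentials folder and clearing any cached state.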
:param account_name: :return: """ + # Stop all connectors for this account + for connector_name in self.connector_manager.list_account_connectors(account_name): + await self.connector_manager.stop_connector(account_name, connector_name) + + # Delete account folder file_system.delete_folder('credentials', account_name) - self.accounts.pop(account_name) - self.accounts_state.pop(account_name) + + # Remove from account state + if account_name in self.accounts_state: + self.accounts_state.pop(account_name) + # Clear all connectors for this account from cache self.connector_manager.clear_cache(account_name) @@ -496,3 +524,364 @@ async def get_portfolio_value(self, account_name: Optional[str] = None) -> Dict[ portfolio["total_value"] += account_value return portfolio + + async def place_trade(self, account_name: str, connector_name: str, trading_pair: str, + trade_type: TradeType, amount: Decimal, order_type: OrderType = OrderType.LIMIT, + price: Optional[Decimal] = None, market_data_manager = None) -> str: + """ + Place a trade using the specified account and connector. + + Args: + account_name: Name of the account to trade with + connector_name: Name of the connector/exchange + trading_pair: Trading pair (e.g., BTC-USDT) + trade_type: "BUY" or "SELL" + amount: Amount to trade + order_type: "LIMIT", "MARKET", or "LIMIT_MAKER" + price: Price for limit orders (required for LIMIT and LIMIT_MAKER) + + Returns: + Client order ID assigned by the connector + + Raises: + HTTPException: If account, connector not found, or trade fails + """ + # Validate account exists + if account_name not in self.list_accounts(): + raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") + + # Validate connector exists for account + if not self.connector_manager.is_connector_initialized(account_name, connector_name): + raise HTTPException(status_code=404, detail=f"Connector '{connector_name}' not found for account '{account_name}'") + + # Get the connector instance + connector = self.connector_manager.get_connector(account_name, connector_name) + + # Validate price for limit orders + if order_type in [OrderType.LIMIT, OrderType.LIMIT_MAKER] and price is None: + raise HTTPException(status_code=400, detail="Price is required for LIMIT and LIMIT_MAKER orders") + + # For market orders without price, get current market price + if order_type == OrderType.MARKET and price is None and market_data_manager: + try: + prices = await market_data_manager.get_prices(connector_name, [trading_pair]) + if trading_pair in prices and "error" not in prices: + price = Decimal(str(prices[trading_pair])) + logging.info(f"Retrieved market price for {trading_pair}: {price}") + else: + logging.warning(f"Could not get market price for {trading_pair}, using 0") + price = Decimal("0") + except Exception as e: + logging.error(f"Error getting market price for {trading_pair}: {e}") + price = Decimal("0") + + try: + # Place the order using the connector + if trade_type == TradeType.BUY: + order_id = connector.buy( + trading_pair=trading_pair, + amount=amount, + order_type=order_type, + price=price or Decimal("0") + ) + else: + order_id = connector.sell( + trading_pair=trading_pair, + amount=amount, + order_type=order_type, + price=price or Decimal("0") + ) + + # Wait briefly to check for immediate failures + await asyncio.sleep(0.5) + + # Check if order was immediately rejected or failed + if order_id in connector.in_flight_orders: + order = connector.in_flight_orders[order_id] + if hasattr(order, 'last_state') and 
order.last_state in ["FAILED", "CANCELLED"]: + error_msg = f"Order failed immediately: {getattr(order, 'last_failure_reason', 'Unknown error')}" + logging.error(error_msg) + raise HTTPException(status_code=400, detail=error_msg) + + logging.info(f"Placed {trade_type} order for {amount} {trading_pair} on {connector_name} (Account: {account_name}). Order ID: {order_id}") + return order_id + + except HTTPException: + # Re-raise HTTP exceptions as-is + raise + except Exception as e: + logging.error(f"Failed to place {trade_type} order: {e}") + raise HTTPException(status_code=500, detail=f"Failed to place trade: {str(e)}") + + def get_connector_instance(self, account_name: str, connector_name: str): + """ + Get a connector instance for direct access. + + Args: + account_name: Name of the account + connector_name: Name of the connector + + Returns: + Connector instance + + Raises: + HTTPException: If account or connector not found + """ + if account_name not in self.list_accounts(): + raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") + + if not self.connector_manager.is_connector_initialized(account_name, connector_name): + raise HTTPException(status_code=404, detail=f"Connector '{connector_name}' not found for account '{account_name}'") + + return self.connector_manager.get_connector(account_name, connector_name) + + def get_active_orders(self, account_name: str, connector_name: str) -> Dict[str, any]: + """ + Get active orders for a specific connector. + + Args: + account_name: Name of the account + connector_name: Name of the connector + + Returns: + Dictionary of active orders + """ + connector = self.get_connector_instance(account_name, connector_name) + return {order_id: order.to_json() for order_id, order in connector.in_flight_orders.items()} + + async def cancel_order(self, account_name: str, connector_name: str, + trading_pair: str, client_order_id: str) -> str: + """ + Cancel an active order. 
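+
+        Example (illustrative values):
+            await accounts_service.cancel_order("master_account", "binance",
+                                                "BTC-USDT", "HBOT-123")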
+
+        Args:
+            account_name: Name of the account
+            connector_name: Name of the connector
+            trading_pair: Trading pair
+            client_order_id: Client order ID to cancel
+
+        Returns:
+            Client order ID that was cancelled
+        """
+        connector = self.get_connector_instance(account_name, connector_name)
+
+        try:
+            result = connector.cancel(trading_pair=trading_pair, client_order_id=client_order_id)
+            logging.info(f"Cancelled order {client_order_id} on {connector_name} (Account: {account_name})")
+            return result
+        except Exception as e:
+            logging.error(f"Failed to cancel order {client_order_id}: {e}")
+            raise HTTPException(status_code=500, detail=f"Failed to cancel order: {str(e)}")
+
+
+    async def get_orders(self, account_name: Optional[str] = None, market: Optional[str] = None,
+                         symbol: Optional[str] = None, status: Optional[str] = None,
+                         start_time: Optional[int] = None, end_time: Optional[int] = None,
+                         limit: int = 100, offset: int = 0) -> List[Dict]:
+        """Get order history using our AsyncDatabaseManager."""
+        await self.ensure_db_initialized()
+
+        try:
+            # AsyncSession has no legacy .query() API; build a 2.0-style select instead
+            from sqlalchemy import select
+
+            async with self.db_manager.get_session_context() as session:
+                query = select(Order)
+
+                # Filter by account name if specified
+                if account_name:
+                    query = query.filter(Order.account_name == account_name)
+
+                # Filter by connector name if specified
+                if market:
+                    query = query.filter(Order.connector_name == market)
+
+                # Filter by trading pair if specified
+                if symbol:
+                    query = query.filter(Order.trading_pair == symbol)
+
+                # Filter by status if specified
+                if status:
+                    query = query.filter(Order.status == status)
+
+                # Filter by time range if specified
+                if start_time:
+                    start_dt = datetime.fromtimestamp(start_time / 1000)  # Convert from milliseconds
+                    query = query.filter(Order.created_at >= start_dt)
+                if end_time:
+                    end_dt = datetime.fromtimestamp(end_time / 1000)  # Convert from milliseconds
+                    query = query.filter(Order.created_at <= end_dt)
+
+                query = query.order_by(Order.created_at.desc())
+                query = query.limit(limit).offset(offset)
+
+                result = await session.execute(query)
+                orders = result.scalars().all()
+
+                # Convert to dict format
+                return [
+                    {
+                        "order_id": order.client_order_id,
+                        "account_name": order.account_name,
+                        "connector_name": order.connector_name,
+                        "trading_pair": order.trading_pair,
+                        "trade_type": order.trade_type,
+                        "order_type": order.order_type,
+                        "amount": float(order.amount),
+                        "price": float(order.price) if order.price else None,
+                        "status": order.status,
+                        "filled_amount": float(order.filled_amount),
+                        "average_fill_price": float(order.average_fill_price) if order.average_fill_price else None,
+                        "fee_paid": float(order.fee_paid) if order.fee_paid else None,
+                        "fee_currency": order.fee_currency,
+                        "created_at": order.created_at.isoformat(),
+                        "updated_at": order.updated_at.isoformat(),
+                        "exchange_order_id": order.exchange_order_id,
+                        "error_message": order.error_message,
+                    }
+                    for order in orders
+                ]
+        except Exception as e:
+            logging.error(f"Error getting orders: {e}")
+            return []
+
+    async def get_active_orders_history(self, account_name: Optional[str] = None, market: Optional[str] = None,
+                                        symbol: Optional[str] = None) -> List[Dict]:
+        """Get active orders from database"""
+        await self.ensure_db_initialized()
+
+        try:
+            # Same 2.0-style select as get_orders; AsyncSession cannot execute legacy Query objects
+            from sqlalchemy import select
+
+            async with self.db_manager.get_session_context() as session:
+                query = select(Order).filter(
+                    Order.status.in_(["SUBMITTED", "OPEN", "PARTIALLY_FILLED"])
+                )
+
+                # Filter by account name if specified
+                if account_name:
+                    query = query.filter(Order.account_name == account_name)
+
+                # Filter by connector name if specified
+                if market:
+                    query = query.filter(Order.connector_name == market)
+
+                # Filter by trading pair if specified
+                if symbol:
+                    query = query.filter(Order.trading_pair == symbol)
+
+                query = query.order_by(Order.created_at.desc())
+                query = query.limit(1000)
+
+                result = await session.execute(query)
+                orders = result.scalars().all()
+
+                # Convert to dict format using same structure as get_orders
+                return [
+                    {
+                        "order_id": order.client_order_id,
+                        "account_name": order.account_name,
+                        "connector_name": order.connector_name,
+                        "trading_pair": order.trading_pair,
+                        "trade_type": order.trade_type,
+                        "order_type": order.order_type,
+                        "amount": float(order.amount),
+                        "price": float(order.price) if order.price else None,
+                        "status": order.status,
+                        "filled_amount": float(order.filled_amount),
+                        "average_fill_price": float(order.average_fill_price) if order.average_fill_price else None,
+                        "fee_paid": float(order.fee_paid) if order.fee_paid else None,
+                        "fee_currency": order.fee_currency,
+                        "created_at": order.created_at.isoformat(),
+                        "updated_at": order.updated_at.isoformat(),
+                        "exchange_order_id": order.exchange_order_id,
+                        "error_message": order.error_message,
+                    }
+                    for order in orders
+                ]
+        except Exception as e:
+            logging.error(f"Error getting active orders: {e}")
+            return []
+
+    async def get_orders_summary(self, account_name: Optional[str] = None, start_time: Optional[int] = None,
+                                 end_time: Optional[int] = None) -> Dict:
+        """Get order summary statistics"""
+        orders = await self.get_orders(
+            account_name=account_name,
+            start_time=start_time,
+            end_time=end_time,
+            limit=10000  # Get all for summary
+        )
+
+        total_orders = len(orders)
+        filled_orders = sum(1 for o in orders if o.get("status") == "FILLED")
+        cancelled_orders = sum(1 for o in orders if o.get("status") == "CANCELLED")
+        failed_orders = sum(1 for o in orders if o.get("status") == "FAILED")
+        active_orders = sum(1 for o in orders if o.get("status") in ["SUBMITTED", "OPEN", "PARTIALLY_FILLED"])
+
+        return {
+            "total_orders": total_orders,
+            "filled_orders": filled_orders,
+            "cancelled_orders": cancelled_orders,
+            "failed_orders": failed_orders,
+            "active_orders": active_orders,
+            "fill_rate": filled_orders / total_orders if total_orders > 0 else 0,
+        }
+
+    async def get_trades(self, account_name: Optional[str] = None, market: Optional[str] = None,
+                         symbol: Optional[str] = None, trade_type: Optional[str] = None,
+                         start_time: Optional[int] = None, end_time: Optional[int] = None,
+                         limit: int = 100, offset: int = 0) -> List[Dict]:
+        """Get trade history using our AsyncDatabaseManager"""
+        await self.ensure_db_initialized()
+
+        try:
+            from sqlalchemy import select
+            from sqlalchemy.orm import joinedload  # eager-load Trade.order; lazy loading fails on AsyncSession
+
+            async with self.db_manager.get_session_context() as session:
+                # Join trades with orders to get account information
+                query = (select(Trade)
+                         .join(Order, Trade.order_id == Order.id)
+                         .options(joinedload(Trade.order)))
+
+                # Filter by account name if specified
+                if account_name:
+                    query = query.filter(Order.account_name == account_name)
+
+                # Filter by connector name if specified
+                if market:
+                    query = query.filter(Order.connector_name == market)
+
+                # Filter by trading pair if specified
+                if symbol:
+                    query = query.filter(Trade.trading_pair == symbol)
+
+                # Filter by trade type if specified
+                if trade_type:
+                    query = query.filter(Trade.trade_type == trade_type)
+
+                # Filter by time range if specified
+                if start_time:
+                    start_dt = datetime.fromtimestamp(start_time / 1000)  # Convert from milliseconds
+                    query = query.filter(Trade.timestamp >= start_dt)
+                if end_time:
+                    end_dt = 
datetime.fromtimestamp(end_time / 1000) # Convert from milliseconds + query = query.filter(Trade.timestamp <= end_dt) + + query = query.order_by(Trade.timestamp.desc()) + query = query.limit(limit).offset(offset) + + result = await session.execute(query) + trades = result.scalars().all() + + # Convert to dict format + return [ + { + "trade_id": trade.trade_id, + "order_id": trade.order.client_order_id if trade.order else None, + "account_name": trade.order.account_name if trade.order else None, + "connector_name": trade.order.connector_name if trade.order else None, + "trading_pair": trade.trading_pair, + "trade_type": trade.trade_type, + "amount": float(trade.amount), + "price": float(trade.price), + "fee_paid": float(trade.fee_paid), + "fee_currency": trade.fee_currency, + "timestamp": trade.timestamp.isoformat(), + } + for trade in trades + ] + except Exception as e: + logging.error(f"Error getting trades: {e}") + return [] From cb4d3ff0aea52da2a2e1b5ea718770cc01f330a9 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 13 Jun 2025 22:07:56 +0800 Subject: [PATCH 079/244] (feat) add orders recorder subscriber to update orders --- services/orders_recorder.py | 183 ++++++++++++++++++++++++++++++++++++ 1 file changed, 183 insertions(+) create mode 100644 services/orders_recorder.py diff --git a/services/orders_recorder.py b/services/orders_recorder.py new file mode 100644 index 00000000..4db16c42 --- /dev/null +++ b/services/orders_recorder.py @@ -0,0 +1,183 @@ +import asyncio +import logging +from typing import Dict, Any, Optional +from decimal import Decimal +from datetime import datetime + +from hummingbot.core.event.event_listener import EventListener +from hummingbot.core.event.events import ( + OrderType, + TradeType, + BuyOrderCreatedEvent, + SellOrderCreatedEvent, + OrderFilledEvent, + OrderCancelledEvent, + MarketEvent, + BuyOrderCompletedEvent, + SellOrderCompletedEvent +) +from hummingbot.connector.connector_base import ConnectorBase + +from database import AsyncDatabaseManager +from database.models import Order, Trade + + +class OrdersRecorder(EventListener): + """ + Custom orders recorder that mimics Hummingbot's MarketsRecorder functionality + but uses our AsyncDatabaseManager for storage. 
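+
+    Typical wiring (sketch; names illustrative):
+        recorder = OrdersRecorder(db_manager, "master_account", "binance")
+        recorder.start(connector)   # subscribe to the connector's order events
+        ...
+        await recorder.stop()       # detach listeners on shutdown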
+    """
+
+    def __init__(self, db_manager: AsyncDatabaseManager, account_name: str, connector_name: str):
+        super().__init__()
+        self.db_manager = db_manager
+        self.account_name = account_name
+        self.connector_name = connector_name
+        self._connector: Optional[ConnectorBase] = None
+        self._session = None
+
+    def start(self, connector: ConnectorBase):
+        """Start recording orders for the given connector"""
+        self._connector = connector
+
+        # Subscribe to order events
+        connector.add_listener(MarketEvent.BuyOrderCreated, self)
+        connector.add_listener(MarketEvent.SellOrderCreated, self)
+        connector.add_listener(MarketEvent.OrderFilled, self)
+        connector.add_listener(MarketEvent.OrderCancelled, self)
+        connector.add_listener(MarketEvent.OrderFailure, self)
+        connector.add_listener(MarketEvent.BuyOrderCompleted, self)
+        connector.add_listener(MarketEvent.SellOrderCompleted, self)
+
+        logging.info(f"OrdersRecorder started for {self.account_name}/{self.connector_name}")
+
+    async def stop(self):
+        """Stop recording orders"""
+        if self._connector:
+            # Remove all event listeners
+            self._connector.remove_listener(MarketEvent.BuyOrderCreated, self)
+            self._connector.remove_listener(MarketEvent.SellOrderCreated, self)
+            self._connector.remove_listener(MarketEvent.OrderFilled, self)
+            self._connector.remove_listener(MarketEvent.OrderCancelled, self)
+            self._connector.remove_listener(MarketEvent.OrderFailure, self)
+            self._connector.remove_listener(MarketEvent.BuyOrderCompleted, self)
+            self._connector.remove_listener(MarketEvent.SellOrderCompleted, self)
+
+        logging.info(f"OrdersRecorder stopped for {self.account_name}/{self.connector_name}")
+
+    def __call__(self, event: Any):
+        """Handle incoming events.
+
+        Hummingbot's event system invokes listeners synchronously with a single
+        event object, so dispatch is done on the event type here and the async
+        database handlers are scheduled on the running event loop.
+        """
+        try:
+            # Local import: the failure event class is not in the module imports above
+            from hummingbot.core.event.events import MarketOrderFailureEvent
+
+            if isinstance(event, BuyOrderCreatedEvent):
+                asyncio.ensure_future(self._handle_order_created(event, TradeType.BUY))
+            elif isinstance(event, SellOrderCreatedEvent):
+                asyncio.ensure_future(self._handle_order_created(event, TradeType.SELL))
+            elif isinstance(event, OrderFilledEvent):
+                asyncio.ensure_future(self._handle_order_filled(event))
+            elif isinstance(event, OrderCancelledEvent):
+                asyncio.ensure_future(self._handle_order_cancelled(event))
+            elif isinstance(event, MarketOrderFailureEvent):
+                asyncio.ensure_future(self._handle_order_failed(event))
+            elif isinstance(event, (BuyOrderCompletedEvent, SellOrderCompletedEvent)):
+                asyncio.ensure_future(self._handle_order_completed(event))
+            else:
+                logging.error(f"Unknown event received: {event}")
+        except Exception as e:
+            logging.error(f"Error handling event {event}: {e}")
+
+    async def _handle_order_created(self, event: Any, trade_type: TradeType):
+        """Handle order creation events"""
+        try:
+            async with self.db_manager.get_session_context() as session:
+                order = Order(
+                    client_order_id=event.order_id,
+                    account_name=self.account_name,
+                    connector_name=self.connector_name,
+                    trading_pair=event.trading_pair,
+                    trade_type=trade_type.name,
+                    order_type=event.order_type.name if hasattr(event, 'order_type') else 'UNKNOWN',
+                    amount=float(event.amount),
+                    price=float(event.price) if event.price else None,
+                    status="SUBMITTED"
+                )
+                session.add(order)
+                await session.commit()
+
+                logging.debug(f"Recorded order created: {event.order_id}")
+        except Exception as e:
+            logging.error(f"Error recording order created: {e}")
+
+    async def _handle_order_filled(self, event: OrderFilledEvent):
+        """Handle order fill events"""
+        try:
+            async with 
self.db_manager.get_session_context() as session:
+                # Update order with fill information (accumulate across partial fills)
+                order = await self._get_order_by_client_id(session, event.order_id)
+                if order:
+                    total_filled = Decimal(str(order.filled_amount or 0)) + event.amount
+                    order.filled_amount = float(total_filled)
+                    order.average_fill_price = float(event.price)
+                    order.status = "FILLED" if total_filled >= Decimal(str(order.amount)) else "PARTIALLY_FILLED"
+                    order.fee_paid = float(event.trade_fee.fee) if event.trade_fee else None
+                    order.fee_currency = event.trade_fee.fee_asset if event.trade_fee else None
+
+                # Create trade record
+                trade = Trade(
+                    order_id=order.id if order else None,
+                    trade_id=f"{event.order_id}_{event.timestamp}",
+                    timestamp=datetime.fromtimestamp(event.timestamp),
+                    trading_pair=event.trading_pair,
+                    trade_type=event.trade_type.name,
+                    amount=float(event.amount),
+                    price=float(event.price),
+                    fee_paid=float(event.trade_fee.fee) if event.trade_fee else 0,
+                    fee_currency=event.trade_fee.fee_asset if event.trade_fee else None
+                )
+                session.add(trade)
+                await session.commit()
+
+                logging.debug(f"Recorded order fill: {event.order_id} - {event.amount} @ {event.price}")
+        except Exception as e:
+            logging.error(f"Error recording order fill: {e}")
+
+    async def _get_order_by_client_id(self, session, client_order_id: str) -> Optional[Order]:
+        """Look up an Order row by client_order_id, which is not the primary key
+        (session.get() only accepts primary-key values)."""
+        from sqlalchemy import select  # local import to keep this helper self-contained
+        result = await session.execute(select(Order).filter_by(client_order_id=client_order_id))
+        return result.scalars().first()
+
+    async def _handle_order_cancelled(self, event: Any):
+        """Handle order cancellation events"""
+        try:
+            async with self.db_manager.get_session_context() as session:
+                order = await self._get_order_by_client_id(session, event.order_id)
+                if order:
+                    order.status = "CANCELLED"
+                    await session.commit()
+
+                logging.debug(f"Recorded order cancelled: {event.order_id}")
+        except Exception as e:
+            logging.error(f"Error recording order cancellation: {e}")
+
+    async def _handle_order_failed(self, event: Any):
+        """Handle order failure events"""
+        try:
+            async with self.db_manager.get_session_context() as session:
+                order = await self._get_order_by_client_id(session, event.order_id)
+                if order:
+                    order.status = "FAILED"
+                    order.error_message = getattr(event, 'error_message', None)
+                    await session.commit()
+
+                logging.debug(f"Recorded order failed: {event.order_id}")
+        except Exception as e:
+            logging.error(f"Error recording order failure: {e}")
+
+    async def _handle_order_completed(self, event: Any):
+        """Handle order completion events"""
+        try:
+            async with self.db_manager.get_session_context() as session:
+                order = await self._get_order_by_client_id(session, event.order_id)
+                if order:
+                    order.status = "FILLED"
+                    order.exchange_order_id = getattr(event, 'exchange_order_id', None)
+                    await session.commit()
+
+                logging.debug(f"Recorded order completed: {event.order_id}")
+        except Exception as e:
+            logging.error(f"Error recording order completion: {e}")
\ No newline at end of file

From 6e169c18c69fb02ea1cb9e355e3eed62012ab82c Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Fri, 13 Jun 2025 22:08:15 +0800
Subject: [PATCH 080/244] (feat) normalize methods

---
 services/bots_orchestrator.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/services/bots_orchestrator.py b/services/bots_orchestrator.py
index a5aa1215..c324eff6 100644
--- a/services/bots_orchestrator.py
+++ b/services/bots_orchestrator.py
@@ -53,7 +53,7 @@ def _sync_get_active_containers(self):
             if container.status == "running" and self.hummingbot_containers_fiter(container)
         ]
 
-    def start_update_active_bots_loop(self):
+    def start(self):
         """Start the loop that monitors active bots."""
         # Start MQTT manager and update loop in async context
         self._update_bots_task = asyncio.create_task(self._start_async())
@@ -66,7 +66,7 @@ async def _start_async(self):
         # 
Then start the update loop await self.update_active_bots() - def stop_update_active_bots_loop(self): + def stop(self): """Stop the active bots monitoring loop.""" if self._update_bots_task: self._update_bots_task.cancel() From e71c3362c31bdeb7406d9f5ba6c960c37b7a85d7 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 13 Jun 2025 22:12:19 +0800 Subject: [PATCH 081/244] (feat) add trade request models --- models/bot.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/models/bot.py b/models/bot.py index df66aefb..acad6577 100644 --- a/models/bot.py +++ b/models/bot.py @@ -1,6 +1,8 @@ from typing import Any, Dict, Optional from pydantic import BaseModel, Field from enum import Enum +from decimal import Decimal +from hummingbot.core.data_type.common import OrderType, TradeType class ControllerType(str, Enum): @@ -45,6 +47,30 @@ class ControllerConfig(ConfigContent): pass +class TradeRequest(BaseModel): + """Request model for placing trades""" + account_name: str = Field(description="Name of the account to trade with") + connector_name: str = Field(description="Name of the connector/exchange") + trading_pair: str = Field(description="Trading pair (e.g., BTC-USDT)") + trade_type: TradeType = Field(description="Whether to buy or sell") + amount: Decimal = Field(description="Amount to trade", gt=0) + order_type: OrderType = Field(default=OrderType.LIMIT, description="Type of order") + price: Optional[Decimal] = Field(default=None, description="Price for limit orders") + + +class TradeResponse(BaseModel): + """Response model for trade execution""" + order_id: str = Field(description="Client order ID assigned by the connector") + account_name: str = Field(description="Account used for the trade") + connector_name: str = Field(description="Connector used for the trade") + trading_pair: str = Field(description="Trading pair") + trade_type: TradeType = Field(description="Trade type") + amount: Decimal = Field(description="Trade amount") + order_type: OrderType = Field(description="Order type") + price: Optional[Decimal] = Field(description="Order price") + status: str = Field(default="submitted", description="Order status") + + class BotAction(BaseModel): bot_name: str = Field(description="Name of the bot instance to act upon") From 06044e27278465717164cb1a68b3d5de3ba93faa Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 13 Jun 2025 22:18:51 +0800 Subject: [PATCH 082/244] (feat) add market data feed as dependency --- deps.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/deps.py b/deps.py index f1470a82..17702b14 100644 --- a/deps.py +++ b/deps.py @@ -2,6 +2,7 @@ from services.bots_orchestrator import BotsOrchestrator from services.accounts_service import AccountsService from services.docker_service import DockerService +from services.market_data_feed_manager import MarketDataFeedManager from utils.bot_archiver import BotArchiver @@ -20,6 +21,11 @@ def get_docker_service(request: Request) -> DockerService: return request.app.state.docker_service +def get_market_data_feed_manager(request: Request) -> MarketDataFeedManager: + """Get MarketDataFeedManager from app state.""" + return request.app.state.market_data_feed_manager + + def get_bot_archiver(request: Request) -> BotArchiver: """Get BotArchiver from app state.""" return request.app.state.bot_archiver \ No newline at end of file From d8304bf60d3472c7554bced4a1d71fbd90baa677 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 13 Jun 2025 22:19:06 +0800 Subject: [PATCH 083/244] (feat) use renamed methods 
in main lifespan --- main.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/main.py b/main.py index 3f0ee6ad..5b56d962 100644 --- a/main.py +++ b/main.py @@ -105,15 +105,15 @@ async def lifespan(app: FastAPI): app.state.market_data_feed_manager = market_data_feed_manager # Start services - bots_orchestrator.start_update_active_bots_loop() - accounts_service.start_update_account_state_loop() + bots_orchestrator.start() + accounts_service.start() market_data_feed_manager.start() yield # Shutdown services - bots_orchestrator.stop_update_active_bots_loop() - accounts_service.stop_update_account_state_loop() + bots_orchestrator.stop() + await accounts_service.stop() # Stop market data feed manager (which will stop all feeds) market_data_feed_manager.stop() From 2d274a47ee2a5f60aad22d36e1b0dd9452a4651c Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 13 Jun 2025 22:41:33 +0800 Subject: [PATCH 084/244] (feat) add position action to support perps --- models/bot.py | 3 +- utils/connector_manager.py | 177 ++++++++++++++++++++++++++++++++++++- 2 files changed, 176 insertions(+), 4 deletions(-) diff --git a/models/bot.py b/models/bot.py index acad6577..6423705c 100644 --- a/models/bot.py +++ b/models/bot.py @@ -2,7 +2,7 @@ from pydantic import BaseModel, Field from enum import Enum from decimal import Decimal -from hummingbot.core.data_type.common import OrderType, TradeType +from hummingbot.core.data_type.common import OrderType, TradeType, PositionAction class ControllerType(str, Enum): @@ -56,6 +56,7 @@ class TradeRequest(BaseModel): amount: Decimal = Field(description="Amount to trade", gt=0) order_type: OrderType = Field(default=OrderType.LIMIT, description="Type of order") price: Optional[Decimal] = Field(default=None, description="Price for limit orders") + position_action: Optional[PositionAction] = Field(default=PositionAction.OPEN, description="Position action for perpetual contracts (OPEN/CLOSE)") class TradeResponse(BaseModel): diff --git a/utils/connector_manager.py b/utils/connector_manager.py index 6a9729ab..26a5f4a4 100644 --- a/utils/connector_manager.py +++ b/utils/connector_manager.py @@ -1,24 +1,33 @@ +import asyncio import logging -from typing import Dict, Optional +from typing import Dict, List, Optional, Tuple from hummingbot.client.config.client_config_map import ClientConfigMap from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger from hummingbot.client.config.config_helpers import ClientConfigAdapter, ReadOnlyClientConfigAdapter, get_connector_class from hummingbot.client.settings import AllConnectorSettings +from hummingbot.connector.connector_base import ConnectorBase +from hummingbot.connector.exchange_base import ExchangeBase +from hummingbot.connector.exchange_py_base import ExchangePyBase +from hummingbot.core.utils.async_utils import safe_ensure_future from utils.backend_api_config_adapter import BackendAPIConfigAdapter from utils.security import BackendAPISecurity +from utils.file_system import FileSystemUtil class ConnectorManager: """ Manages the creation and caching of exchange connectors. Handles connector configuration and initialization. + This is the single source of truth for all connector instances. 
""" def __init__(self, secrets_manager: ETHKeyFileSecretManger): self.secrets_manager = secrets_manager - self._connector_cache: Dict[str, Dict[str, any]] = {} + self._connector_cache: Dict[str, ConnectorBase] = {} + self._orders_recorders: Dict[str, any] = {} + self._file_system = FileSystemUtil() def get_connector(self, account_name: str, connector_name: str): """ @@ -121,4 +130,166 @@ async def update_connector_keys(self, account_name: str, connector_name: str, ke new_connector = self.get_connector(account_name, connector_name) await new_connector._update_balances() - return new_connector \ No newline at end of file + return new_connector + + def list_account_connectors(self, account_name: str) -> List[str]: + """ + List all initialized connectors for a specific account. + + :param account_name: The name of the account. + :return: List of connector names. + """ + connectors = [] + for cache_key in self._connector_cache.keys(): + acc_name, conn_name = cache_key.split(":", 1) + if acc_name == account_name: + connectors.append(conn_name) + return connectors + + def get_all_connectors(self) -> Dict[str, Dict[str, ConnectorBase]]: + """ + Get all connectors organized by account. + + :return: Dictionary mapping account names to their connectors. + """ + result = {} + for cache_key, connector in self._connector_cache.items(): + account_name, connector_name = cache_key.split(":", 1) + if account_name not in result: + result[account_name] = {} + result[account_name][connector_name] = connector + return result + + def is_connector_initialized(self, account_name: str, connector_name: str) -> bool: + """ + Check if a connector is already initialized and cached. + + :param account_name: The name of the account. + :param connector_name: The name of the connector. + :return: True if the connector is initialized, False otherwise. + """ + cache_key = f"{account_name}:{connector_name}" + return cache_key in self._connector_cache + + def get_connector_state(self, account_name: str, connector_name: str) -> Dict[str, any]: + """ + Get the current state of a connector (balances, trading rules, etc). + + :param account_name: The name of the account. + :param connector_name: The name of the connector. + :return: Dictionary containing connector state information. + """ + connector = self.get_connector(account_name, connector_name) + + return { + "balances": {k: float(v) for k, v in connector.get_all_balances().items()}, + "available_balances": {k: float(connector.get_available_balance(k)) + for k in connector.get_all_balances().keys()}, + "is_ready": connector.ready, + "name": connector.name, + "trading_required": connector.is_trading_required + } + + async def initialize_connector_with_tracking(self, account_name: str, connector_name: str, db_manager=None) -> ConnectorBase: + """ + Initialize a connector with order tracking infrastructure. + This includes creating the connector, starting its network, and setting up order recording. + + :param account_name: The name of the account. + :param connector_name: The name of the connector. + :param db_manager: Database manager for order recording (optional). + :return: The initialized connector instance. 
+ """ + # Get or create the connector + connector = self.get_connector(account_name, connector_name) + + # Start order tracking if db_manager provided + if db_manager: + cache_key = f"{account_name}:{connector_name}" + if cache_key not in self._orders_recorders: + # Import OrdersRecorder dynamically to avoid circular imports + from services.orders_recorder import OrdersRecorder + + # Create and start orders recorder + orders_recorder = OrdersRecorder(db_manager, account_name, connector_name) + orders_recorder.start(connector) + self._orders_recorders[cache_key] = orders_recorder + + # Start the connector's network without order book tracker + self._start_network_without_order_book(connector) + + # Update initial balances + await connector._update_balances() + + logging.info(f"Initialized connector {connector_name} for account {account_name} with tracking") + return connector + + def _start_network_without_order_book(self, connector: ExchangePyBase): + """ + Start connector network tasks except the order book tracker. + This avoids issues when there are no trading pairs configured. + """ + try: + # Start only the essential polling tasks if trading is required + connector._trading_rules_polling_task = safe_ensure_future(connector._trading_rules_polling_loop()) + connector._trading_fees_polling_task = safe_ensure_future(connector._trading_fees_polling_loop()) + connector._status_polling_task = safe_ensure_future(connector._status_polling_loop()) + connector._user_stream_tracker_task = connector._create_user_stream_tracker_task() + connector._user_stream_event_listener_task = safe_ensure_future(connector._user_stream_event_listener()) + connector._lost_orders_update_task = safe_ensure_future(connector._lost_orders_update_polling_loop()) + + logging.info(f"Started connector network without order book tracker") + + except Exception as e: + logging.error(f"Error starting connector network without order book: {e}") + + async def stop_connector(self, account_name: str, connector_name: str): + """ + Stop a connector and its associated services. + + :param account_name: The name of the account. + :param connector_name: The name of the connector. + """ + cache_key = f"{account_name}:{connector_name}" + + # Stop order recorder if exists + if cache_key in self._orders_recorders: + try: + await self._orders_recorders[cache_key].stop() + del self._orders_recorders[cache_key] + logging.info(f"Stopped order recorder for {account_name}/{connector_name}") + except Exception as e: + logging.error(f"Error stopping order recorder for {account_name}/{connector_name}: {e}") + + # Stop connector network if exists + if cache_key in self._connector_cache: + try: + connector = self._connector_cache[cache_key] + await connector.stop_network() + logging.info(f"Stopped connector network for {account_name}/{connector_name}") + except Exception as e: + logging.error(f"Error stopping connector network for {account_name}/{connector_name}: {e}") + + async def stop_all_connectors(self): + """ + Stop all connectors and their associated services. + """ + # Get all account/connector pairs + pairs = [(k.split(":", 1)[0], k.split(":", 1)[1]) for k in self._connector_cache.keys()] + + # Stop each connector + for account_name, connector_name in pairs: + await self.stop_connector(account_name, connector_name) + + def list_available_credentials(self, account_name: str) -> List[str]: + """ + List all available connector credentials for an account. + + :param account_name: The name of the account. 
+ :return: List of connector names that have credentials. + """ + try: + files = self._file_system.list_files(f'credentials/{account_name}/connectors') + return [file.replace('.yml', '') for file in files if file.endswith('.yml')] + except FileNotFoundError: + return [] \ No newline at end of file From 8df2db0f71ec0ec156a45f8564dbf29c07507fda Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 13 Jun 2025 22:41:51 +0800 Subject: [PATCH 085/244] (feat) add position action to place trade --- routers/accounts.py | 1 + 1 file changed, 1 insertion(+) diff --git a/routers/accounts.py b/routers/accounts.py index 93f8b09f..c14d7d11 100644 --- a/routers/accounts.py +++ b/routers/accounts.py @@ -281,6 +281,7 @@ async def place_trade(trade_request: TradeRequest, amount=trade_request.amount, order_type=trade_request.order_type, price=trade_request.price, + position_action=trade_request.position_action, market_data_manager=market_data_manager ) From 9cdf4f833d6b14ed5c3b86b63e73d942eb1a7693 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 13 Jun 2025 22:47:18 +0800 Subject: [PATCH 086/244] (feat) add trading rules --- services/market_data_feed_manager.py | 150 ++++++++++++++++++++++++++- 1 file changed, 149 insertions(+), 1 deletion(-) diff --git a/services/market_data_feed_manager.py b/services/market_data_feed_manager.py index 01e9825b..9ca152bd 100644 --- a/services/market_data_feed_manager.py +++ b/services/market_data_feed_manager.py @@ -1,8 +1,9 @@ import asyncio import time -from typing import Dict, Optional, Any, Callable +from typing import Dict, Optional, Any, Callable, List, Set import logging from enum import Enum +from decimal import Decimal from hummingbot.data_feed.candles_feed.data_types import CandlesConfig from hummingbot.data_feed.market_data_provider import MarketDataProvider @@ -168,6 +169,153 @@ def get_order_book_snapshot(self, connector_name: str, trading_pair: str): self.logger.debug(f"Accessed order book snapshot: {feed_key}") return snapshot + async def get_trading_rules(self, connector_name: str, trading_pairs: Optional[List[str]] = None) -> Dict[str, Dict]: + """ + Get trading rules for specified trading pairs from a connector. + + Args: + connector_name: Name of the connector + trading_pairs: List of trading pairs to get rules for. If None, get all available. 
+ + Returns: + Dictionary mapping trading pairs to their trading rules + """ + try: + # Access connector through MarketDataProvider's _rate_sources LazyDict + connector = self.market_data_provider._rate_sources[connector_name] + + # Update trading rules to ensure we have the latest data + await connector._update_trading_rules() + + # Get trading rules + if trading_pairs: + # Get rules for specific trading pairs + result = {} + for trading_pair in trading_pairs: + if trading_pair in connector.trading_rules: + rule = connector.trading_rules[trading_pair] + result[trading_pair] = { + "min_order_size": float(rule.min_order_size), + "max_order_size": float(rule.max_order_size) if rule.max_order_size else None, + "min_price_increment": float(rule.min_price_increment), + "min_base_amount_increment": float(rule.min_base_amount_increment), + "min_notional_size": float(rule.min_notional_size), + "max_price_significant_digits": rule.max_price_significant_digits, + "max_quantity_significant_digits": rule.max_quantity_significant_digits, + "supports_limit_orders": rule.supports_limit_orders, + "supports_market_orders": rule.supports_market_orders, + } + else: + result[trading_pair] = {"error": f"Trading pair {trading_pair} not found"} + else: + # Get all trading rules + result = {} + for trading_pair, rule in connector.trading_rules.items(): + result[trading_pair] = { + "min_order_size": float(rule.min_order_size), + "max_order_size": float(rule.max_order_size) if rule.max_order_size else None, + "min_price_increment": float(rule.min_price_increment), + "min_base_amount_increment": float(rule.min_base_amount_increment), + "min_notional_size": float(rule.min_notional_size), + "max_price_significant_digits": rule.max_price_significant_digits, + "max_quantity_significant_digits": rule.max_quantity_significant_digits, + "supports_limit_orders": rule.supports_limit_orders, + "supports_market_orders": rule.supports_market_orders, + } + + self.logger.debug(f"Retrieved trading rules for {connector_name}: {len(result)} pairs") + return result + + except Exception as e: + self.logger.error(f"Error getting trading rules for {connector_name}: {e}") + return {"error": str(e)} + + async def get_prices(self, connector_name: str, trading_pairs: List[str]) -> Dict[str, float]: + """ + Get current prices for specified trading pairs. + + Args: + connector_name: Name of the connector + trading_pairs: List of trading pairs to get prices for + + Returns: + Dictionary mapping trading pairs to their current prices + """ + try: + # Access connector through MarketDataProvider's _rate_sources LazyDict + connector = self.market_data_provider._rate_sources[connector_name] + + # Get last traded prices + prices = await connector.get_last_traded_prices(trading_pairs) + + # Convert Decimal to float for JSON serialization + result = {pair: float(price) for pair, price in prices.items()} + + self.logger.debug(f"Retrieved prices for {connector_name}: {len(result)} pairs") + return result + + except Exception as e: + self.logger.error(f"Error getting prices for {connector_name}: {e}") + return {"error": str(e)} + + def get_order_book_data(self, connector_name: str, trading_pair: str, depth: int = 10) -> Dict: + """ + Get order book data using the connector's order book data source. 
+ + Args: + connector_name: Name of the connector + trading_pair: Trading pair to get order book for + depth: Number of bid/ask levels to return + + Returns: + Dictionary containing bid and ask data + """ + try: + # Access connector through MarketDataProvider's _rate_sources LazyDict + connector = self.market_data_provider._rate_sources[connector_name] + + # Access the order book data source + if hasattr(connector, '_orderbook_ds') and connector._orderbook_ds: + orderbook_ds = connector._orderbook_ds + + # Check if the trading pair is available in the order book data source + if trading_pair in orderbook_ds: + orderbook = orderbook_ds[trading_pair] + + # Get bid and ask data + bids = [] + asks = [] + + # Get top bids (highest prices first) + for i, (price, amount) in enumerate(orderbook.bid_entries()): + if i >= depth: + break + bids.append({"price": float(price), "amount": float(amount)}) + + # Get top asks (lowest prices first) + for i, (price, amount) in enumerate(orderbook.ask_entries()): + if i >= depth: + break + asks.append({"price": float(price), "amount": float(amount)}) + + result = { + "trading_pair": trading_pair, + "bids": bids, + "asks": asks, + "timestamp": time.time() + } + + self.logger.debug(f"Retrieved order book for {connector_name}/{trading_pair}") + return result + else: + return {"error": f"Trading pair {trading_pair} not found in order book data source"} + else: + return {"error": f"Order book data source not available for {connector_name}"} + + except Exception as e: + self.logger.error(f"Error getting order book for {connector_name}/{trading_pair}: {e}") + return {"error": str(e)} + async def _cleanup_loop(self): """Background task that periodically cleans up unused feeds.""" while self._is_running: From 3c7777b13953564a0d3873e001fe88f8e837d8fb Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 13 Jun 2025 22:52:11 +0800 Subject: [PATCH 087/244] (feat) update services --- services/accounts_service.py | 91 ++++++++++------ services/orders_recorder.py | 198 ++++++++++++++++++++++++++--------- 2 files changed, 205 insertions(+), 84 deletions(-) diff --git a/services/accounts_service.py b/services/accounts_service.py index 98920b66..e9efef04 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -6,7 +6,8 @@ from fastapi import HTTPException from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger -from hummingbot.core.data_type.common import OrderType, TradeType +from hummingbot.core.data_type.common import OrderType, TradeType, PositionAction +from sqlalchemy import select from config import settings from database import AsyncDatabaseManager, AccountRepository, Order, Trade @@ -527,7 +528,8 @@ async def get_portfolio_value(self, account_name: Optional[str] = None) -> Dict[ async def place_trade(self, account_name: str, connector_name: str, trading_pair: str, trade_type: TradeType, amount: Decimal, order_type: OrderType = OrderType.LIMIT, - price: Optional[Decimal] = None, market_data_manager = None) -> str: + price: Optional[Decimal] = None, position_action: Optional[PositionAction] = None, + market_data_manager = None) -> str: """ Place a trade using the specified account and connector. 
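A minimal usage sketch of the signature above, assuming an already-initialized AccountsService; the account and connector names are illustrative, not taken from this patch:

    from decimal import Decimal

    from hummingbot.core.data_type.common import OrderType, PositionAction, TradeType

    async def open_perp_long(accounts_service):
        # position_action is forwarded only for perpetual connectors; the hunk
        # below shows spot connectors taking the branch that omits it.
        return await accounts_service.place_trade(
            account_name="master_account",       # illustrative
            connector_name="binance_perpetual",  # illustrative
            trading_pair="BTC-USDT",
            trade_type=TradeType.BUY,
            amount=Decimal("0.01"),
            order_type=OrderType.LIMIT,
            price=Decimal("60000"),
            position_action=PositionAction.OPEN,
        )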
@@ -576,21 +578,46 @@ async def place_trade(self, account_name: str, connector_name: str, trading_pair price = Decimal("0") try: + # Check if this is a perpetual connector that needs position_action + is_perpetual = "_perpetual" in connector_name + + # Use default position action if not specified and it's a perpetual connector + if is_perpetual and position_action is None: + position_action = PositionAction.OPEN + # Place the order using the connector if trade_type == TradeType.BUY: - order_id = connector.buy( - trading_pair=trading_pair, - amount=amount, - order_type=order_type, - price=price or Decimal("0") - ) + if is_perpetual: + order_id = connector.buy( + trading_pair=trading_pair, + amount=amount, + order_type=order_type, + price=price or Decimal("0"), + position_action=position_action + ) + else: + order_id = connector.buy( + trading_pair=trading_pair, + amount=amount, + order_type=order_type, + price=price or Decimal("0") + ) else: - order_id = connector.sell( - trading_pair=trading_pair, - amount=amount, - order_type=order_type, - price=price or Decimal("0") - ) + if is_perpetual: + order_id = connector.sell( + trading_pair=trading_pair, + amount=amount, + order_type=order_type, + price=price or Decimal("0"), + position_action=position_action + ) + else: + order_id = connector.sell( + trading_pair=trading_pair, + amount=amount, + order_type=order_type, + price=price or Decimal("0") + ) # Wait briefly to check for immediate failures await asyncio.sleep(0.5) @@ -683,31 +710,31 @@ async def get_orders(self, account_name: Optional[str] = None, market: Optional[ try: async with self.db_manager.get_session_context() as session: - query = session.query(Order) + query = select(Order) # Filter by account name if specified if account_name: - query = query.filter(Order.account_name == account_name) + query = query.where(Order.account_name == account_name) # Filter by connector name if specified if market: - query = query.filter(Order.connector_name == market) + query = query.where(Order.connector_name == market) # Filter by trading pair if specified if symbol: - query = query.filter(Order.trading_pair == symbol) + query = query.where(Order.trading_pair == symbol) # Filter by status if specified if status: - query = query.filter(Order.status == status) + query = query.where(Order.status == status) # Filter by time range if specified if start_time: start_dt = datetime.fromtimestamp(start_time / 1000) # Convert from milliseconds - query = query.filter(Order.created_at >= start_dt) + query = query.where(Order.created_at >= start_dt) if end_time: end_dt = datetime.fromtimestamp(end_time / 1000) # Convert from milliseconds - query = query.filter(Order.created_at <= end_dt) + query = query.where(Order.created_at <= end_dt) query = query.order_by(Order.created_at.desc()) query = query.limit(limit).offset(offset) @@ -749,21 +776,21 @@ async def get_active_orders_history(self, account_name: Optional[str] = None, ma try: async with self.db_manager.get_session_context() as session: - query = session.query(Order).filter( + query = select(Order).where( Order.status.in_(["SUBMITTED", "OPEN", "PARTIALLY_FILLED"]) ) # Filter by account name if specified if account_name: - query = query.filter(Order.account_name == account_name) + query = query.where(Order.account_name == account_name) # Filter by connector name if specified if market: - query = query.filter(Order.connector_name == market) + query = query.where(Order.connector_name == market) # Filter by trading pair if specified if symbol: - query = 
query.filter(Order.trading_pair == symbol) + query = query.where(Order.trading_pair == symbol) query = query.order_by(Order.created_at.desc()) query = query.limit(1000) @@ -833,31 +860,31 @@ async def get_trades(self, account_name: Optional[str] = None, market: Optional[ try: async with self.db_manager.get_session_context() as session: # Join trades with orders to get account information - query = session.query(Trade).join(Order, Trade.order_id == Order.id) + query = select(Trade).join(Order, Trade.order_id == Order.id) # Filter by account name if specified if account_name: - query = query.filter(Order.account_name == account_name) + query = query.where(Order.account_name == account_name) # Filter by connector name if specified if market: - query = query.filter(Order.connector_name == market) + query = query.where(Order.connector_name == market) # Filter by trading pair if specified if symbol: - query = query.filter(Trade.trading_pair == symbol) + query = query.where(Trade.trading_pair == symbol) # Filter by trade type if specified if trade_type: - query = query.filter(Trade.trade_type == trade_type) + query = query.where(Trade.trade_type == trade_type) # Filter by time range if specified if start_time: start_dt = datetime.fromtimestamp(start_time / 1000) # Convert from milliseconds - query = query.filter(Trade.timestamp >= start_dt) + query = query.where(Trade.timestamp >= start_dt) if end_time: end_dt = datetime.fromtimestamp(end_time / 1000) # Convert from milliseconds - query = query.filter(Trade.timestamp <= end_dt) + query = query.where(Trade.timestamp <= end_dt) query = query.order_by(Trade.timestamp.desc()) query = query.limit(limit).offset(offset) diff --git a/services/orders_recorder.py b/services/orders_recorder.py index 4db16c42..fbd3af52 100644 --- a/services/orders_recorder.py +++ b/services/orders_recorder.py @@ -1,10 +1,10 @@ import asyncio import logging -from typing import Dict, Any, Optional +from typing import Any, Optional, Union from decimal import Decimal from datetime import datetime -from hummingbot.core.event.event_listener import EventListener +from hummingbot.core.event.event_forwarder import SourceInfoEventForwarder from hummingbot.core.event.events import ( OrderType, TradeType, @@ -14,81 +14,127 @@ OrderCancelledEvent, MarketEvent, BuyOrderCompletedEvent, - SellOrderCompletedEvent + SellOrderCompletedEvent, + MarketOrderFailureEvent ) from hummingbot.connector.connector_base import ConnectorBase +from sqlalchemy import select from database import AsyncDatabaseManager from database.models import Order, Trade -class OrdersRecorder(EventListener): +class OrdersRecorder: """ Custom orders recorder that mimics Hummingbot's MarketsRecorder functionality but uses our AsyncDatabaseManager for storage. 
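+    Order events arrive through SourceInfoEventForwarder callbacks and are
+    persisted asynchronously via the AsyncDatabaseManager session context.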
""" def __init__(self, db_manager: AsyncDatabaseManager, account_name: str, connector_name: str): - super().__init__() self.db_manager = db_manager self.account_name = account_name self.connector_name = connector_name self._connector: Optional[ConnectorBase] = None - self._session = None + + # Create event forwarders similar to MarketsRecorder + self._create_order_forwarder = SourceInfoEventForwarder(self._did_create_order) + self._fill_order_forwarder = SourceInfoEventForwarder(self._did_fill_order) + self._cancel_order_forwarder = SourceInfoEventForwarder(self._did_cancel_order) + self._fail_order_forwarder = SourceInfoEventForwarder(self._did_fail_order) + self._complete_order_forwarder = SourceInfoEventForwarder(self._did_complete_order) + + # Event pairs mapping events to forwarders + self._event_pairs = [ + (MarketEvent.BuyOrderCreated, self._create_order_forwarder), + (MarketEvent.SellOrderCreated, self._create_order_forwarder), + (MarketEvent.OrderFilled, self._fill_order_forwarder), + (MarketEvent.OrderCancelled, self._cancel_order_forwarder), + (MarketEvent.OrderFailure, self._fail_order_forwarder), + (MarketEvent.BuyOrderCompleted, self._complete_order_forwarder), + (MarketEvent.SellOrderCompleted, self._complete_order_forwarder), + ] def start(self, connector: ConnectorBase): """Start recording orders for the given connector""" self._connector = connector - # Subscribe to order events - connector.add_listener(MarketEvent.BuyOrderCreated, self) - connector.add_listener(MarketEvent.SellOrderCreated, self) - connector.add_listener(MarketEvent.OrderFilled, self) - connector.add_listener(MarketEvent.OrderCancelled, self) - connector.add_listener(MarketEvent.OrderFailure, self) - connector.add_listener(MarketEvent.BuyOrderCompleted, self) - connector.add_listener(MarketEvent.SellOrderCompleted, self) + # Subscribe to order events using the same pattern as MarketsRecorder + for event, forwarder in self._event_pairs: + connector.add_listener(event, forwarder) + logging.info(f"OrdersRecorder: Added listener for {event} with forwarder {forwarder}") + + # Debug: Check if listeners were actually added + if hasattr(connector, '_event_listeners'): + listeners = connector._event_listeners.get(event, []) + logging.info(f"OrdersRecorder: Event {event} now has {len(listeners)} listeners") + for i, listener in enumerate(listeners): + logging.info(f"OrdersRecorder: Listener {i}: {listener}") + + logging.info(f"OrdersRecorder started for {self.account_name}/{self.connector_name} with {len(self._event_pairs)} event listeners") - logging.info(f"OrdersRecorder started for {self.account_name}/{self.connector_name}") + # Debug: Print connector info + logging.info(f"OrdersRecorder: Connector type: {type(connector)}") + logging.info(f"OrdersRecorder: Connector name: {getattr(connector, 'name', 'unknown')}") + logging.info(f"OrdersRecorder: Connector ready: {getattr(connector, 'ready', 'unknown')}") + + # Test if forwarders are callable + for event, forwarder in self._event_pairs: + if callable(forwarder): + logging.info(f"OrdersRecorder: Forwarder for {event} is callable") + else: + logging.error(f"OrdersRecorder: Forwarder for {event} is NOT callable: {type(forwarder)}") async def stop(self): """Stop recording orders""" if self._connector: # Remove all event listeners - self._connector.remove_listener(MarketEvent.BuyOrderCreated, self) - self._connector.remove_listener(MarketEvent.SellOrderCreated, self) - self._connector.remove_listener(MarketEvent.OrderFilled, self) - 
self._connector.remove_listener(MarketEvent.OrderCancelled, self) - self._connector.remove_listener(MarketEvent.OrderFailure, self) - self._connector.remove_listener(MarketEvent.BuyOrderCompleted, self) - self._connector.remove_listener(MarketEvent.SellOrderCompleted, self) + for event, forwarder in self._event_pairs: + self._connector.remove_listener(event, forwarder) logging.info(f"OrdersRecorder stopped for {self.account_name}/{self.connector_name}") - async def __call__(self, event_tag: int, market: ConnectorBase, event: Any): - """Handle incoming events""" + def _did_create_order(self, event_tag: int, market: ConnectorBase, event: Union[BuyOrderCreatedEvent, SellOrderCreatedEvent]): + """Handle order creation events - called by SourceInfoEventForwarder""" + logging.info(f"OrdersRecorder: _did_create_order called for order {getattr(event, 'order_id', 'unknown')}") try: - if event_tag == MarketEvent.BuyOrderCreated.value: - await self._handle_order_created(event, TradeType.BUY) - elif event_tag == MarketEvent.SellOrderCreated.value: - await self._handle_order_created(event, TradeType.SELL) - elif event_tag == MarketEvent.OrderFilled.value: - await self._handle_order_filled(event) - elif event_tag == MarketEvent.OrderCancelled.value: - await self._handle_order_cancelled(event) - elif event_tag == MarketEvent.OrderFailure.value: - await self._handle_order_failed(event) - elif event_tag == MarketEvent.BuyOrderCompleted.value: - await self._handle_order_completed(event) - elif event_tag == MarketEvent.SellOrderCompleted.value: - await self._handle_order_completed(event) - else: - logging.error(f"Unknown event tag {event_tag} received, event {event}") + # Determine trade type from event + trade_type = TradeType.BUY if isinstance(event, BuyOrderCreatedEvent) else TradeType.SELL + logging.info(f"OrdersRecorder: Creating task to handle order created - {trade_type} order") + asyncio.create_task(self._handle_order_created(event, trade_type)) + except Exception as e: + logging.error(f"Error in _did_create_order: {e}") + + def _did_fill_order(self, event_tag: int, market: ConnectorBase, event: OrderFilledEvent): + """Handle order fill events - called by SourceInfoEventForwarder""" + try: + asyncio.create_task(self._handle_order_filled(event)) + except Exception as e: + logging.error(f"Error in _did_fill_order: {e}") + + def _did_cancel_order(self, event_tag: int, market: ConnectorBase, event: Any): + """Handle order cancel events - called by SourceInfoEventForwarder""" + try: + asyncio.create_task(self._handle_order_cancelled(event)) except Exception as e: - logging.error(f"Error handling event {event_tag}: {e}") + logging.error(f"Error in _did_cancel_order: {e}") - async def _handle_order_created(self, event: Any, trade_type: TradeType): + def _did_fail_order(self, event_tag: int, market: ConnectorBase, event: Any): + """Handle order failure events - called by SourceInfoEventForwarder""" + try: + asyncio.create_task(self._handle_order_failed(event)) + except Exception as e: + logging.error(f"Error in _did_fail_order: {e}") + + def _did_complete_order(self, event_tag: int, market: ConnectorBase, event: Any): + """Handle order completion events - called by SourceInfoEventForwarder""" + try: + asyncio.create_task(self._handle_order_completed(event)) + except Exception as e: + logging.error(f"Error in _did_complete_order: {e}") + + async def _handle_order_created(self, event: Union[BuyOrderCreatedEvent, SellOrderCreatedEvent], trade_type: TradeType): """Handle order creation events""" + 
logging.info(f"OrdersRecorder: _handle_order_created started for order {event.order_id}") try: async with self.db_manager.get_session_context() as session: order = Order( @@ -105,24 +151,63 @@ async def _handle_order_created(self, event: Any, trade_type: TradeType): session.add(order) await session.commit() - logging.debug(f"Recorded order created: {event.order_id}") + logging.info(f"OrdersRecorder: Successfully recorded order created: {event.order_id}") except Exception as e: - logging.error(f"Error recording order created: {e}") + logging.error(f"OrdersRecorder: Error recording order created: {e}") async def _handle_order_filled(self, event: OrderFilledEvent): """Handle order fill events""" try: async with self.db_manager.get_session_context() as session: # Update order with fill information - order = await session.get(Order, {"client_order_id": event.order_id}) + result = await session.execute( + select(Order).where(Order.client_order_id == event.order_id) + ) + order = result.scalar_one_or_none() if order: order.filled_amount = float(event.amount) order.average_fill_price = float(event.price) order.status = "FILLED" if event.amount >= Decimal(str(order.amount)) else "PARTIALLY_FILLED" - order.fee_paid = float(event.trade_fee.fee) if event.trade_fee else None - order.fee_currency = event.trade_fee.fee_asset if event.trade_fee else None + + # Calculate fee properly using the same method as MarketsRecorder + if event.trade_fee: + try: + base_asset, quote_asset = event.trading_pair.split("-") + fee_in_quote = event.trade_fee.fee_amount_in_token( + trading_pair=event.trading_pair, + price=event.price, + order_amount=event.amount, + token=quote_asset, + exchange=self._connector + ) + order.fee_paid = float(fee_in_quote) + order.fee_currency = quote_asset + except Exception as e: + logging.error(f"Error calculating fee in quote: {e}, will be stored as 0") + order.fee_paid = 0 + order.fee_currency = None # Create trade record + trade_fee_paid = 0 + trade_fee_currency = None + + if event.trade_fee: + try: + base_asset, quote_asset = event.trading_pair.split("-") + fee_in_quote = event.trade_fee.fee_amount_in_token( + trading_pair=event.trading_pair, + price=event.price, + order_amount=event.amount, + token=quote_asset, + exchange=self._connector + ) + trade_fee_paid = float(fee_in_quote) + trade_fee_currency = quote_asset + except Exception as e: + logging.error(f"Error calculating trade fee: {e}") + trade_fee_paid = 0 + trade_fee_currency = None + trade = Trade( order_id=order.id if order else None, trade_id=f"{event.order_id}_{event.timestamp}", @@ -131,8 +216,8 @@ async def _handle_order_filled(self, event: OrderFilledEvent): trade_type=event.trade_type.name, amount=float(event.amount), price=float(event.price), - fee_paid=float(event.trade_fee.fee) if event.trade_fee else 0, - fee_currency=event.trade_fee.fee_asset if event.trade_fee else None + fee_paid=trade_fee_paid, + fee_currency=trade_fee_currency ) session.add(trade) await session.commit() @@ -145,7 +230,10 @@ async def _handle_order_cancelled(self, event: Any): """Handle order cancellation events""" try: async with self.db_manager.get_session_context() as session: - order = await session.get(Order, {"client_order_id": event.order_id}) + result = await session.execute( + select(Order).where(Order.client_order_id == event.order_id) + ) + order = result.scalar_one_or_none() if order: order.status = "CANCELLED" await session.commit() @@ -158,7 +246,10 @@ async def _handle_order_failed(self, event: Any): """Handle order failure events""" 
try: async with self.db_manager.get_session_context() as session: - order = await session.get(Order, {"client_order_id": event.order_id}) + result = await session.execute( + select(Order).where(Order.client_order_id == event.order_id) + ) + order = result.scalar_one_or_none() if order: order.status = "FAILED" order.error_message = getattr(event, 'error_message', None) @@ -172,7 +263,10 @@ async def _handle_order_completed(self, event: Any): """Handle order completion events""" try: async with self.db_manager.get_session_context() as session: - order = await session.get(Order, {"client_order_id": event.order_id}) + result = await session.execute( + select(Order).where(Order.client_order_id == event.order_id) + ) + order = result.scalar_one_or_none() if order: order.status = "FILLED" order.exchange_order_id = getattr(event, 'exchange_order_id', None) From dfa5fd570dc9983ea20deb4df10703ac85a5f3a5 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 14 Jun 2025 08:16:42 +0200 Subject: [PATCH 088/244] (feat) clean up accounts service --- services/accounts_service.py | 529 ++++++++++++++--------------------- 1 file changed, 213 insertions(+), 316 deletions(-) diff --git a/services/accounts_service.py b/services/accounts_service.py index e9efef04..e28189ac 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -7,10 +7,10 @@ from fastapi import HTTPException from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger from hummingbot.core.data_type.common import OrderType, TradeType, PositionAction -from sqlalchemy import select from config import settings -from database import AsyncDatabaseManager, AccountRepository, Order, Trade +from database import AsyncDatabaseManager, AccountRepository, OrderRepository, TradeRepository +from services.market_data_feed_manager import MarketDataFeedManager from utils.connector_manager import ConnectorManager from utils.file_system import FileSystemUtil @@ -180,92 +180,57 @@ async def _ensure_account_connectors_initialized(self, account_name: str): await self.connector_manager.initialize_connector_with_tracking( account_name, connector_name, self.db_manager ) - await self._update_connector_balance(account_name, connector_name) + # Force initial balance update to ensure first dump has data + connector = self.connector_manager.get_connector(account_name, connector_name) + await connector._update_balances() except Exception as e: logging.error(f"Error initializing connector {connector_name} for account {account_name}: {e}") - async def _update_connector_balance(self, account_name: str, connector_name: str): - """ - Update balance for a specific connector and store in accounts_state. - This is called after connector initialization to get initial balance data. 
- """ - try: - tokens_info = [] - connector = self.connector_manager.get_connector(account_name, connector_name) - await connector._update_balances() - balances = [{"token": key, "units": value} for key, value in connector.get_all_balances().items() if - value != Decimal("0") and key not in settings.banned_tokens] - unique_tokens = [balance["token"] for balance in balances] - trading_pairs = [self.get_default_market(token, connector_name) for token in unique_tokens if "USD" not in token] - last_traded_prices = await self._safe_get_last_traded_prices(connector, trading_pairs) - - for balance in balances: - token = balance["token"] - if "USD" in token: - price = Decimal("1") - else: - market = self.get_default_market(balance["token"], connector_name) - price = Decimal(last_traded_prices.get(market, 0)) - tokens_info.append({ - "token": balance["token"], - "units": float(balance["units"]), - "price": float(price), - "value": float(price * balance["units"]), - "available_units": float(connector.get_available_balance(balance["token"])) - }) - - # Ensure account exists in accounts_state before assignment - if account_name not in self.accounts_state: - self.accounts_state[account_name] = {} - - self.accounts_state[account_name][connector_name] = tokens_info - - logging.info(f"Updated balance for {account_name}/{connector_name}: {len(tokens_info)} tokens") - - except Exception as e: - logging.error(f"Error updating balance for connector {connector_name} in account {account_name}: {e}") - # Set empty state if update fails - if account_name not in self.accounts_state: - self.accounts_state[account_name] = {} - self.accounts_state[account_name][connector_name] = [] async def update_account_state(self): - # Get all connectors from ConnectorManager + """Update account state for all connectors.""" all_connectors = self.connector_manager.get_all_connectors() for account_name, connectors in all_connectors.items(): if account_name not in self.accounts_state: self.accounts_state[account_name] = {} for connector_name, connector in connectors.items(): - tokens_info = [] try: - balances = [{"token": key, "units": value} for key, value in connector.get_all_balances().items() if - value != Decimal("0") and key not in settings.banned_tokens] - unique_tokens = [balance["token"] for balance in balances] - trading_pairs = [self.get_default_market(token, connector_name) for token in unique_tokens if "USD" not in token] - last_traded_prices = await self._safe_get_last_traded_prices(connector, trading_pairs) - for balance in balances: - token = balance["token"] - if "USD" in token: - price = Decimal("1") - else: - market = self.get_default_market(balance["token"], connector_name) - price = Decimal(last_traded_prices.get(market, 0)) - tokens_info.append({ - "token": balance["token"], - "units": float(balance["units"]), - "price": float(price), - "value": float(price * balance["units"]), - "available_units": float(connector.get_available_balance(balance["token"])) - }) + tokens_info = await self._get_connector_tokens_info(connector, connector_name) + self.accounts_state[account_name][connector_name] = tokens_info except Exception as e: - logging.error( - f"Error updating balances for connector {connector_name} in account {account_name}: {e}") - self.accounts_state[account_name][connector_name] = tokens_info - + logging.error(f"Error updating balances for connector {connector_name} in account {account_name}: {e}") + self.accounts_state[account_name][connector_name] = [] + + async def _get_connector_tokens_info(self, 
connector, connector_name: str) -> List[Dict]: + """Get token info from a connector instance.""" + balances = [{"token": key, "units": value} for key, value in connector.get_all_balances().items() if + value != Decimal("0") and key not in settings.banned_tokens] + unique_tokens = [balance["token"] for balance in balances] + trading_pairs = [self.get_default_market(token, connector_name) for token in unique_tokens if "USD" not in token] + last_traded_prices = await self._safe_get_last_traded_prices(connector, trading_pairs) + + tokens_info = [] + for balance in balances: + token = balance["token"] + if "USD" in token: + price = Decimal("1") + else: + market = self.get_default_market(balance["token"], connector_name) + price = Decimal(last_traded_prices.get(market, 0)) + tokens_info.append({ + "token": balance["token"], + "units": float(balance["units"]), + "price": float(price), + "value": float(price * balance["units"]), + "available_units": float(connector.get_available_balance(balance["token"])) + }) + return tokens_info + async def _safe_get_last_traded_prices(self, connector, trading_pairs, timeout=10): + """Safely get last traded prices with timeout and error handling.""" try: last_traded = await asyncio.wait_for(connector.get_last_traded_prices(trading_pairs=trading_pairs), timeout=timeout) return last_traded @@ -299,10 +264,9 @@ async def add_credentials(self, account_name: str, connector_name: str, credenti await self.connector_manager.initialize_connector_with_tracking( account_name, connector_name, self.db_manager ) - await self._update_connector_balance(account_name, connector_name) - - - + # Force initial balance update to ensure first dump has data + connector = self.connector_manager.get_connector(account_name, connector_name) + await connector._update_balances() @staticmethod def list_accounts(): """ @@ -311,7 +275,8 @@ def list_accounts(): """ return file_system.list_folders('credentials') - def list_credentials(self, account_name: str): + @staticmethod + def list_credentials(account_name: str): """ List all the credentials that are connected to the specified account. :param account_name: The name of the account. @@ -528,8 +493,8 @@ async def get_portfolio_value(self, account_name: Optional[str] = None) -> Dict[ async def place_trade(self, account_name: str, connector_name: str, trading_pair: str, trade_type: TradeType, amount: Decimal, order_type: OrderType = OrderType.LIMIT, - price: Optional[Decimal] = None, position_action: Optional[PositionAction] = None, - market_data_manager = None) -> str: + price: Optional[Decimal] = None, position_action: PositionAction = PositionAction.OPEN, + market_data_manager: Optional[MarketDataFeedManager] = None) -> str: """ Place a trade using the specified account and connector. 
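The hunks below replace the post-submission failure check with up-front validation against the connector's trading rules. A compact sketch of the same quantize-and-check flow; the rule values here are invented for illustration, while the real ones come from connector.trading_rules[trading_pair]:

    from decimal import Decimal

    def quantize_and_validate(amount: Decimal, price: Decimal, step: Decimal,
                              min_order_size: Decimal, min_notional: Decimal) -> Decimal:
        # Round the amount down to the exchange's base-amount increment,
        # mirroring connector.quantize_order_amount().
        quantized = (amount // step) * step
        if quantized < min_order_size:
            raise ValueError(f"amount {quantized} below minimum size {min_order_size}")
        if quantized * price < min_notional:
            raise ValueError(f"notional {quantized * price} below minimum {min_notional}")
        return quantized

    # Example with invented BTC-USDT style limits; prints 0.01234
    print(quantize_and_validate(Decimal("0.012345"), Decimal("60000"),
                                Decimal("0.00001"), Decimal("0.0001"), Decimal("10")))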
@@ -541,6 +506,8 @@ async def place_trade(self, account_name: str, connector_name: str, trading_pair amount: Amount to trade order_type: "LIMIT", "MARKET", or "LIMIT_MAKER" price: Price for limit orders (required for LIMIT and LIMIT_MAKER) + position_action: Position action for perpetual contracts (OPEN/CLOSE) + market_data_manager: Market data manager for price fetching Returns: Client order ID assigned by the connector @@ -563,73 +530,86 @@ async def place_trade(self, account_name: str, connector_name: str, trading_pair if order_type in [OrderType.LIMIT, OrderType.LIMIT_MAKER] and price is None: raise HTTPException(status_code=400, detail="Price is required for LIMIT and LIMIT_MAKER orders") - # For market orders without price, get current market price - if order_type == OrderType.MARKET and price is None and market_data_manager: - try: - prices = await market_data_manager.get_prices(connector_name, [trading_pair]) - if trading_pair in prices and "error" not in prices: - price = Decimal(str(prices[trading_pair])) - logging.info(f"Retrieved market price for {trading_pair}: {price}") - else: - logging.warning(f"Could not get market price for {trading_pair}, using 0") - price = Decimal("0") - except Exception as e: - logging.error(f"Error getting market price for {trading_pair}: {e}") - price = Decimal("0") - - try: - # Check if this is a perpetual connector that needs position_action - is_perpetual = "_perpetual" in connector_name - - # Use default position action if not specified and it's a perpetual connector - if is_perpetual and position_action is None: - position_action = PositionAction.OPEN + # Check if trading rules are loaded + if not connector.trading_rules: + raise HTTPException( + status_code=503, + detail=f"Trading rules not yet loaded for {connector_name}. Please try again in a moment." + ) + + # Validate trading pair and get trading rule + if trading_pair not in connector.trading_rules: + available_pairs = list(connector.trading_rules.keys())[:10] # Show first 10 + more_text = f" (and {len(connector.trading_rules) - 10} more)" if len(connector.trading_rules) > 10 else "" + raise HTTPException( + status_code=400, + detail=f"Trading pair '{trading_pair}' not supported on {connector_name}. " + f"Available pairs: {available_pairs}{more_text}" + ) + + trading_rule = connector.trading_rules[trading_pair] + + # Validate order type is supported + if order_type not in connector.supported_order_types(): + supported_types = [ot.name for ot in connector.supported_order_types()] + raise HTTPException(status_code=400, detail=f"Order type '{order_type.name}' not supported. 
Supported types: {supported_types}") + + # Quantize amount according to trading rules + quantized_amount = connector.quantize_order_amount(trading_pair, amount) + + # Validate minimum order size + if quantized_amount < trading_rule.min_order_size: + raise HTTPException( + status_code=400, + detail=f"Order amount {quantized_amount} is below minimum order size {trading_rule.min_order_size} for {trading_pair}" + ) + + # Calculate and validate notional size + if order_type in [OrderType.LIMIT, OrderType.LIMIT_MAKER]: + quantized_price = connector.quantize_order_price(trading_pair, price) + notional_size = quantized_price * quantized_amount + else: + # For market orders, use current price + current_price = connector.get_price(trading_pair, False) + notional_size = current_price * quantized_amount - # Place the order using the connector + if notional_size < trading_rule.min_notional_size: + raise HTTPException( + status_code=400, + detail=f"Order notional value {notional_size} is below minimum notional size {trading_rule.min_notional_size} for {trading_pair}. " + f"Increase the amount or price to meet the minimum requirement." + ) + + # For market orders without price, get current market price for validation + if order_type == OrderType.MARKET and price is None: + if market_data_manager: + try: + prices = await market_data_manager.get_prices(connector_name, [trading_pair]) + if trading_pair in prices and "error" not in prices: + price = Decimal(str(prices[trading_pair])) + except Exception as e: + logging.error(f"Error getting market price for {trading_pair}: {e}") + + try: + # Place the order using the connector with quantized values + # (position_action will be ignored by non-perpetual connectors) if trade_type == TradeType.BUY: - if is_perpetual: - order_id = connector.buy( - trading_pair=trading_pair, - amount=amount, - order_type=order_type, - price=price or Decimal("0"), - position_action=position_action - ) - else: - order_id = connector.buy( - trading_pair=trading_pair, - amount=amount, - order_type=order_type, - price=price or Decimal("0") - ) + order_id = connector.buy( + trading_pair=trading_pair, + amount=quantized_amount, + order_type=order_type, + price=price or Decimal("1"), + position_action=position_action + ) else: - if is_perpetual: - order_id = connector.sell( - trading_pair=trading_pair, - amount=amount, - order_type=order_type, - price=price or Decimal("0"), - position_action=position_action - ) - else: - order_id = connector.sell( - trading_pair=trading_pair, - amount=amount, - order_type=order_type, - price=price or Decimal("0") - ) - - # Wait briefly to check for immediate failures - await asyncio.sleep(0.5) - - # Check if order was immediately rejected or failed - if order_id in connector.in_flight_orders: - order = connector.in_flight_orders[order_id] - if hasattr(order, 'last_state') and order.last_state in ["FAILED", "CANCELLED"]: - error_msg = f"Order failed immediately: {getattr(order, 'last_failure_reason', 'Unknown error')}" - logging.error(error_msg) - raise HTTPException(status_code=400, detail=error_msg) - + order_id = connector.sell( + trading_pair=trading_pair, + amount=quantized_amount, + order_type=order_type, + price=price or Decimal("1"), + position_action=position_action + ) + logging.info(f"Placed {trade_type} order for {amount} {trading_pair} on {connector_name} (Account: {account_name}). 
Order ID: {order_id}") return order_id @@ -699,216 +679,133 @@ async def cancel_order(self, account_name: str, connector_name: str, except Exception as e: logging.error(f"Failed to cancel order {client_order_id}: {e}") raise HTTPException(status_code=500, detail=f"Failed to cancel order: {str(e)}") + + async def set_leverage(self, account_name: str, connector_name: str, + trading_pair: str, leverage: int) -> Dict[str, str]: + """ + Set leverage for a specific trading pair on a perpetual connector. + + Args: + account_name: Name of the account + connector_name: Name of the connector (must be perpetual) + trading_pair: Trading pair to set leverage for + leverage: Leverage value (typically 1-125) + + Returns: + Dictionary with success status and message + + Raises: + HTTPException: If account/connector not found, not perpetual, or operation fails + """ + # Validate this is a perpetual connector + if "_perpetual" not in connector_name: + raise HTTPException(status_code=400, detail=f"Connector '{connector_name}' is not a perpetual connector") + + connector = self.get_connector_instance(account_name, connector_name) + + # Check if connector has leverage functionality + if not hasattr(connector, '_execute_set_leverage'): + raise HTTPException(status_code=400, detail=f"Connector '{connector_name}' does not support leverage setting") + + try: + await connector._execute_set_leverage(trading_pair, leverage) + message = f"Leverage for {trading_pair} set to {leverage} on {connector_name}" + logging.info(f"Set leverage for {trading_pair} to {leverage} on {connector_name} (Account: {account_name})") + return {"status": "success", "message": message} + + except Exception as e: + logging.error(f"Failed to set leverage for {trading_pair} to {leverage}: {e}") + raise HTTPException(status_code=500, detail=f"Failed to set leverage: {str(e)}") async def get_orders(self, account_name: Optional[str] = None, market: Optional[str] = None, symbol: Optional[str] = None, status: Optional[str] = None, start_time: Optional[int] = None, end_time: Optional[int] = None, limit: int = 100, offset: int = 0) -> List[Dict]: - """Get order history using our AsyncDatabaseManager.""" + """Get order history using OrderRepository.""" await self.ensure_db_initialized() try: async with self.db_manager.get_session_context() as session: - query = select(Order) - - # Filter by account name if specified - if account_name: - query = query.where(Order.account_name == account_name) - - # Filter by connector name if specified - if market: - query = query.where(Order.connector_name == market) - - # Filter by trading pair if specified - if symbol: - query = query.where(Order.trading_pair == symbol) - - # Filter by status if specified - if status: - query = query.where(Order.status == status) - - # Filter by time range if specified - if start_time: - start_dt = datetime.fromtimestamp(start_time / 1000) # Convert from milliseconds - query = query.where(Order.created_at >= start_dt) - if end_time: - end_dt = datetime.fromtimestamp(end_time / 1000) # Convert from milliseconds - query = query.where(Order.created_at <= end_dt) - - query = query.order_by(Order.created_at.desc()) - query = query.limit(limit).offset(offset) - - result = await session.execute(query) - orders = result.scalars().all() - - # Convert to dict format - return [ - { - "order_id": order.client_order_id, - "account_name": order.account_name, - "connector_name": order.connector_name, - "trading_pair": order.trading_pair, - "trade_type": order.trade_type, - "order_type": 
order.order_type, - "amount": float(order.amount), - "price": float(order.price) if order.price else None, - "status": order.status, - "filled_amount": float(order.filled_amount), - "average_fill_price": float(order.average_fill_price) if order.average_fill_price else None, - "fee_paid": float(order.fee_paid) if order.fee_paid else None, - "fee_currency": order.fee_currency, - "created_at": order.created_at.isoformat(), - "updated_at": order.updated_at.isoformat(), - "exchange_order_id": order.exchange_order_id, - "error_message": order.error_message, - } - for order in orders - ] + order_repo = OrderRepository(session) + orders = await order_repo.get_orders( + account_name=account_name, + connector_name=market, + trading_pair=symbol, + status=status, + start_time=start_time, + end_time=end_time, + limit=limit, + offset=offset + ) + return [order_repo.to_dict(order) for order in orders] except Exception as e: logging.error(f"Error getting orders: {e}") return [] async def get_active_orders_history(self, account_name: Optional[str] = None, market: Optional[str] = None, symbol: Optional[str] = None) -> List[Dict]: - """Get active orders from database""" + """Get active orders from database using OrderRepository.""" await self.ensure_db_initialized() try: async with self.db_manager.get_session_context() as session: - query = select(Order).where( - Order.status.in_(["SUBMITTED", "OPEN", "PARTIALLY_FILLED"]) + order_repo = OrderRepository(session) + orders = await order_repo.get_active_orders( + account_name=account_name, + connector_name=market, + trading_pair=symbol ) - - # Filter by account name if specified - if account_name: - query = query.where(Order.account_name == account_name) - - # Filter by connector name if specified - if market: - query = query.where(Order.connector_name == market) - - # Filter by trading pair if specified - if symbol: - query = query.where(Order.trading_pair == symbol) - - query = query.order_by(Order.created_at.desc()) - query = query.limit(1000) - - result = await session.execute(query) - orders = result.scalars().all() - - # Convert to dict format using same structure as get_orders - return [ - { - "order_id": order.client_order_id, - "account_name": order.account_name, - "connector_name": order.connector_name, - "trading_pair": order.trading_pair, - "trade_type": order.trade_type, - "order_type": order.order_type, - "amount": float(order.amount), - "price": float(order.price) if order.price else None, - "status": order.status, - "filled_amount": float(order.filled_amount), - "average_fill_price": float(order.average_fill_price) if order.average_fill_price else None, - "fee_paid": float(order.fee_paid) if order.fee_paid else None, - "fee_currency": order.fee_currency, - "created_at": order.created_at.isoformat(), - "updated_at": order.updated_at.isoformat(), - "exchange_order_id": order.exchange_order_id, - "error_message": order.error_message, - } - for order in orders - ] + return [order_repo.to_dict(order) for order in orders] except Exception as e: logging.error(f"Error getting active orders: {e}") return [] async def get_orders_summary(self, account_name: Optional[str] = None, start_time: Optional[int] = None, end_time: Optional[int] = None) -> Dict: - """Get order summary statistics""" - orders = await self.get_orders( - account_name=account_name, - start_time=start_time, - end_time=end_time, - limit=10000 # Get all for summary - ) + """Get order summary statistics using OrderRepository.""" + await self.ensure_db_initialized() - total_orders = 
len(orders) - filled_orders = sum(1 for o in orders if o.get("status") == "FILLED") - cancelled_orders = sum(1 for o in orders if o.get("status") == "CANCELLED") - failed_orders = sum(1 for o in orders if o.get("status") == "FAILED") - active_orders = sum(1 for o in orders if o.get("status") in ["SUBMITTED", "OPEN", "PARTIALLY_FILLED"]) - - return { - "total_orders": total_orders, - "filled_orders": filled_orders, - "cancelled_orders": cancelled_orders, - "failed_orders": failed_orders, - "active_orders": active_orders, - "fill_rate": filled_orders / total_orders if total_orders > 0 else 0, - } + try: + async with self.db_manager.get_session_context() as session: + order_repo = OrderRepository(session) + return await order_repo.get_orders_summary( + account_name=account_name, + start_time=start_time, + end_time=end_time + ) + except Exception as e: + logging.error(f"Error getting orders summary: {e}") + return { + "total_orders": 0, + "filled_orders": 0, + "cancelled_orders": 0, + "failed_orders": 0, + "active_orders": 0, + "fill_rate": 0, + } async def get_trades(self, account_name: Optional[str] = None, market: Optional[str] = None, symbol: Optional[str] = None, trade_type: Optional[str] = None, start_time: Optional[int] = None, end_time: Optional[int] = None, limit: int = 100, offset: int = 0) -> List[Dict]: - """Get trade history using our AsyncDatabaseManager""" + """Get trade history using TradeRepository.""" await self.ensure_db_initialized() try: async with self.db_manager.get_session_context() as session: - # Join trades with orders to get account information - query = select(Trade).join(Order, Trade.order_id == Order.id) - - # Filter by account name if specified - if account_name: - query = query.where(Order.account_name == account_name) - - # Filter by connector name if specified - if market: - query = query.where(Order.connector_name == market) - - # Filter by trading pair if specified - if symbol: - query = query.where(Trade.trading_pair == symbol) - - # Filter by trade type if specified - if trade_type: - query = query.where(Trade.trade_type == trade_type) - - # Filter by time range if specified - if start_time: - start_dt = datetime.fromtimestamp(start_time / 1000) # Convert from milliseconds - query = query.where(Trade.timestamp >= start_dt) - if end_time: - end_dt = datetime.fromtimestamp(end_time / 1000) # Convert from milliseconds - query = query.where(Trade.timestamp <= end_dt) - - query = query.order_by(Trade.timestamp.desc()) - query = query.limit(limit).offset(offset) - - result = await session.execute(query) - trades = result.scalars().all() - - # Convert to dict format - return [ - { - "trade_id": trade.trade_id, - "order_id": trade.order.client_order_id if trade.order else None, - "account_name": trade.order.account_name if trade.order else None, - "connector_name": trade.order.connector_name if trade.order else None, - "trading_pair": trade.trading_pair, - "trade_type": trade.trade_type, - "amount": float(trade.amount), - "price": float(trade.price), - "fee_paid": float(trade.fee_paid), - "fee_currency": trade.fee_currency, - "timestamp": trade.timestamp.isoformat(), - } - for trade in trades - ] + trade_repo = TradeRepository(session) + trade_order_pairs = await trade_repo.get_trades_with_orders( + account_name=account_name, + connector_name=market, + trading_pair=symbol, + trade_type=trade_type, + start_time=start_time, + end_time=end_time, + limit=limit, + offset=offset + ) + return [trade_repo.to_dict(trade, order) for trade, order in trade_order_pairs] 
except Exception as e: logging.error(f"Error getting trades: {e}") return [] From 2840ec1b8dc84d6976e05a90a8adfe902d8183fb Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 14 Jun 2025 08:17:05 +0200 Subject: [PATCH 089/244] (feat) move to repository model for orders and trades --- database/__init__.py | 4 +- database/repositories/order_repository.py | 170 ++++++++++++++++++++++ database/repositories/trade_repository.py | 103 +++++++++++++ 3 files changed, 276 insertions(+), 1 deletion(-) create mode 100644 database/repositories/order_repository.py create mode 100644 database/repositories/trade_repository.py diff --git a/database/__init__.py b/database/__init__.py index 70527e63..13b3fd4d 100644 --- a/database/__init__.py +++ b/database/__init__.py @@ -1,5 +1,7 @@ from .models import AccountState, TokenState, Order, Trade, Base from .connection import AsyncDatabaseManager from .repositories import AccountRepository +from .repositories.order_repository import OrderRepository +from .repositories.trade_repository import TradeRepository -__all__ = ["AccountState", "TokenState", "Order", "Trade", "Base", "AsyncDatabaseManager", "AccountRepository"] \ No newline at end of file +__all__ = ["AccountState", "TokenState", "Order", "Trade", "Base", "AsyncDatabaseManager", "AccountRepository", "OrderRepository", "TradeRepository"] \ No newline at end of file diff --git a/database/repositories/order_repository.py b/database/repositories/order_repository.py new file mode 100644 index 00000000..d3e06009 --- /dev/null +++ b/database/repositories/order_repository.py @@ -0,0 +1,170 @@ +from datetime import datetime +from typing import Dict, List, Optional +from decimal import Decimal + +from sqlalchemy import desc, select +from sqlalchemy.ext.asyncio import AsyncSession + +from database.models import Order + + +class OrderRepository: + def __init__(self, session: AsyncSession): + self.session = session + + async def create_order(self, order_data: Dict) -> Order: + """Create a new order record.""" + order = Order(**order_data) + self.session.add(order) + await self.session.flush() # Get the ID + return order + + async def get_order_by_client_id(self, client_order_id: str) -> Optional[Order]: + """Get an order by its client order ID.""" + result = await self.session.execute( + select(Order).where(Order.client_order_id == client_order_id) + ) + return result.scalar_one_or_none() + + async def update_order_status(self, client_order_id: str, status: str, + error_message: Optional[str] = None) -> Optional[Order]: + """Update order status and optional error message.""" + result = await self.session.execute( + select(Order).where(Order.client_order_id == client_order_id) + ) + order = result.scalar_one_or_none() + if order: + order.status = status + if error_message: + order.error_message = error_message + await self.session.flush() + return order + + async def update_order_fill(self, client_order_id: str, filled_amount: Decimal, + average_fill_price: Decimal, fee_paid: Decimal = None, + fee_currency: str = None, exchange_order_id: str = None) -> Optional[Order]: + """Update order with fill information.""" + result = await self.session.execute( + select(Order).where(Order.client_order_id == client_order_id) + ) + order = result.scalar_one_or_none() + if order: + order.filled_amount = float(filled_amount) + order.average_fill_price = float(average_fill_price) + if fee_paid is not None: + order.fee_paid = float(fee_paid) + if fee_currency: + order.fee_currency = fee_currency + if exchange_order_id: + 
order.exchange_order_id = exchange_order_id + + # Update status based on fill amount + if filled_amount >= Decimal(str(order.amount)): + order.status = "FILLED" + else: + order.status = "PARTIALLY_FILLED" + + await self.session.flush() + return order + + async def get_orders(self, account_name: Optional[str] = None, + connector_name: Optional[str] = None, + trading_pair: Optional[str] = None, + status: Optional[str] = None, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: int = 100, offset: int = 0) -> List[Order]: + """Get orders with filtering and pagination.""" + query = select(Order) + + # Apply filters + if account_name: + query = query.where(Order.account_name == account_name) + if connector_name: + query = query.where(Order.connector_name == connector_name) + if trading_pair: + query = query.where(Order.trading_pair == trading_pair) + if status: + query = query.where(Order.status == status) + if start_time: + start_dt = datetime.fromtimestamp(start_time / 1000) + query = query.where(Order.created_at >= start_dt) + if end_time: + end_dt = datetime.fromtimestamp(end_time / 1000) + query = query.where(Order.created_at <= end_dt) + + # Apply ordering and pagination + query = query.order_by(Order.created_at.desc()) + query = query.limit(limit).offset(offset) + + result = await self.session.execute(query) + return result.scalars().all() + + async def get_active_orders(self, account_name: Optional[str] = None, + connector_name: Optional[str] = None, + trading_pair: Optional[str] = None) -> List[Order]: + """Get active orders (SUBMITTED, OPEN, PARTIALLY_FILLED).""" + query = select(Order).where( + Order.status.in_(["SUBMITTED", "OPEN", "PARTIALLY_FILLED"]) + ) + + # Apply filters + if account_name: + query = query.where(Order.account_name == account_name) + if connector_name: + query = query.where(Order.connector_name == connector_name) + if trading_pair: + query = query.where(Order.trading_pair == trading_pair) + + query = query.order_by(Order.created_at.desc()).limit(1000) + + result = await self.session.execute(query) + return result.scalars().all() + + async def get_orders_summary(self, account_name: Optional[str] = None, + start_time: Optional[int] = None, + end_time: Optional[int] = None) -> Dict: + """Get order summary statistics.""" + orders = await self.get_orders( + account_name=account_name, + start_time=start_time, + end_time=end_time, + limit=10000 # Get all for summary + ) + + total_orders = len(orders) + filled_orders = sum(1 for o in orders if o.status == "FILLED") + cancelled_orders = sum(1 for o in orders if o.status == "CANCELLED") + failed_orders = sum(1 for o in orders if o.status == "FAILED") + active_orders = sum(1 for o in orders if o.status in ["SUBMITTED", "OPEN", "PARTIALLY_FILLED"]) + + return { + "total_orders": total_orders, + "filled_orders": filled_orders, + "cancelled_orders": cancelled_orders, + "failed_orders": failed_orders, + "active_orders": active_orders, + "fill_rate": filled_orders / total_orders if total_orders > 0 else 0, + } + + def to_dict(self, order: Order) -> Dict: + """Convert Order model to dictionary format.""" + return { + "order_id": order.client_order_id, + "account_name": order.account_name, + "connector_name": order.connector_name, + "trading_pair": order.trading_pair, + "trade_type": order.trade_type, + "order_type": order.order_type, + "amount": float(order.amount), + "price": float(order.price) if order.price else None, + "status": order.status, + "filled_amount": float(order.filled_amount), + 
"average_fill_price": float(order.average_fill_price) if order.average_fill_price else None, + "fee_paid": float(order.fee_paid) if order.fee_paid else None, + "fee_currency": order.fee_currency, + "created_at": order.created_at.isoformat(), + "updated_at": order.updated_at.isoformat(), + "exchange_order_id": order.exchange_order_id, + "error_message": order.error_message, + } \ No newline at end of file diff --git a/database/repositories/trade_repository.py b/database/repositories/trade_repository.py new file mode 100644 index 00000000..d9f10ad2 --- /dev/null +++ b/database/repositories/trade_repository.py @@ -0,0 +1,103 @@ +from datetime import datetime +from typing import Dict, List, Optional + +from sqlalchemy import desc, select +from sqlalchemy.ext.asyncio import AsyncSession + +from database.models import Trade, Order + + +class TradeRepository: + def __init__(self, session: AsyncSession): + self.session = session + + async def create_trade(self, trade_data: Dict) -> Trade: + """Create a new trade record.""" + trade = Trade(**trade_data) + self.session.add(trade) + await self.session.flush() # Get the ID + return trade + + async def get_trades(self, account_name: Optional[str] = None, + connector_name: Optional[str] = None, + trading_pair: Optional[str] = None, + trade_type: Optional[str] = None, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: int = 100, offset: int = 0) -> List[Trade]: + """Get trades with filtering and pagination.""" + # Join trades with orders to get account information + query = select(Trade).join(Order, Trade.order_id == Order.id) + + # Apply filters + if account_name: + query = query.where(Order.account_name == account_name) + if connector_name: + query = query.where(Order.connector_name == connector_name) + if trading_pair: + query = query.where(Trade.trading_pair == trading_pair) + if trade_type: + query = query.where(Trade.trade_type == trade_type) + if start_time: + start_dt = datetime.fromtimestamp(start_time / 1000) + query = query.where(Trade.timestamp >= start_dt) + if end_time: + end_dt = datetime.fromtimestamp(end_time / 1000) + query = query.where(Trade.timestamp <= end_dt) + + # Apply ordering and pagination + query = query.order_by(Trade.timestamp.desc()) + query = query.limit(limit).offset(offset) + + result = await self.session.execute(query) + return result.scalars().all() + + async def get_trades_with_orders(self, account_name: Optional[str] = None, + connector_name: Optional[str] = None, + trading_pair: Optional[str] = None, + trade_type: Optional[str] = None, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: int = 100, offset: int = 0) -> List[tuple]: + """Get trades with their associated order information.""" + # Join trades with orders to get complete information + query = select(Trade, Order).join(Order, Trade.order_id == Order.id) + + # Apply filters + if account_name: + query = query.where(Order.account_name == account_name) + if connector_name: + query = query.where(Order.connector_name == connector_name) + if trading_pair: + query = query.where(Trade.trading_pair == trading_pair) + if trade_type: + query = query.where(Trade.trade_type == trade_type) + if start_time: + start_dt = datetime.fromtimestamp(start_time / 1000) + query = query.where(Trade.timestamp >= start_dt) + if end_time: + end_dt = datetime.fromtimestamp(end_time / 1000) + query = query.where(Trade.timestamp <= end_dt) + + # Apply ordering and pagination + query = query.order_by(Trade.timestamp.desc()) + query = 
query.limit(limit).offset(offset) + + result = await self.session.execute(query) + return result.all() # Returns tuples of (Trade, Order) + + def to_dict(self, trade: Trade, order: Optional[Order] = None) -> Dict: + """Convert Trade model to dictionary format.""" + return { + "trade_id": trade.trade_id, + "order_id": order.client_order_id if order else None, + "account_name": order.account_name if order else None, + "connector_name": order.connector_name if order else None, + "trading_pair": trade.trading_pair, + "trade_type": trade.trade_type, + "amount": float(trade.amount), + "price": float(trade.price), + "fee_paid": float(trade.fee_paid), + "fee_currency": trade.fee_currency, + "timestamp": trade.timestamp.isoformat(), + } \ No newline at end of file From d152016224da5cade32df7390a3b5e669c8f605b Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 14 Jun 2025 08:18:51 +0200 Subject: [PATCH 090/244] (feat) add position action to trade request --- models/bot.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/models/bot.py b/models/bot.py index 6423705c..94854a64 100644 --- a/models/bot.py +++ b/models/bot.py @@ -56,7 +56,7 @@ class TradeRequest(BaseModel): amount: Decimal = Field(description="Amount to trade", gt=0) order_type: OrderType = Field(default=OrderType.LIMIT, description="Type of order") price: Optional[Decimal] = Field(default=None, description="Price for limit orders") - position_action: Optional[PositionAction] = Field(default=PositionAction.OPEN, description="Position action for perpetual contracts (OPEN/CLOSE)") + position_action: PositionAction = Field(default=PositionAction.OPEN, description="Position action for perpetual contracts (OPEN/CLOSE)") class TradeResponse(BaseModel): @@ -72,6 +72,14 @@ class TradeResponse(BaseModel): status: str = Field(default="submitted", description="Order status") +class LeverageRequest(BaseModel): + """Request model for setting leverage on perpetual connectors""" + account_name: str = Field(description="Name of the account") + connector_name: str = Field(description="Name of the perpetual connector") + trading_pair: str = Field(description="Trading pair (e.g., BTC-USDT)") + leverage: int = Field(description="Leverage value (typically 1-125)", ge=1, le=125) + + class BotAction(BaseModel): bot_name: str = Field(description="Name of the bot instance to act upon") From d27b98b85c167c4b53346cc446c23b63d30b173f Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 14 Jun 2025 08:21:32 +0200 Subject: [PATCH 091/244] (feat) separate accounts management from trading --- routers/accounts.py | 593 ++--------------------------------------- routers/trading.py | 636 ++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 663 insertions(+), 566 deletions(-) create mode 100644 routers/trading.py diff --git a/routers/accounts.py b/routers/accounts.py index c14d7d11..ef6393a6 100644 --- a/routers/accounts.py +++ b/routers/accounts.py @@ -7,55 +7,13 @@ from services.accounts_service import AccountsService from utils.file_system import FileSystemUtil -from deps import get_accounts_service, get_market_data_feed_manager +from deps import get_accounts_service from models import PaginatedResponse -from models.bot import TradeRequest, TradeResponse router = APIRouter(tags=["Accounts"], prefix="/accounts") file_system = FileSystemUtil(base_path="bots/credentials") -@router.get("/state", response_model=Dict[str, Dict[str, List[Dict]]]) -async def get_all_accounts_state(accounts_service: AccountsService = 
Depends(get_accounts_service)): - """ - Get the current state of all accounts. - - Returns: - Dict containing account states with connector balances and token information - """ - return accounts_service.get_accounts_state() - - -@router.get("/history", response_model=PaginatedResponse) -async def get_account_state_history( - limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), - cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), - start_time: datetime = Query(default=None, description="Start time for filtering"), - end_time: datetime = Query(default=None, description="End time for filtering"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get the historical state of all accounts with pagination. - """ - try: - data, next_cursor, has_more = await accounts_service.load_account_state_history( - limit=limit, - cursor=cursor, - start_time=start_time, - end_time=end_time - ) - - return PaginatedResponse( - data=data, - pagination={ - "limit": limit, - "has_more": has_more, - "next_cursor": next_cursor, - "current_cursor": cursor - } - ) - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) @router.get("/connectors", response_model=List[str]) @@ -83,6 +41,22 @@ async def get_connector_config_map(connector_name: str, accounts_service: Accoun return accounts_service.get_connector_config_map(connector_name) +@router.get("/all-connectors-config-map", response_model=Dict[str, List[str]]) +async def get_all_connectors_config_map(accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get configuration fields required for all connectors. + + Returns: + Dictionary mapping connector names to their required configuration fields + """ + all_connectors = list(AllConnectorSettings.get_connector_settings().keys()) + config_maps = {} + for connector_name in all_connectors: + try: + config_maps[connector_name] = accounts_service.get_connector_config_map(connector_name) + except Exception as e: + config_maps[connector_name] = [] + return config_maps @router.get("/", response_model=List[str]) @@ -188,540 +162,27 @@ async def add_credential(account_name: str, connector_name: str, credentials: Di raise HTTPException(status_code=400, detail=str(e)) -# Account-specific routes -@router.get("/{account_name}/state", response_model=Dict[str, List[Dict]]) -async def get_account_state(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): +# Account-specific credential management +@router.get("/{account_name}/credentials", response_model=List[str]) +async def list_account_credentials(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): """ - Get current state of a specific account. + Get a list of all connectors that have credentials configured for a specific account. 
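+
+    Example (illustrative; assumes an account named "master_account" with credentials already added):
+        GET /accounts/master_account/credentials
+        -> ["binance", "binance_perpetual"]
+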
Args: - account_name: Name of the account to get state for + account_name: Name of the account to list credentials for Returns: - Dictionary mapping connector names to lists of token information + List of connector names that have credentials configured Raises: HTTPException: 404 if account not found """ - state = await accounts_service.get_account_current_state(account_name) - if not state: - raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - return state - - -@router.get("/{account_name}/state/history", response_model=PaginatedResponse) -async def get_account_history( - account_name: str, - limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), - cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), - start_time: datetime = Query(default=None, description="Start time for filtering"), - end_time: datetime = Query(default=None, description="End time for filtering"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get historical state of a specific account with pagination. - - Args: - account_name: Name of the account to get history for - limit: Number of items per page (1-1000) - cursor: Cursor for pagination (ISO timestamp) - start_time: Start time for filtering results - end_time: End time for filtering results - - Returns: - Paginated response with historical account state data - """ - data, next_cursor, has_more = await accounts_service.get_account_state_history( - account_name=account_name, - limit=limit, - cursor=cursor, - start_time=start_time, - end_time=end_time - ) - - return PaginatedResponse( - data=data, - pagination={ - "limit": limit, - "has_more": has_more, - "next_cursor": next_cursor, - "current_cursor": cursor, - "filters": { - "account_name": account_name, - "start_time": start_time.isoformat() if start_time else None, - "end_time": end_time.isoformat() if end_time else None - } - } - ) - -# Trading endpoints -@router.post("/trade", response_model=TradeResponse, status_code=status.HTTP_201_CREATED) -async def place_trade(trade_request: TradeRequest, - accounts_service: AccountsService = Depends(get_accounts_service), - market_data_manager = Depends(get_market_data_feed_manager)): - """ - Place a buy or sell order using a specific account and connector. - - Args: - trade_request: Trading request with account, connector, trading pair, type, amount, etc. 
- accounts_service: Injected accounts service - - Returns: - TradeResponse with order ID and trading details - - Raises: - HTTPException: 400 for invalid parameters, 404 for account/connector not found, 500 for trade execution errors - """ try: - order_id = await accounts_service.place_trade( - account_name=trade_request.account_name, - connector_name=trade_request.connector_name, - trading_pair=trade_request.trading_pair, - trade_type=trade_request.trade_type, - amount=trade_request.amount, - order_type=trade_request.order_type, - price=trade_request.price, - position_action=trade_request.position_action, - market_data_manager=market_data_manager - ) - - return TradeResponse( - order_id=order_id, - account_name=trade_request.account_name, - connector_name=trade_request.connector_name, - trading_pair=trade_request.trading_pair, - trade_type=trade_request.trade_type, - amount=trade_request.amount, - order_type=trade_request.order_type, - price=trade_request.price, - status="submitted" - ) + credentials = accounts_service.list_credentials(account_name) + # Remove .yml extension from filenames + return [cred.replace('.yml', '') for cred in credentials] except HTTPException: raise except Exception as e: - raise HTTPException(status_code=500, detail=f"Unexpected error placing trade: {str(e)}") - - -@router.get("/{account_name}/connectors/{connector_name}/orders", response_model=Dict[str, Dict]) -async def get_active_orders(account_name: str, connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Get all active orders for a specific account and connector. - - Args: - account_name: Name of the account - connector_name: Name of the connector - accounts_service: Injected accounts service - - Returns: - Dictionary mapping order IDs to order details - - Raises: - HTTPException: 404 if account or connector not found - """ - try: - return accounts_service.get_active_orders(account_name, connector_name) - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving orders: {str(e)}") - - -@router.post("/{account_name}/connectors/{connector_name}/orders/{client_order_id}/cancel") -async def cancel_order(account_name: str, connector_name: str, client_order_id: str, - trading_pair: str = Query(..., description="Trading pair for the order to cancel"), - accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Cancel a specific order by its client order ID. 
- - Args: - account_name: Name of the account - connector_name: Name of the connector - client_order_id: Client order ID to cancel - trading_pair: Trading pair for the order - accounts_service: Injected accounts service - - Returns: - Success message with cancelled order ID - - Raises: - HTTPException: 404 if account/connector not found, 500 for cancellation errors - """ - try: - cancelled_order_id = await accounts_service.cancel_order( - account_name=account_name, - connector_name=connector_name, - trading_pair=trading_pair, - client_order_id=client_order_id - ) - return {"message": f"Order {cancelled_order_id} cancelled successfully"} - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error cancelling order: {str(e)}") - - -@router.get("/{account_name}/connectors/{connector_name}/trading-rules/{trading_pair}") -async def get_trading_rules(account_name: str, connector_name: str, trading_pair: str, - accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Get trading rules for a specific trading pair on a connector. - - Args: - account_name: Name of the account - connector_name: Name of the connector - trading_pair: Trading pair to get rules for - accounts_service: Injected accounts service - - Returns: - Trading rules including minimum order size, price increment, etc. - - Raises: - HTTPException: 404 if account/connector/trading pair not found - """ - try: - connector = accounts_service.get_connector_instance(account_name, connector_name) - - if trading_pair not in connector.trading_rules: - raise HTTPException(status_code=404, detail=f"Trading pair '{trading_pair}' not found") - - trading_rule = connector.trading_rules[trading_pair] - return { - "trading_pair": trading_pair, - "min_order_size": float(trading_rule.min_order_size), - "max_order_size": float(trading_rule.max_order_size) if trading_rule.max_order_size else None, - "min_price_increment": float(trading_rule.min_price_increment), - "min_base_amount_increment": float(trading_rule.min_base_amount_increment), - "min_notional_size": float(trading_rule.min_notional_size), - "max_price_significant_digits": trading_rule.max_price_significant_digits, - "max_quantity_significant_digits": trading_rule.max_quantity_significant_digits, - "supports_limit_orders": trading_rule.supports_limit_orders, - "supports_market_orders": trading_rule.supports_market_orders, - } - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving trading rules: {str(e)}") - - -@router.get("/{account_name}/connectors/{connector_name}/supported-order-types") -async def get_supported_order_types(account_name: str, connector_name: str, - accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Get order types supported by a specific connector. 
- - Args: - account_name: Name of the account - connector_name: Name of the connector - accounts_service: Injected accounts service - - Returns: - List of supported order types (LIMIT, MARKET, LIMIT_MAKER) - - Raises: - HTTPException: 404 if account or connector not found - """ - try: - connector = accounts_service.get_connector_instance(account_name, connector_name) - return [order_type.name for order_type in connector.supported_order_types()] - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving order types: {str(e)}") - - -# Global order/trade endpoints for all accounts -@router.get("/orders", response_model=List[Dict]) -async def get_all_orders( - market: Optional[str] = Query(None, description="Filter by market/connector"), - symbol: Optional[str] = Query(None, description="Filter by trading pair"), - status: Optional[str] = Query(None, description="Filter by order status"), - start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), - end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), - limit: int = Query(100, ge=1, le=1000, description="Maximum number of orders to return"), - offset: int = Query(0, ge=0, description="Number of orders to skip"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get order history across all accounts. - - Args: - market: Optional filter by market/connector - symbol: Optional filter by trading pair - status: Optional filter by order status - start_time: Optional start timestamp - end_time: Optional end timestamp - limit: Maximum number of orders to return - offset: Number of orders to skip - - Returns: - List of orders across all accounts - """ - return await accounts_service.get_orders( - account_name=None, # Query all accounts - market=market, - symbol=symbol, - status=status, - start_time=start_time, - end_time=end_time, - limit=limit, - offset=offset, - ) - - -@router.get("/orders/active", response_model=List[Dict]) -async def get_all_active_orders( - market: Optional[str] = Query(None, description="Filter by market/connector"), - symbol: Optional[str] = Query(None, description="Filter by trading pair"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get active orders across all accounts. - - Args: - market: Optional filter by market/connector - symbol: Optional filter by trading pair - - Returns: - List of active orders across all accounts - """ - return await accounts_service.get_active_orders_history( - account_name=None, # Query all accounts - market=market, - symbol=symbol, - ) - - -@router.get("/orders/summary", response_model=Dict) -async def get_all_orders_summary( - start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), - end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get order summary statistics across all accounts. - - Args: - start_time: Optional start timestamp - end_time: Optional end timestamp - - Returns: - Order summary statistics including fill rate, volumes, etc. 
- """ - return await accounts_service.get_orders_summary( - account_name=None, # Query all accounts - start_time=start_time, - end_time=end_time, - ) - - -@router.get("/trades", response_model=List[Dict]) -async def get_all_trades( - market: Optional[str] = Query(None, description="Filter by market/connector"), - symbol: Optional[str] = Query(None, description="Filter by trading pair"), - trade_type: Optional[str] = Query(None, description="Filter by trade type (BUY/SELL)"), - start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), - end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), - limit: int = Query(100, ge=1, le=1000, description="Maximum number of trades to return"), - offset: int = Query(0, ge=0, description="Number of trades to skip"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get trade history across all accounts. - - Args: - market: Optional filter by market/connector - symbol: Optional filter by trading pair - trade_type: Optional filter by trade type - start_time: Optional start timestamp - end_time: Optional end timestamp - limit: Maximum number of trades to return - offset: Number of trades to skip - - Returns: - List of trades across all accounts - """ - return await accounts_service.get_trades( - account_name=None, # Query all accounts - market=market, - symbol=symbol, - trade_type=trade_type, - start_time=start_time, - end_time=end_time, - limit=limit, - offset=offset, - ) - - -# Order history endpoints integrated with accounts -@router.get("/{account_name}/orders", response_model=List[Dict]) -async def get_account_orders( - account_name: str, - connector_name: Optional[str] = Query(None, description="Filter by connector"), - trading_pair: Optional[str] = Query(None, description="Filter by trading pair"), - status: Optional[str] = Query(None, description="Filter by order status"), - start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), - end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), - limit: int = Query(100, ge=1, le=1000, description="Maximum number of orders to return"), - offset: int = Query(0, ge=0, description="Number of orders to skip"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get order history for a specific account. 
- - Args: - account_name: Name of the account - connector_name: Optional filter by connector - trading_pair: Optional filter by trading pair - status: Optional filter by order status - start_time: Optional start timestamp - end_time: Optional end timestamp - limit: Maximum number of orders to return - offset: Number of orders to skip - - Returns: - List of orders for the account - - Raises: - HTTPException: 404 if account not found - """ - # Verify account exists - state = await accounts_service.get_account_current_state(account_name) - if not state: - raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - - # Get orders from accounts service (will be implemented) - orders = await accounts_service.get_orders( - account_name=account_name, - market=connector_name, - symbol=trading_pair, - status=status, - start_time=start_time, - end_time=end_time, - limit=limit, - offset=offset, - ) - - return orders - - -@router.get("/{account_name}/orders/active", response_model=List[Dict]) -async def get_account_active_orders( - account_name: str, - connector_name: Optional[str] = Query(None, description="Filter by connector"), - trading_pair: Optional[str] = Query(None, description="Filter by trading pair"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get active orders for a specific account. - - Args: - account_name: Name of the account - connector_name: Optional filter by connector - trading_pair: Optional filter by trading pair - - Returns: - List of active orders - - Raises: - HTTPException: 404 if account not found - """ - # Verify account exists - state = await accounts_service.get_account_current_state(account_name) - if not state: - raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - - # Get active orders from accounts service (will be implemented) - orders = await accounts_service.get_active_orders_history( - account_name=account_name, - market=connector_name, - symbol=trading_pair, - ) - - return orders - - -@router.get("/{account_name}/orders/summary", response_model=Dict) -async def get_account_orders_summary( - account_name: str, - start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), - end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get order summary statistics for a specific account. - - Args: - account_name: Name of the account - start_time: Optional start timestamp - end_time: Optional end timestamp - - Returns: - Order summary statistics including fill rate, volumes, etc. 
- - Raises: - HTTPException: 404 if account not found - """ - # Verify account exists - state = await accounts_service.get_account_current_state(account_name) - if not state: - raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - - # Get summary from accounts service (will be implemented) - summary = await accounts_service.get_orders_summary( - account_name=account_name, - start_time=start_time, - end_time=end_time, - ) - - return summary - + raise HTTPException(status_code=500, detail=str(e)) -@router.get("/{account_name}/trades", response_model=List[Dict]) -async def get_account_trades( - account_name: str, - connector_name: Optional[str] = Query(None, description="Filter by connector"), - trading_pair: Optional[str] = Query(None, description="Filter by trading pair"), - trade_type: Optional[str] = Query(None, description="Filter by trade type (BUY/SELL)"), - start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), - end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), - limit: int = Query(100, ge=1, le=1000, description="Maximum number of trades to return"), - offset: int = Query(0, ge=0, description="Number of trades to skip"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get trade history for a specific account. - - Args: - account_name: Name of the account - connector_name: Optional filter by connector - trading_pair: Optional filter by trading pair - trade_type: Optional filter by trade type - start_time: Optional start timestamp - end_time: Optional end timestamp - limit: Maximum number of trades to return - offset: Number of trades to skip - - Returns: - List of trades for the account - - Raises: - HTTPException: 404 if account not found - """ - # Verify account exists - state = await accounts_service.get_account_current_state(account_name) - if not state: - raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - - # Get trades from accounts service (will be implemented) - trades = await accounts_service.get_trades( - account_name=account_name, - market=connector_name, - symbol=trading_pair, - trade_type=trade_type, - start_time=start_time, - end_time=end_time, - limit=limit, - offset=offset, - ) - - return trades diff --git a/routers/trading.py b/routers/trading.py new file mode 100644 index 00000000..951b0efc --- /dev/null +++ b/routers/trading.py @@ -0,0 +1,636 @@ +from typing import Dict, List, Optional +from datetime import datetime + +from fastapi import APIRouter, HTTPException, Depends, Query +from starlette import status + +from services.accounts_service import AccountsService +from deps import get_accounts_service, get_market_data_feed_manager +from models import PaginatedResponse +from models.bot import TradeRequest, TradeResponse, LeverageRequest + +router = APIRouter(tags=["Trading"], prefix="/trading") + + +# Portfolio & Account State Monitoring +@router.get("/portfolio/state", response_model=Dict[str, Dict[str, List[Dict]]]) +async def get_portfolio_state(accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get the current state of all accounts portfolio. 
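+
+    Example response shape (illustrative account, connector, and token entries; token fields abbreviated):
+        {"master_account": {"binance": [{"token": "USDT", "units": 1000.0, ...}]}}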
+ + Returns: + Dict containing all account states with connector balances and token information + """ + return accounts_service.get_accounts_state() + + +@router.get("/portfolio/history", response_model=PaginatedResponse) +async def get_portfolio_history( + limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), + cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), + start_time: datetime = Query(default=None, description="Start time for filtering"), + end_time: datetime = Query(default=None, description="End time for filtering"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get the historical state of all accounts portfolio with pagination. + """ + try: + data, next_cursor, has_more = await accounts_service.load_account_state_history( + limit=limit, + cursor=cursor, + start_time=start_time, + end_time=end_time + ) + + return PaginatedResponse( + data=data, + pagination={ + "limit": limit, + "has_more": has_more, + "next_cursor": next_cursor, + "current_cursor": cursor + } + ) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/{account_name}/balance", response_model=Dict[str, List[Dict]]) +async def get_account_balance(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get current balance state of a specific account. + + Args: + account_name: Name of the account to get balance for + + Returns: + Dictionary mapping connector names to lists of token information + + Raises: + HTTPException: 404 if account not found + """ + state = await accounts_service.get_account_current_state(account_name) + if not state: + raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") + return state + + +@router.get("/{account_name}/balance/history", response_model=PaginatedResponse) +async def get_account_balance_history( + account_name: str, + limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), + cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), + start_time: datetime = Query(default=None, description="Start time for filtering"), + end_time: datetime = Query(default=None, description="End time for filtering"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get historical balance state of a specific account with pagination. 
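+
+    Example (illustrative; the cursor is an ISO timestamp taken from the previous page's pagination):
+        GET /trading/master_account/balance/history?limit=100&cursor=2025-06-14T08:00:00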
+ + Args: + account_name: Name of the account to get history for + limit: Number of items per page (1-1000) + cursor: Cursor for pagination (ISO timestamp) + start_time: Start time for filtering results + end_time: End time for filtering results + + Returns: + Paginated response with historical account balance data + """ + data, next_cursor, has_more = await accounts_service.get_account_state_history( + account_name=account_name, + limit=limit, + cursor=cursor, + start_time=start_time, + end_time=end_time + ) + + return PaginatedResponse( + data=data, + pagination={ + "limit": limit, + "has_more": has_more, + "next_cursor": next_cursor, + "current_cursor": cursor, + "filters": { + "account_name": account_name, + "start_time": start_time.isoformat() if start_time else None, + "end_time": end_time.isoformat() if end_time else None + } + } + ) + + +# Trade Execution +@router.post("/orders", response_model=TradeResponse, status_code=status.HTTP_201_CREATED) +async def place_trade(trade_request: TradeRequest, + accounts_service: AccountsService = Depends(get_accounts_service), + market_data_manager = Depends(get_market_data_feed_manager)): + """ + Place a buy or sell order using a specific account and connector. + + Args: + trade_request: Trading request with account, connector, trading pair, type, amount, etc. + accounts_service: Injected accounts service + market_data_manager: Market data manager for price fetching + + Returns: + TradeResponse with order ID and trading details + + Raises: + HTTPException: 400 for invalid parameters, 404 for account/connector not found, 500 for trade execution errors + """ + try: + order_id = await accounts_service.place_trade( + account_name=trade_request.account_name, + connector_name=trade_request.connector_name, + trading_pair=trade_request.trading_pair, + trade_type=trade_request.trade_type, + amount=trade_request.amount, + order_type=trade_request.order_type, + price=trade_request.price, + position_action=trade_request.position_action, + market_data_manager=market_data_manager + ) + + return TradeResponse( + order_id=order_id, + account_name=trade_request.account_name, + connector_name=trade_request.connector_name, + trading_pair=trade_request.trading_pair, + trade_type=trade_request.trade_type, + amount=trade_request.amount, + order_type=trade_request.order_type, + price=trade_request.price, + status="submitted" + ) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Unexpected error placing trade: {str(e)}") + + +@router.post("/leverage", response_model=Dict[str, str], status_code=status.HTTP_200_OK) +async def set_leverage(leverage_request: LeverageRequest, + accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Set leverage for a specific trading pair on a perpetual connector. 
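+
+    Example request body (illustrative values; assumes a perpetual connector credential is configured):
+        {"account_name": "master_account", "connector_name": "binance_perpetual",
+         "trading_pair": "BTC-USDT", "leverage": 10}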
+ + Args: + leverage_request: Leverage request with account, connector, trading pair, and leverage value + accounts_service: Injected accounts service + + Returns: + Dictionary with success status and message + + Raises: + HTTPException: 400 for invalid parameters or non-perpetual connector, 404 for account/connector not found, 500 for execution errors + """ + try: + result = await accounts_service.set_leverage( + account_name=leverage_request.account_name, + connector_name=leverage_request.connector_name, + trading_pair=leverage_request.trading_pair, + leverage=leverage_request.leverage + ) + return result + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Unexpected error setting leverage: {str(e)}") + + +# Order Management +@router.get("/{account_name}/{connector_name}/orders/active", response_model=Dict[str, Dict]) +async def get_connector_active_orders(account_name: str, connector_name: str, + accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get all active orders for a specific account and connector. + + Args: + account_name: Name of the account + connector_name: Name of the connector + accounts_service: Injected accounts service + + Returns: + Dictionary mapping order IDs to order details + + Raises: + HTTPException: 404 if account or connector not found + """ + try: + return accounts_service.get_active_orders(account_name, connector_name) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error retrieving orders: {str(e)}") + + +@router.post("/{account_name}/{connector_name}/orders/{client_order_id}/cancel") +async def cancel_order(account_name: str, connector_name: str, client_order_id: str, + trading_pair: str = Query(..., description="Trading pair for the order to cancel"), + accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Cancel a specific order by its client order ID. 
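+
+    Example (illustrative identifiers; the client order ID comes from the order placement response):
+        POST /trading/master_account/binance/orders/hbot-buy-123/cancel?trading_pair=BTC-USDT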
+ + Args: + account_name: Name of the account + connector_name: Name of the connector + client_order_id: Client order ID to cancel + trading_pair: Trading pair for the order + accounts_service: Injected accounts service + + Returns: + Success message with cancelled order ID + + Raises: + HTTPException: 404 if account/connector not found, 500 for cancellation errors + """ + try: + cancelled_order_id = await accounts_service.cancel_order( + account_name=account_name, + connector_name=connector_name, + trading_pair=trading_pair, + client_order_id=client_order_id + ) + return {"message": f"Order {cancelled_order_id} cancelled successfully"} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error cancelling order: {str(e)}") + + +# Global Order History +@router.get("/orders", response_model=List[Dict]) +async def get_all_orders( + market: Optional[str] = Query(None, description="Filter by market/connector"), + symbol: Optional[str] = Query(None, description="Filter by trading pair"), + status: Optional[str] = Query(None, description="Filter by order status"), + start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), + end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), + limit: int = Query(100, ge=1, le=1000, description="Maximum number of orders to return"), + offset: int = Query(0, ge=0, description="Number of orders to skip"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get order history across all accounts. + + Args: + market: Optional filter by market/connector + symbol: Optional filter by trading pair + status: Optional filter by order status + start_time: Optional start timestamp + end_time: Optional end timestamp + limit: Maximum number of orders to return + offset: Number of orders to skip + + Returns: + List of orders across all accounts + """ + return await accounts_service.get_orders( + account_name=None, # Query all accounts + market=market, + symbol=symbol, + status=status, + start_time=start_time, + end_time=end_time, + limit=limit, + offset=offset, + ) + + +@router.get("/orders/active", response_model=List[Dict]) +async def get_all_active_orders( + market: Optional[str] = Query(None, description="Filter by market/connector"), + symbol: Optional[str] = Query(None, description="Filter by trading pair"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get active orders across all accounts. + + Args: + market: Optional filter by market/connector + symbol: Optional filter by trading pair + accounts_service: Injected accounts service + + Returns: + List of active orders across all accounts + """ + return await accounts_service.get_active_orders_history( + account_name=None, # Query all accounts + market=market, + symbol=symbol, + ) + + +@router.get("/orders/summary", response_model=Dict) +async def get_all_orders_summary( + start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), + end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get order summary statistics across all accounts. + + Args: + start_time: Optional start timestamp + end_time: Optional end timestamp + accounts_service: Injected accounts service + + Returns: + Order summary statistics including fill rate, volumes, etc. 
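+
+    Example response (illustrative numbers; keys mirror those produced by OrderRepository.get_orders_summary):
+        {"total_orders": 42, "filled_orders": 30, "cancelled_orders": 10,
+         "failed_orders": 2, "active_orders": 0, "fill_rate": 0.714}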
+ """ + return await accounts_service.get_orders_summary( + account_name=None, # Query all accounts + start_time=start_time, + end_time=end_time, + ) + + +# Account-Specific Order History +@router.get("/{account_name}/orders", response_model=List[Dict]) +async def get_account_orders( + account_name: str, + connector_name: Optional[str] = Query(None, description="Filter by connector"), + trading_pair: Optional[str] = Query(None, description="Filter by trading pair"), + status: Optional[str] = Query(None, description="Filter by order status"), + start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), + end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), + limit: int = Query(100, ge=1, le=1000, description="Maximum number of orders to return"), + offset: int = Query(0, ge=0, description="Number of orders to skip"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get order history for a specific account. + + Args: + account_name: Name of the account + connector_name: Optional filter by connector + trading_pair: Optional filter by trading pair + status: Optional filter by order status + start_time: Optional start timestamp + end_time: Optional end timestamp + limit: Maximum number of orders to return + offset: Number of orders to skip + accounts_service: Injected accounts service + + Returns: + List of orders for the account + + Raises: + HTTPException: 404 if account not found + """ + # Verify account exists + state = await accounts_service.get_account_current_state(account_name) + if not state: + raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") + + orders = await accounts_service.get_orders( + account_name=account_name, + market=connector_name, + symbol=trading_pair, + status=status, + start_time=start_time, + end_time=end_time, + limit=limit, + offset=offset, + ) + + return orders + + +@router.get("/{account_name}/orders/active", response_model=List[Dict]) +async def get_account_active_orders( + account_name: str, + connector_name: Optional[str] = Query(None, description="Filter by connector"), + trading_pair: Optional[str] = Query(None, description="Filter by trading pair"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get active orders for a specific account. + + Args: + account_name: Name of the account + connector_name: Optional filter by connector + trading_pair: Optional filter by trading pair + accounts_service: Injected accounts service + + Returns: + List of active orders + + Raises: + HTTPException: 404 if account not found + """ + # Verify account exists + state = await accounts_service.get_account_current_state(account_name) + if not state: + raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") + + orders = await accounts_service.get_active_orders_history( + account_name=account_name, + market=connector_name, + symbol=trading_pair, + ) + + return orders + + +@router.get("/{account_name}/orders/summary", response_model=Dict) +async def get_account_orders_summary( + account_name: str, + start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), + end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get order summary statistics for a specific account. 
+ + Args: + account_name: Name of the account + start_time: Optional start timestamp + end_time: Optional end timestamp + accounts_service: Injected accounts service + + Returns: + Order summary statistics including fill rate, volumes, etc. + + Raises: + HTTPException: 404 if account not found + """ + # Verify account exists + state = await accounts_service.get_account_current_state(account_name) + if not state: + raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") + + summary = await accounts_service.get_orders_summary( + account_name=account_name, + start_time=start_time, + end_time=end_time, + ) + + return summary + + +# Trade History +@router.get("/trades", response_model=List[Dict]) +async def get_all_trades( + market: Optional[str] = Query(None, description="Filter by market/connector"), + symbol: Optional[str] = Query(None, description="Filter by trading pair"), + trade_type: Optional[str] = Query(None, description="Filter by trade type (BUY/SELL)"), + start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), + end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), + limit: int = Query(100, ge=1, le=1000, description="Maximum number of trades to return"), + offset: int = Query(0, ge=0, description="Number of trades to skip"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get trade history across all accounts. + + Args: + market: Optional filter by market/connector + symbol: Optional filter by trading pair + trade_type: Optional filter by trade type + start_time: Optional start timestamp + end_time: Optional end timestamp + limit: Maximum number of trades to return + offset: Number of trades to skip + accounts_service: Injected accounts service + + Returns: + List of trades across all accounts + """ + return await accounts_service.get_trades( + account_name=None, # Query all accounts + market=market, + symbol=symbol, + trade_type=trade_type, + start_time=start_time, + end_time=end_time, + limit=limit, + offset=offset, + ) + + +@router.get("/{account_name}/trades", response_model=List[Dict]) +async def get_account_trades( + account_name: str, + connector_name: Optional[str] = Query(None, description="Filter by connector"), + trading_pair: Optional[str] = Query(None, description="Filter by trading pair"), + trade_type: Optional[str] = Query(None, description="Filter by trade type (BUY/SELL)"), + start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), + end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), + limit: int = Query(100, ge=1, le=1000, description="Maximum number of trades to return"), + offset: int = Query(0, ge=0, description="Number of trades to skip"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get trade history for a specific account. 
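+
+    Example (illustrative filters):
+        GET /trading/master_account/trades?connector_name=binance&trading_pair=BTC-USDT&trade_type=BUY&limit=50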
+ + Args: + account_name: Name of the account + connector_name: Optional filter by connector + trading_pair: Optional filter by trading pair + trade_type: Optional filter by trade type + start_time: Optional start timestamp + end_time: Optional end timestamp + limit: Maximum number of trades to return + offset: Number of trades to skip + accounts_service: Injected accounts service + + Returns: + List of trades for the account + + Raises: + HTTPException: 404 if account not found + """ + # Verify account exists + state = await accounts_service.get_account_current_state(account_name) + if not state: + raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") + + trades = await accounts_service.get_trades( + account_name=account_name, + market=connector_name, + symbol=trading_pair, + trade_type=trade_type, + start_time=start_time, + end_time=end_time, + limit=limit, + offset=offset, + ) + + return trades + + +# Trading Rules & Configuration +@router.get("/{account_name}/{connector_name}/rules/{trading_pair}") +async def get_trading_rules(account_name: str, connector_name: str, trading_pair: str, + accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get trading rules for a specific trading pair on a connector. + + Args: + account_name: Name of the account + connector_name: Name of the connector + trading_pair: Trading pair to get rules for + accounts_service: Injected accounts service + + Returns: + Trading rules including minimum order size, price increment, etc. + + Raises: + HTTPException: 404 if account/connector/trading pair not found + """ + try: + connector = accounts_service.get_connector_instance(account_name, connector_name) + + if trading_pair not in connector.trading_rules: + raise HTTPException(status_code=404, detail=f"Trading pair '{trading_pair}' not found") + + trading_rule = connector.trading_rules[trading_pair] + return { + "trading_pair": trading_pair, + "min_order_size": float(trading_rule.min_order_size), + "max_order_size": float(trading_rule.max_order_size) if trading_rule.max_order_size else None, + "min_price_increment": float(trading_rule.min_price_increment), + "min_base_amount_increment": float(trading_rule.min_base_amount_increment), + "min_quote_amount_increment": float(trading_rule.min_quote_amount_increment), + "min_notional_size": float(trading_rule.min_notional_size), + "min_order_value": float(trading_rule.min_order_value), + "max_price_significant_digits": float(trading_rule.max_price_significant_digits), + "supports_limit_orders": trading_rule.supports_limit_orders, + "supports_market_orders": trading_rule.supports_market_orders, + "buy_order_collateral_token": trading_rule.buy_order_collateral_token, + "sell_order_collateral_token": trading_rule.sell_order_collateral_token, + } + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error retrieving trading rules: {str(e)}") + + +@router.get("/{account_name}/{connector_name}/order-types") +async def get_supported_order_types(account_name: str, connector_name: str, + accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get order types supported by a specific connector. 
+ + Args: + account_name: Name of the account + connector_name: Name of the connector + accounts_service: Injected accounts service + + Returns: + List of supported order types (LIMIT, MARKET, LIMIT_MAKER) + + Raises: + HTTPException: 404 if account or connector not found + """ + try: + connector = accounts_service.get_connector_instance(account_name, connector_name) + return [order_type.name for order_type in connector.supported_order_types()] + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error retrieving order types: {str(e)}") \ No newline at end of file From 2c4f9666e1dd573bf081753bc4d83aa6d3fd3265 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 14 Jun 2025 08:22:08 +0200 Subject: [PATCH 092/244] (feat) use orders repository --- services/orders_recorder.py | 192 +++++++++++++++++++++--------------- 1 file changed, 110 insertions(+), 82 deletions(-) diff --git a/services/orders_recorder.py b/services/orders_recorder.py index fbd3af52..672e8206 100644 --- a/services/orders_recorder.py +++ b/services/orders_recorder.py @@ -1,27 +1,20 @@ import asyncio import logging from typing import Any, Optional, Union -from decimal import Decimal from datetime import datetime +from decimal import Decimal from hummingbot.core.event.event_forwarder import SourceInfoEventForwarder from hummingbot.core.event.events import ( - OrderType, TradeType, BuyOrderCreatedEvent, SellOrderCreatedEvent, OrderFilledEvent, - OrderCancelledEvent, - MarketEvent, - BuyOrderCompletedEvent, - SellOrderCompletedEvent, - MarketOrderFailureEvent + MarketEvent ) from hummingbot.connector.connector_base import ConnectorBase -from sqlalchemy import select -from database import AsyncDatabaseManager -from database.models import Order, Trade +from database import AsyncDatabaseManager, OrderRepository, TradeRepository class OrdersRecorder: @@ -93,6 +86,18 @@ async def stop(self): logging.info(f"OrdersRecorder stopped for {self.account_name}/{self.connector_name}") + def _extract_error_message(self, event) -> str: + """Extract error message from various possible event attributes.""" + # Try different possible attribute names for error messages + for attr_name in ['error_message', 'message', 'reason', 'failure_reason', 'error']: + if hasattr(event, attr_name): + error_value = getattr(event, attr_name) + if error_value: + return str(error_value) + + # If no error message found, create a descriptive one + return f"Order failed: {event.__class__.__name__}" + def _did_create_order(self, event_tag: int, market: ConnectorBase, event: Union[BuyOrderCreatedEvent, SellOrderCreatedEvent]): """Handle order creation events - called by SourceInfoEventForwarder""" logging.info(f"OrdersRecorder: _did_create_order called for order {getattr(event, 'order_id', 'unknown')}") @@ -137,19 +142,19 @@ async def _handle_order_created(self, event: Union[BuyOrderCreatedEvent, SellOrd logging.info(f"OrdersRecorder: _handle_order_created started for order {event.order_id}") try: async with self.db_manager.get_session_context() as session: - order = Order( - client_order_id=event.order_id, - account_name=self.account_name, - connector_name=self.connector_name, - trading_pair=event.trading_pair, - trade_type=trade_type.name, - order_type=event.order_type.name if hasattr(event, 'order_type') else 'UNKNOWN', - amount=float(event.amount), - price=float(event.price) if event.price else None, - status="SUBMITTED" - ) - session.add(order) - await session.commit() + order_repo = OrderRepository(session) + 
order_data = { + "client_order_id": event.order_id, + "account_name": self.account_name, + "connector_name": self.connector_name, + "trading_pair": event.trading_pair, + "trade_type": trade_type.name, + "order_type": event.order_type.name if hasattr(event, 'order_type') else 'UNKNOWN', + "amount": float(event.amount), + "price": float(event.price) if event.price else None, + "status": "SUBMITTED" + } + await order_repo.create_order(order_data) logging.info(f"OrdersRecorder: Successfully recorded order created: {event.order_id}") except Exception as e: @@ -159,35 +164,10 @@ async def _handle_order_filled(self, event: OrderFilledEvent): """Handle order fill events""" try: async with self.db_manager.get_session_context() as session: - # Update order with fill information - result = await session.execute( - select(Order).where(Order.client_order_id == event.order_id) - ) - order = result.scalar_one_or_none() - if order: - order.filled_amount = float(event.amount) - order.average_fill_price = float(event.price) - order.status = "FILLED" if event.amount >= Decimal(str(order.amount)) else "PARTIALLY_FILLED" - - # Calculate fee properly using the same method as MarketsRecorder - if event.trade_fee: - try: - base_asset, quote_asset = event.trading_pair.split("-") - fee_in_quote = event.trade_fee.fee_amount_in_token( - trading_pair=event.trading_pair, - price=event.price, - order_amount=event.amount, - token=quote_asset, - exchange=self._connector - ) - order.fee_paid = float(fee_in_quote) - order.fee_currency = quote_asset - except Exception as e: - logging.error(f"Error calculating fee in quote: {e}, will be stored as 0") - order.fee_paid = 0 - order.fee_currency = None + order_repo = OrderRepository(session) + trade_repo = TradeRepository(session) - # Create trade record + # Calculate fees trade_fee_paid = 0 trade_fee_currency = None @@ -208,19 +188,29 @@ async def _handle_order_filled(self, event: OrderFilledEvent): trade_fee_paid = 0 trade_fee_currency = None - trade = Trade( - order_id=order.id if order else None, - trade_id=f"{event.order_id}_{event.timestamp}", - timestamp=datetime.fromtimestamp(event.timestamp), - trading_pair=event.trading_pair, - trade_type=event.trade_type.name, - amount=float(event.amount), - price=float(event.price), - fee_paid=trade_fee_paid, + # Update order with fill information + order = await order_repo.update_order_fill( + client_order_id=event.order_id, + filled_amount=Decimal(str(event.amount)), + average_fill_price=Decimal(str(event.price)), + fee_paid=Decimal(str(trade_fee_paid)) if trade_fee_paid else None, fee_currency=trade_fee_currency ) - session.add(trade) - await session.commit() + + # Create trade record + if order: + trade_data = { + "order_id": order.id, + "trade_id": f"{event.order_id}_{event.timestamp}", + "timestamp": datetime.fromtimestamp(event.timestamp), + "trading_pair": event.trading_pair, + "trade_type": event.trade_type.name, + "amount": float(event.amount), + "price": float(event.price), + "fee_paid": trade_fee_paid, + "fee_currency": trade_fee_currency + } + await trade_repo.create_trade(trade_data) logging.debug(f"Recorded order fill: {event.order_id} - {event.amount} @ {event.price}") except Exception as e: @@ -230,13 +220,11 @@ async def _handle_order_cancelled(self, event: Any): """Handle order cancellation events""" try: async with self.db_manager.get_session_context() as session: - result = await session.execute( - select(Order).where(Order.client_order_id == event.order_id) + order_repo = OrderRepository(session) + await 
order_repo.update_order_status( + client_order_id=event.order_id, + status="CANCELLED" ) - order = result.scalar_one_or_none() - if order: - order.status = "CANCELLED" - await session.commit() logging.debug(f"Recorded order cancelled: {event.order_id}") except Exception as e: @@ -246,16 +234,59 @@ async def _handle_order_failed(self, event: Any): """Handle order failure events""" try: async with self.db_manager.get_session_context() as session: - result = await session.execute( - select(Order).where(Order.client_order_id == event.order_id) - ) - order = result.scalar_one_or_none() - if order: - order.status = "FAILED" - order.error_message = getattr(event, 'error_message', None) - await session.commit() + order_repo = OrderRepository(session) + + # Check if order exists, if not try to get details from connector's tracked orders + existing_order = await order_repo.get_order_by_client_id(event.order_id) + if existing_order: + # Extract error message from various possible attributes + error_msg = self._extract_error_message(event) + + # Update existing order with failure status and error message + await order_repo.update_order_status( + client_order_id=event.order_id, + status="FAILED", + error_message=error_msg + ) + logging.info(f"Updated existing order {event.order_id} to FAILED status") + else: + # Try to get order details from connector's tracked orders + order_details = self._get_order_details_from_connector(event.order_id) + if order_details: + logging.info(f"Retrieved order details from connector for {event.order_id}: {order_details}") + + # Create order record as FAILED with available details + if order_details: + order_data = { + "client_order_id": event.order_id, + "account_name": self.account_name, + "connector_name": self.connector_name, + "trading_pair": order_details["trading_pair"], + "trade_type": order_details["trade_type"], + "order_type": order_details["order_type"], + "amount": order_details["amount"], + "price": order_details["price"], + "status": "FAILED", + "error_message": self._extract_error_message(event) + } + else: + # Fallback with minimal details + order_data = { + "client_order_id": event.order_id, + "account_name": self.account_name, + "connector_name": self.connector_name, + "trading_pair": "UNKNOWN", + "trade_type": "UNKNOWN", + "order_type": "UNKNOWN", + "amount": 0.0, + "price": None, + "status": "FAILED", + "error_message": self._extract_error_message(event) + } + + await order_repo.create_order(order_data) + logging.info(f"Created failed order record for {event.order_id}") - logging.debug(f"Recorded order failed: {event.order_id}") except Exception as e: logging.error(f"Error recording order failure: {e}") @@ -263,14 +294,11 @@ async def _handle_order_completed(self, event: Any): """Handle order completion events""" try: async with self.db_manager.get_session_context() as session: - result = await session.execute( - select(Order).where(Order.client_order_id == event.order_id) - ) - order = result.scalar_one_or_none() + order_repo = OrderRepository(session) + order = await order_repo.get_order_by_client_id(event.order_id) if order: order.status = "FILLED" order.exchange_order_id = getattr(event, 'exchange_order_id', None) - await session.commit() logging.debug(f"Recorded order completed: {event.order_id}") except Exception as e: From bad8601483dab1dd1073f2c0281a0b75ff486898 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 14 Jun 2025 08:22:16 +0200 Subject: [PATCH 093/244] (feat) include trading router --- main.py | 2 ++ 1 file changed, 2 
insertions(+) diff --git a/main.py b/main.py index 5b56d962..d0d58e1d 100644 --- a/main.py +++ b/main.py @@ -27,6 +27,7 @@ market_data, performance, scripts, + trading ) # Configure logging @@ -167,6 +168,7 @@ def auth_user( # Include all routers with authentication app.include_router(docker.router, dependencies=[Depends(auth_user)]) app.include_router(accounts.router, dependencies=[Depends(auth_user)]) +app.include_router(trading.router, dependencies=[Depends(auth_user)]) app.include_router(bot_orchestration.router, dependencies=[Depends(auth_user)]) app.include_router(controllers.router, dependencies=[Depends(auth_user)]) app.include_router(scripts.router, dependencies=[Depends(auth_user)]) From 72ae8895438b575c9a4770f393b7a93e0998048b Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 18 Jun 2025 18:04:23 +0200 Subject: [PATCH 094/244] (feat) add patch to save yml --- patches/__init__.py | 6 ++++ patches/config_helpers_patch.py | 49 +++++++++++++++++++++++++++++++++ 2 files changed, 55 insertions(+) create mode 100644 patches/__init__.py create mode 100644 patches/config_helpers_patch.py diff --git a/patches/__init__.py b/patches/__init__.py new file mode 100644 index 00000000..0d281fbf --- /dev/null +++ b/patches/__init__.py @@ -0,0 +1,6 @@ +""" +Patches for third-party libraries used in the backend API +""" +from .config_helpers_patch import apply_config_helpers_patch, remove_config_helpers_patch + +__all__ = ['apply_config_helpers_patch', 'remove_config_helpers_patch'] \ No newline at end of file diff --git a/patches/config_helpers_patch.py b/patches/config_helpers_patch.py new file mode 100644 index 00000000..914a12ce --- /dev/null +++ b/patches/config_helpers_patch.py @@ -0,0 +1,49 @@ +""" +Patch for Hummingbot's config_helpers.py to handle missing directories +""" +import logging +from pathlib import Path + +from hummingbot.client.config.config_helpers import ClientConfigAdapter + + +def patched_save_to_yml(yml_path: Path, cm: ClientConfigAdapter): + """ + Patched version of save_to_yml that creates directories if they don't exist + """ + try: + # Ensure the parent directory exists + yml_path.parent.mkdir(parents=True, exist_ok=True) + + cm_yml_str = cm.generate_yml_output_str_with_comments() + with open(yml_path, "w", encoding="utf-8") as outfile: + outfile.write(cm_yml_str) + except Exception as e: + logging.getLogger().error("Error writing configs: %s" % (str(e),), exc_info=True) + + +def apply_config_helpers_patch(): + """ + Apply the patch to hummingbot.client.config.config_helpers + """ + import hummingbot.client.config.config_helpers as config_helpers + + # Store the original function in case we need it + config_helpers._original_save_to_yml = config_helpers.save_to_yml + + # Replace with our patched version + config_helpers.save_to_yml = patched_save_to_yml + + logging.info("Applied config_helpers patch: save_to_yml now creates missing directories") + + +def remove_config_helpers_patch(): + """ + Remove the patch and restore original functionality + """ + import hummingbot.client.config.config_helpers as config_helpers + + if hasattr(config_helpers, '_original_save_to_yml'): + config_helpers.save_to_yml = config_helpers._original_save_to_yml + delattr(config_helpers, '_original_save_to_yml') + logging.info("Removed config_helpers patch: restored original save_to_yml") \ No newline at end of file From 1dc8c464fa31c0c40e9b5f3d4a8a0961e95f354e Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 18 Jun 2025 18:04:38 +0200 Subject: [PATCH 095/244] (feat) apply patches --- 
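Usage note (commentary, not part of the diff): the apply/remove pair from patches/config_helpers_patch.py above is process-global and reversible. A minimal sketch of the intended lifecycle, with the target path and config adapter purely illustrative (e.g. inside a test):

    from pathlib import Path
    from patches import apply_config_helpers_patch, remove_config_helpers_patch

    apply_config_helpers_patch()
    try:
        # save_to_yml now creates missing parent directories before writing,
        # so saving under a not-yet-created account folder no longer fails:
        # save_to_yml(Path("bots/conf/new_account/binance.yml"), adapter)
        ...
    finally:
        remove_config_helpers_patch()  # restores the original save_to_yml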
main.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/main.py b/main.py index d0d58e1d..ff5aeb14 100644 --- a/main.py +++ b/main.py @@ -45,6 +45,10 @@ # Load environment variables early load_dotenv() +# Apply patches for third-party libraries +from patches import apply_config_helpers_patch +apply_config_helpers_patch() + # Get settings from Pydantic Settings username = settings.security.username password = settings.security.password From 658a25cf8c2e2fe849081324d69bd793604eaa91 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 18 Jun 2025 18:04:55 +0200 Subject: [PATCH 096/244] (feat) improve mqtt message handling --- utils/mqtt_manager.py | 72 +++++++++++++++++++++---------------------- 1 file changed, 36 insertions(+), 36 deletions(-) diff --git a/utils/mqtt_manager.py b/utils/mqtt_manager.py index f6b0c479..8dbc0ee5 100644 --- a/utils/mqtt_manager.py +++ b/utils/mqtt_manager.py @@ -62,49 +62,49 @@ def __init__(self, host: str, port: int, username: str, password: str): @asynccontextmanager async def _get_client(self): - """Get MQTT client with automatic reconnection.""" + """Get MQTT client for a single connection attempt.""" + client_id = f"backend-api-{int(time.time())}" + + # Create client with credentials if provided + if self.username and self.password: + client = aiomqtt.Client( + hostname=self.host, + port=self.port, + username=self.username, + password=self.password, + identifier=client_id, + keepalive=60, + ) + else: + client = aiomqtt.Client(hostname=self.host, port=self.port, identifier=client_id, keepalive=60) + + async with client: + self._connected = True + logger.info(f"✓ Connected to MQTT broker at {self.host}:{self.port}") + + # Subscribe to topics + for topic, qos in self._subscriptions: + await client.subscribe(topic, qos=qos) + yield client + + # Cleanup on exit + self._connected = False + + async def _handle_messages(self): + """Main message handling loop with reconnection.""" while True: try: - client_id = f"backend-api-{int(time.time())}" - - # Create client with credentials if provided - if self.username and self.password: - client = aiomqtt.Client( - hostname=self.host, - port=self.port, - username=self.username, - password=self.password, - identifier=client_id, - keepalive=60, - ) - else: - client = aiomqtt.Client(hostname=self.host, port=self.port, identifier=client_id, keepalive=60) - - async with client: - self._connected = True - logger.info(f"✓ Connected to MQTT broker at {self.host}:{self.port}") - - # Subscribe to topics - for topic, qos in self._subscriptions: - await client.subscribe(topic, qos=qos) - yield client - + async with self._get_client() as client: + self._client = client + async for message in client.messages: + await self._process_message(message) except aiomqtt.MqttError as error: - self._connected = False - logger.error(f'MQTT Error "{error}". Reconnecting in {self._reconnect_interval} seconds.') + logger.error(f'MQTT disconnected during message iteration: "{error}". Reconnecting...') await asyncio.sleep(self._reconnect_interval) except Exception as e: - self._connected = False - logger.error(f"Unexpected error: {e}. Reconnecting in {self._reconnect_interval} seconds.") + logger.error(f"Unexpected error in message handler: {e}. 
Reconnecting...") await asyncio.sleep(self._reconnect_interval) - async def _handle_messages(self): - """Main message handling loop.""" - async with self._get_client() as client: - self._client = client - async for message in client.messages: - await self._process_message(message) - async def _process_message(self, message): """Process incoming MQTT message.""" try: From 5c16e71323e11be5cd6e3e49f494bbe78565224f Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 18 Jun 2025 18:05:07 +0200 Subject: [PATCH 097/244] (feat) add set position mode --- services/accounts_service.py | 47 +++++++++++++++++++++++++++++++++++- 1 file changed, 46 insertions(+), 1 deletion(-) diff --git a/services/accounts_service.py b/services/accounts_service.py index e28189ac..4fcd3b3b 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -6,7 +6,7 @@ from fastapi import HTTPException from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger -from hummingbot.core.data_type.common import OrderType, TradeType, PositionAction +from hummingbot.core.data_type.common import OrderType, TradeType, PositionAction, PositionMode from config import settings from database import AsyncDatabaseManager, AccountRepository, OrderRepository, TradeRepository @@ -717,6 +717,51 @@ async def set_leverage(self, account_name: str, connector_name: str, logging.error(f"Failed to set leverage for {trading_pair} to {leverage}: {e}") raise HTTPException(status_code=500, detail=f"Failed to set leverage: {str(e)}") + async def set_position_mode(self, account_name: str, connector_name: str, + position_mode: PositionMode) -> Dict[str, str]: + """ + Set position mode for a perpetual connector. + + Args: + account_name: Name of the account + connector_name: Name of the connector (must be perpetual) + position_mode: PositionMode.HEDGE or PositionMode.ONEWAY + + Returns: + Dictionary with success status and message + + Raises: + HTTPException: If account/connector not found, not perpetual, or operation fails + """ + # Validate this is a perpetual connector + if "_perpetual" not in connector_name: + raise HTTPException(status_code=400, detail=f"Connector '{connector_name}' is not a perpetual connector") + + connector = self.get_connector_instance(account_name, connector_name) + + # Check if the requested position mode is supported + supported_modes = connector.supported_position_modes() + if position_mode not in supported_modes: + supported_values = [mode.value for mode in supported_modes] + raise HTTPException( + status_code=400, + detail=f"Position mode '{position_mode.value}' not supported. 
Supported modes: {supported_values}" + ) + + try: + # Try to call the method - it might be sync or async + result = connector.set_position_mode(position_mode) + # If it's a coroutine, await it + if asyncio.iscoroutine(result): + await result + + message = f"Position mode set to {position_mode.value} on {connector_name}" + logging.info(f"Set position mode to {position_mode.value} on {connector_name} (Account: {account_name})") + return {"status": "success", "message": message} + + except Exception as e: + logging.error(f"Failed to set position mode to {position_mode.value}: {e}") + raise HTTPException(status_code=500, detail=f"Failed to set position mode: {str(e)}") async def get_orders(self, account_name: Optional[str] = None, market: Optional[str] = None, symbol: Optional[str] = None, status: Optional[str] = None, From 94645f3f47d5c10fc8c187aa5a8828a653f5f7fe Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 18 Jun 2025 18:05:20 +0200 Subject: [PATCH 098/244] (feat) improve orders recorder --- services/orders_recorder.py | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/services/orders_recorder.py b/services/orders_recorder.py index 672e8206..288cfc9d 100644 --- a/services/orders_recorder.py +++ b/services/orders_recorder.py @@ -149,10 +149,11 @@ async def _handle_order_created(self, event: Union[BuyOrderCreatedEvent, SellOrd "connector_name": self.connector_name, "trading_pair": event.trading_pair, "trade_type": trade_type.name, - "order_type": event.order_type.name if hasattr(event, 'order_type') else 'UNKNOWN', + "order_type": event.type.name if hasattr(event, 'type') else 'UNKNOWN', "amount": float(event.amount), "price": float(event.price) if event.price else None, - "status": "SUBMITTED" + "status": "SUBMITTED", + "exchange_order_id": getattr(event, 'exchange_order_id', None) } await order_repo.create_order(order_data) @@ -230,6 +231,23 @@ async def _handle_order_cancelled(self, event: Any): except Exception as e: logging.error(f"Error recording order cancellation: {e}") + def _get_order_details_from_connector(self, order_id: str) -> Optional[dict]: + """Try to get order details from connector's tracked orders""" + try: + if self._connector and hasattr(self._connector, 'in_flight_orders'): + in_flight_order = self._connector.in_flight_orders.get(order_id) + if in_flight_order: + return { + "trading_pair": in_flight_order.trading_pair, + "trade_type": in_flight_order.trade_type.name, + "order_type": in_flight_order.order_type.name, + "amount": float(in_flight_order.amount), + "price": float(in_flight_order.price) if in_flight_order.price else None + } + except Exception as e: + logging.error(f"Error getting order details from connector: {e}") + return None + async def _handle_order_failed(self, event: Any): """Handle order failure events""" try: From 7599116690c1f9560eadb4c699a1fc7bb787a791 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 18 Jun 2025 18:05:33 +0200 Subject: [PATCH 099/244] (feat) add set position mode route --- routers/accounts.py | 42 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/routers/accounts.py b/routers/accounts.py index ef6393a6..0229447e 100644 --- a/routers/accounts.py +++ b/routers/accounts.py @@ -3,6 +3,8 @@ from fastapi import APIRouter, HTTPException, Depends, Query from hummingbot.client.settings import AllConnectorSettings +from hummingbot.core.data_type.common import PositionMode +from pydantic import BaseModel from starlette import status from 
services.accounts_service import AccountsService @@ -186,3 +188,43 @@ async def list_account_credentials(account_name: str, accounts_service: Accounts except Exception as e: raise HTTPException(status_code=500, detail=str(e)) + +class PositionModeRequest(BaseModel): + position_mode: str + +@router.post("/{account_name}/{connector_name}/position-mode") +async def set_position_mode( + account_name: str, + connector_name: str, + request: PositionModeRequest, + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Set position mode for a perpetual connector. + + Args: + account_name: Name of the account + connector_name: Name of the perpetual connector + position_mode: Position mode to set (HEDGE or ONEWAY) + + Returns: + Success message with status + + Raises: + HTTPException: 400 if not a perpetual connector or invalid position mode + """ + try: + # Convert string to PositionMode enum + mode = PositionMode[request.position_mode.upper()] + result = await accounts_service.set_position_mode(account_name, connector_name, mode) + return result + except KeyError: + raise HTTPException( + status_code=400, + detail=f"Invalid position mode '{request.position_mode}'. Must be 'HEDGE' or 'ONEWAY'" + ) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + From b8e1af89f4358b0c0721adb449363c19084e46d1 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 18 Jun 2025 18:57:09 +0200 Subject: [PATCH 100/244] (feat) add functionality to get the current position mode --- routers/accounts.py | 28 ++++++++++++++++++++++++++++ services/accounts_service.py | 36 ++++++++++++++++++++++++++++++++++++ 2 files changed, 64 insertions(+) diff --git a/routers/accounts.py b/routers/accounts.py index 0229447e..25458065 100644 --- a/routers/accounts.py +++ b/routers/accounts.py @@ -228,3 +228,31 @@ async def set_position_mode( except Exception as e: raise HTTPException(status_code=500, detail=str(e)) + +@router.get("/{account_name}/{connector_name}/position-mode") +async def get_position_mode( + account_name: str, + connector_name: str, + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get current position mode for a perpetual connector. + + Args: + account_name: Name of the account + connector_name: Name of the perpetual connector + + Returns: + Dictionary with current position mode, connector name, and account name + + Raises: + HTTPException: 400 if not a perpetual connector + """ + try: + result = await accounts_service.get_position_mode(account_name, connector_name) + return result + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + diff --git a/services/accounts_service.py b/services/accounts_service.py index 4fcd3b3b..f25d8def 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -763,6 +763,42 @@ async def set_position_mode(self, account_name: str, connector_name: str, logging.error(f"Failed to set position mode to {position_mode.value}: {e}") raise HTTPException(status_code=500, detail=f"Failed to set position mode: {str(e)}") + async def get_position_mode(self, account_name: str, connector_name: str) -> Dict[str, str]: + """ + Get current position mode for a perpetual connector.
+ + Args: + account_name: Name of the account + connector_name: Name of the connector (must be perpetual) + + Returns: + Dictionary with current position mode + + Raises: + HTTPException: If account/connector not found, not perpetual, or operation fails + """ + # Validate this is a perpetual connector + if "_perpetual" not in connector_name: + raise HTTPException(status_code=400, detail=f"Connector '{connector_name}' is not a perpetual connector") + + connector = self.get_connector_instance(account_name, connector_name) + + # Check if connector has position mode functionality + if not hasattr(connector, 'position_mode'): + raise HTTPException(status_code=400, detail=f"Connector '{connector_name}' does not support position mode") + + try: + current_mode = connector.position_mode + return { + "position_mode": current_mode.value if current_mode else "UNKNOWN", + "connector": connector_name, + "account": account_name + } + + except Exception as e: + logging.error(f"Failed to get position mode: {e}") + raise HTTPException(status_code=500, detail=f"Failed to get position mode: {str(e)}") + async def get_orders(self, account_name: Optional[str] = None, market: Optional[str] = None, symbol: Optional[str] = None, status: Optional[str] = None, start_time: Optional[int] = None, end_time: Optional[int] = None, From a2f007c562b0677c30175dbcc234b533acba2d1a Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 19 Jun 2025 00:36:41 +0200 Subject: [PATCH 101/244] (feat) improve position mode and orders access --- routers/trading.py | 6 +++--- services/accounts_service.py | 38 ++++++++++++++++++------------------ 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/routers/trading.py b/routers/trading.py index 951b0efc..254a4123 100644 --- a/routers/trading.py +++ b/routers/trading.py @@ -220,7 +220,7 @@ async def get_connector_active_orders(account_name: str, connector_name: str, HTTPException: 404 if account or connector not found """ try: - return accounts_service.get_active_orders(account_name, connector_name) + return await accounts_service.get_active_orders(account_name, connector_name) except HTTPException: raise except Exception as e: @@ -583,7 +583,7 @@ async def get_trading_rules(account_name: str, connector_name: str, trading_pair HTTPException: 404 if account/connector/trading pair not found """ try: - connector = accounts_service.get_connector_instance(account_name, connector_name) + connector = await accounts_service.get_connector_instance(account_name, connector_name) if trading_pair not in connector.trading_rules: raise HTTPException(status_code=404, detail=f"Trading pair '{trading_pair}' not found") @@ -628,7 +628,7 @@ async def get_supported_order_types(account_name: str, connector_name: str, HTTPException: 404 if account or connector not found """ try: - connector = accounts_service.get_connector_instance(account_name, connector_name) + connector = await accounts_service.get_connector_instance(account_name, connector_name) return [order_type.name for order_type in connector.supported_order_types()] except HTTPException: raise diff --git a/services/accounts_service.py b/services/accounts_service.py index f25d8def..6b47b334 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -41,7 +41,6 @@ def __init__(self, default_quote: Default quote currency for trading pairs (default: "USDT") """ self.secrets_manager = ETHKeyFileSecretManger(settings.security.config_password) - self.connector_manager = ConnectorManager(self.secrets_manager) self.accounts_state = 
{} self.update_account_state_interval = account_update_interval * 60 self.default_quote = default_quote @@ -50,6 +49,9 @@ def __init__(self, # Database setup for account states and orders self.db_manager = AsyncDatabaseManager(settings.database.url) self._db_initialized = False + + # Initialize connector manager with db_manager + self.connector_manager = ConnectorManager(self.secrets_manager, self.db_manager) async def ensure_db_initialized(self): """Ensure database is initialized before using it.""" @@ -177,11 +179,9 @@ async def _ensure_account_connectors_initialized(self, account_name: str): try: # Only initialize if connector doesn't exist if not self.connector_manager.is_connector_initialized(account_name, connector_name): - await self.connector_manager.initialize_connector_with_tracking( - account_name, connector_name, self.db_manager - ) + # Get connector will now handle all initialization + connector = await self.connector_manager.get_connector(account_name, connector_name) # Force initial balance update to ensure first dump has data - connector = self.connector_manager.get_connector(account_name, connector_name) await connector._update_balances() except Exception as e: @@ -261,11 +261,9 @@ async def add_credentials(self, account_name: str, connector_name: str, credenti await self.connector_manager.update_connector_keys(account_name, connector_name, credentials) # Initialize the connector with tracking - await self.connector_manager.initialize_connector_with_tracking( - account_name, connector_name, self.db_manager - ) - # Force initial balance update to ensure first dump has data - connector = self.connector_manager.get_connector(account_name, connector_name) + # Get connector will now handle all initialization + connector = await self.connector_manager.get_connector(account_name, connector_name) + # Force initial balance update to ensure first dump has data await connector._update_balances() @staticmethod def list_accounts(): @@ -620,7 +618,7 @@ async def place_trade(self, account_name: str, connector_name: str, trading_pair logging.error(f"Failed to place {trade_type} order: {e}") raise HTTPException(status_code=500, detail=f"Failed to place trade: {str(e)}") - def get_connector_instance(self, account_name: str, connector_name: str): + async def get_connector_instance(self, account_name: str, connector_name: str): """ Get a connector instance for direct access. @@ -637,12 +635,14 @@ def get_connector_instance(self, account_name: str, connector_name: str): if account_name not in self.list_accounts(): raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - if not self.connector_manager.is_connector_initialized(account_name, connector_name): + # Check if connector credentials exist + available_credentials = self.connector_manager.list_available_credentials(account_name) + if connector_name not in available_credentials: raise HTTPException(status_code=404, detail=f"Connector '{connector_name}' not found for account '{account_name}'") - return self.connector_manager.get_connector(account_name, connector_name) + return await self.connector_manager.get_connector(account_name, connector_name) - def get_active_orders(self, account_name: str, connector_name: str) -> Dict[str, any]: + async def get_active_orders(self, account_name: str, connector_name: str) -> Dict[str, any]: """ Get active orders for a specific connector. 
@@ -653,7 +653,7 @@ def get_active_orders(self, account_name: str, connector_name: str) -> Dict[str, Returns: Dictionary of active orders """ - connector = self.get_connector_instance(account_name, connector_name) + connector = await self.get_connector_instance(account_name, connector_name) return {order_id: order.to_json() for order_id, order in connector.in_flight_orders.items()} async def cancel_order(self, account_name: str, connector_name: str, @@ -670,7 +670,7 @@ async def cancel_order(self, account_name: str, connector_name: str, Returns: Client order ID that was cancelled """ - connector = self.get_connector_instance(account_name, connector_name) + connector = await self.get_connector_instance(account_name, connector_name) try: result = connector.cancel(trading_pair=trading_pair, client_order_id=client_order_id) @@ -701,7 +701,7 @@ async def set_leverage(self, account_name: str, connector_name: str, if "_perpetual" not in connector_name: raise HTTPException(status_code=400, detail=f"Connector '{connector_name}' is not a perpetual connector") - connector = self.get_connector_instance(account_name, connector_name) + connector = await self.get_connector_instance(account_name, connector_name) # Check if connector has leverage functionality if not hasattr(connector, '_execute_set_leverage'): @@ -737,7 +737,7 @@ async def set_position_mode(self, account_name: str, connector_name: str, if "_perpetual" not in connector_name: raise HTTPException(status_code=400, detail=f"Connector '{connector_name}' is not a perpetual connector") - connector = self.get_connector_instance(account_name, connector_name) + connector = await self.get_connector_instance(account_name, connector_name) # Check if the requested position mode is supported supported_modes = connector.supported_position_modes() @@ -781,7 +781,7 @@ async def get_position_mode(self, account_name: str, connector_name: str) -> Dic if "_perpetual" not in connector_name: raise HTTPException(status_code=400, detail=f"Connector '{connector_name}' is not a perpetual connector") - connector = self.get_connector_instance(account_name, connector_name) + connector = await self.get_connector_instance(account_name, connector_name) # Check if connector has position mode functionality if not hasattr(connector, 'position_mode'): From c9748c9de93621caf42d5ce464e26c192cb7237c Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 19 Jun 2025 00:37:25 +0200 Subject: [PATCH 102/244] (feat) unify ways to initialize connectors --- utils/connector_manager.py | 73 ++++++++++++++++++++++++++++++-------- 1 file changed, 59 insertions(+), 14 deletions(-) diff --git a/utils/connector_manager.py b/utils/connector_manager.py index 26a5f4a4..b1795040 100644 --- a/utils/connector_manager.py +++ b/utils/connector_manager.py @@ -7,8 +7,8 @@ from hummingbot.client.config.config_helpers import ClientConfigAdapter, ReadOnlyClientConfigAdapter, get_connector_class from hummingbot.client.settings import AllConnectorSettings from hummingbot.connector.connector_base import ConnectorBase -from hummingbot.connector.exchange_base import ExchangeBase from hummingbot.connector.exchange_py_base import ExchangePyBase +from hummingbot.core.data_type.common import PositionMode from hummingbot.core.utils.async_utils import safe_ensure_future from utils.backend_api_config_adapter import BackendAPIConfigAdapter @@ -23,16 +23,18 @@ class ConnectorManager: This is the single source of truth for all connector instances. 
""" - def __init__(self, secrets_manager: ETHKeyFileSecretManger): + def __init__(self, secrets_manager: ETHKeyFileSecretManger, db_manager=None): self.secrets_manager = secrets_manager + self.db_manager = db_manager self._connector_cache: Dict[str, ConnectorBase] = {} self._orders_recorders: Dict[str, any] = {} self._file_system = FileSystemUtil() - def get_connector(self, account_name: str, connector_name: str): + async def get_connector(self, account_name: str, connector_name: str): """ Get the connector object for the specified account and connector. Uses caching to avoid recreating connectors unnecessarily. + Ensures proper initialization including position mode setup. :param account_name: The name of the account. :param connector_name: The name of the connector. @@ -44,7 +46,8 @@ def get_connector(self, account_name: str, connector_name: str): return self._connector_cache[cache_key] try: - connector = self._create_connector(account_name, connector_name) + # Create connector with full initialization + connector = await self._create_and_initialize_connector(account_name, connector_name) self._connector_cache[cache_key] = connector return connector except Exception as e: @@ -190,28 +193,28 @@ def get_connector_state(self, account_name: str, connector_name: str) -> Dict[st "trading_required": connector.is_trading_required } - async def initialize_connector_with_tracking(self, account_name: str, connector_name: str, db_manager=None) -> ConnectorBase: + async def _create_and_initialize_connector(self, account_name: str, connector_name: str) -> ConnectorBase: """ - Initialize a connector with order tracking infrastructure. - This includes creating the connector, starting its network, and setting up order recording. + Create and fully initialize a connector with all necessary setup. + This includes creating the connector, starting its network, setting up order recording, + and configuring position mode for perpetual connectors. :param account_name: The name of the account. :param connector_name: The name of the connector. - :param db_manager: Database manager for order recording (optional). :return: The initialized connector instance. 
""" - # Get or create the connector - connector = self.get_connector(account_name, connector_name) + # Create the base connector + connector = self._create_connector(account_name, connector_name) - # Start order tracking if db_manager provided - if db_manager: + # Start order tracking if db_manager is available + if self.db_manager: cache_key = f"{account_name}:{connector_name}" if cache_key not in self._orders_recorders: # Import OrdersRecorder dynamically to avoid circular imports from services.orders_recorder import OrdersRecorder # Create and start orders recorder - orders_recorder = OrdersRecorder(db_manager, account_name, connector_name) + orders_recorder = OrdersRecorder(self.db_manager, account_name, connector_name) orders_recorder.start(connector) self._orders_recorders[cache_key] = orders_recorder @@ -221,9 +224,25 @@ async def initialize_connector_with_tracking(self, account_name: str, connector_ # Update initial balances await connector._update_balances() - logging.info(f"Initialized connector {connector_name} for account {account_name} with tracking") + # Set default position mode to HEDGE for perpetual connectors + await self._set_default_position_mode(connector) + + logging.info(f"Initialized connector {connector_name} for account {account_name}") return connector + async def initialize_connector_with_tracking(self, account_name: str, connector_name: str, db_manager=None) -> ConnectorBase: + """ + DEPRECATED: Use get_connector() instead. + This method is kept for backward compatibility but just calls get_connector(). + + :param account_name: The name of the account. + :param connector_name: The name of the connector. + :param db_manager: Database manager (ignored, use constructor instead). + :return: The initialized connector instance. + """ + logging.warning(f"initialize_connector_with_tracking is deprecated, use get_connector() instead") + return await self.get_connector(account_name, connector_name) + def _start_network_without_order_book(self, connector: ExchangePyBase): """ Start connector network tasks except the order book tracker. @@ -243,6 +262,32 @@ def _start_network_without_order_book(self, connector: ExchangePyBase): except Exception as e: logging.error(f"Error starting connector network without order book: {e}") + async def _set_default_position_mode(self, connector): + """ + Set default position mode to HEDGE for perpetual connectors that support position modes. + + :param connector: The connector instance + """ + try: + # Check if this is a perpetual connector + if "_perpetual" in connector.name and hasattr(connector, 'set_position_mode'): + # Check if HEDGE mode is supported + if hasattr(connector, 'supported_position_modes'): + supported_modes = connector.supported_position_modes() + if PositionMode.HEDGE in supported_modes: + # Try to call the method - it might be sync or async + result = connector.set_position_mode(PositionMode.HEDGE) + # If it's a coroutine, await it + if asyncio.iscoroutine(result): + await result + logging.info(f"Set default position mode to HEDGE for {connector.name}") + else: + logging.info(f"HEDGE mode not supported for {connector.name}, skipping position mode setup") + else: + logging.info(f"Position modes not supported for {connector.name}, skipping position mode setup") + except Exception as e: + logging.warning(f"Failed to set default position mode for {connector.name}: {e}") + async def stop_connector(self, account_name: str, connector_name: str): """ Stop a connector and its associated services. 
From 9e19a909360fc03bb3241baf0dede9cf17dbe600 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 19 Jun 2025 07:43:23 +0200 Subject: [PATCH 103/244] (feat) move trading rules to market data --- routers/market_data.py | 121 ++++++++++++++++++++++++++- routers/trading.py | 45 ---------- services/market_data_feed_manager.py | 19 +++-- 3 files changed, 133 insertions(+), 52 deletions(-) diff --git a/routers/market_data.py b/routers/market_data.py index ce2adeff..95a7cebf 100644 --- a/routers/market_data.py +++ b/routers/market_data.py @@ -1,6 +1,7 @@ import asyncio +from typing import Dict, List, Optional -from fastapi import APIRouter, Request +from fastapi import APIRouter, Request, HTTPException from hummingbot.data_feed.candles_feed.data_types import CandlesConfig, HistoricalCandlesConfig from services.market_data_feed_manager import MarketDataFeedManager @@ -118,3 +119,121 @@ async def get_market_data_settings(): "feed_timeout": settings.market_data.feed_timeout, "description": "cleanup_interval: seconds between cleanup runs, feed_timeout: seconds before unused feeds expire" } + + +# Trading Rules Endpoints +@router.get("/trading-rules/{connector}") +async def get_all_trading_rules(request: Request, connector: str): + """ + Get trading rules for all available trading pairs on a connector. + + This endpoint uses the MarketDataFeedManager to access non-trading connector instances, + which means no authentication or account setup is required. + + Args: + request: FastAPI request object + connector: Name of the connector (e.g., 'binance', 'binance_perpetual') + + Returns: + Dictionary mapping trading pairs to their trading rules + + Raises: + HTTPException: 404 if connector not found, 500 for other errors + """ + try: + market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager + + # Get trading rules for all pairs + rules = await market_data_feed_manager.get_trading_rules(connector) + + if "error" in rules: + raise HTTPException(status_code=404, detail=f"Connector '{connector}' not found or error: {rules['error']}") + + return rules + + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error retrieving trading rules: {str(e)}") + + +@router.get("/trading-rules/{connector}/{trading_pair}") +async def get_trading_rules_for_pair(request: Request, connector: str, trading_pair: str): + """ + Get trading rules for a specific trading pair on a connector. + + This endpoint uses the MarketDataFeedManager to access non-trading connector instances, + which means no authentication or account setup is required. + + Args: + request: FastAPI request object + connector: Name of the connector (e.g., 'binance', 'binance_perpetual') + trading_pair: Trading pair to get rules for (e.g., 'BTC-USDT') + + Returns: + Trading rules including minimum order size, price increment, etc. 
+ + Raises: + HTTPException: 404 if connector or trading pair not found, 500 for other errors + """ + try: + market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager + + # Get trading rules for specific pair + rules = await market_data_feed_manager.get_trading_rules(connector, [trading_pair]) + + if "error" in rules: + raise HTTPException(status_code=404, detail=f"Connector '{connector}' not found or error: {rules['error']}") + + if trading_pair not in rules: + raise HTTPException(status_code=404, detail=f"Trading pair '{trading_pair}' not found on {connector}") + + if "error" in rules[trading_pair]: + raise HTTPException(status_code=404, detail=rules[trading_pair]["error"]) + + return rules[trading_pair] + + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error retrieving trading rules: {str(e)}") + + +@router.get("/supported-order-types/{connector}") +async def get_supported_order_types(request: Request, connector: str): + """ + Get order types supported by a specific connector. + + This endpoint uses the MarketDataFeedManager to access non-trading connector instances, + which means no authentication or account setup is required. + + Args: + request: FastAPI request object + connector: Name of the connector (e.g., 'binance', 'binance_perpetual') + + Returns: + List of supported order types (LIMIT, MARKET, LIMIT_MAKER) + + Raises: + HTTPException: 404 if connector not found, 500 for other errors + """ + try: + market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager + + # Access connector through MarketDataProvider's _rate_sources + connector_instance = market_data_feed_manager.market_data_provider._rate_sources.get(connector) + + if not connector_instance: + raise HTTPException(status_code=404, detail=f"Connector '{connector}' not found") + + # Get supported order types + if hasattr(connector_instance, 'supported_order_types'): + order_types = [order_type.name for order_type in connector_instance.supported_order_types()] + return {"connector": connector, "supported_order_types": order_types} + else: + raise HTTPException(status_code=404, detail=f"Connector '{connector}' does not support order types query") + + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error retrieving order types: {str(e)}") diff --git a/routers/trading.py b/routers/trading.py index 254a4123..31dcf674 100644 --- a/routers/trading.py +++ b/routers/trading.py @@ -564,51 +564,6 @@ async def get_account_trades( # Trading Rules & Configuration -@router.get("/{account_name}/{connector_name}/rules/{trading_pair}") -async def get_trading_rules(account_name: str, connector_name: str, trading_pair: str, - accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Get trading rules for a specific trading pair on a connector. - - Args: - account_name: Name of the account - connector_name: Name of the connector - trading_pair: Trading pair to get rules for - accounts_service: Injected accounts service - - Returns: - Trading rules including minimum order size, price increment, etc. 
- - Raises: - HTTPException: 404 if account/connector/trading pair not found - """ - try: - connector = await accounts_service.get_connector_instance(account_name, connector_name) - - if trading_pair not in connector.trading_rules: - raise HTTPException(status_code=404, detail=f"Trading pair '{trading_pair}' not found") - - trading_rule = connector.trading_rules[trading_pair] - return { - "trading_pair": trading_pair, - "min_order_size": float(trading_rule.min_order_size), - "max_order_size": float(trading_rule.max_order_size) if trading_rule.max_order_size else None, - "min_price_increment": float(trading_rule.min_price_increment), - "min_base_amount_increment": float(trading_rule.min_base_amount_increment), - "min_quote_amount_increment": float(trading_rule.min_quote_amount_increment), - "min_notional_size": float(trading_rule.min_notional_size), - "min_order_value": float(trading_rule.min_order_value), - "max_price_significant_digits": float(trading_rule.max_price_significant_digits), - "supports_limit_orders": trading_rule.supports_limit_orders, - "supports_market_orders": trading_rule.supports_market_orders, - "buy_order_collateral_token": trading_rule.buy_order_collateral_token, - "sell_order_collateral_token": trading_rule.sell_order_collateral_token, - } - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving trading rules: {str(e)}") - @router.get("/{account_name}/{connector_name}/order-types") async def get_supported_order_types(account_name: str, connector_name: str, diff --git a/services/market_data_feed_manager.py b/services/market_data_feed_manager.py index 9ca152bd..c15e26c0 100644 --- a/services/market_data_feed_manager.py +++ b/services/market_data_feed_manager.py @@ -184,8 +184,9 @@ async def get_trading_rules(self, connector_name: str, trading_pairs: Optional[L # Access connector through MarketDataProvider's _rate_sources LazyDict connector = self.market_data_provider._rate_sources[connector_name] - # Update trading rules to ensure we have the latest data - await connector._update_trading_rules() + # Check if trading rules are initialized, if not update them + if not connector.trading_rules or len(connector.trading_rules) == 0: + await connector._update_trading_rules() # Get trading rules if trading_pairs: @@ -199,11 +200,14 @@ async def get_trading_rules(self, connector_name: str, trading_pairs: Optional[L "max_order_size": float(rule.max_order_size) if rule.max_order_size else None, "min_price_increment": float(rule.min_price_increment), "min_base_amount_increment": float(rule.min_base_amount_increment), + "min_quote_amount_increment": float(rule.min_quote_amount_increment), "min_notional_size": float(rule.min_notional_size), - "max_price_significant_digits": rule.max_price_significant_digits, - "max_quantity_significant_digits": rule.max_quantity_significant_digits, + "min_order_value": float(rule.min_order_value), + "max_price_significant_digits": float(rule.max_price_significant_digits), "supports_limit_orders": rule.supports_limit_orders, "supports_market_orders": rule.supports_market_orders, + "buy_order_collateral_token": rule.buy_order_collateral_token, + "sell_order_collateral_token": rule.sell_order_collateral_token, } else: result[trading_pair] = {"error": f"Trading pair {trading_pair} not found"} @@ -216,11 +220,14 @@ async def get_trading_rules(self, connector_name: str, trading_pairs: Optional[L "max_order_size": float(rule.max_order_size) if rule.max_order_size else None, 
"min_price_increment": float(rule.min_price_increment), "min_base_amount_increment": float(rule.min_base_amount_increment), + "min_quote_amount_increment": float(rule.min_quote_amount_increment), "min_notional_size": float(rule.min_notional_size), - "max_price_significant_digits": rule.max_price_significant_digits, - "max_quantity_significant_digits": rule.max_quantity_significant_digits, + "min_order_value": float(rule.min_order_value), + "max_price_significant_digits": float(rule.max_price_significant_digits), "supports_limit_orders": rule.supports_limit_orders, "supports_market_orders": rule.supports_market_orders, + "buy_order_collateral_token": rule.buy_order_collateral_token, + "sell_order_collateral_token": rule.sell_order_collateral_token, } self.logger.debug(f"Retrieved trading rules for {connector_name}: {len(result)} pairs") From f3d358caaa790f5e5366715fa41e0ce17ec9f3b7 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 19 Jun 2025 07:43:56 +0200 Subject: [PATCH 104/244] (feat) unify connectors initialization --- utils/connector_manager.py | 19 +++---------------- 1 file changed, 3 insertions(+), 16 deletions(-) diff --git a/utils/connector_manager.py b/utils/connector_manager.py index b1795040..33aabb5c 100644 --- a/utils/connector_manager.py +++ b/utils/connector_manager.py @@ -1,6 +1,6 @@ import asyncio import logging -from typing import Dict, List, Optional, Tuple +from typing import Dict, List, Optional from hummingbot.client.config.client_config_map import ClientConfigMap from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger @@ -174,7 +174,7 @@ def is_connector_initialized(self, account_name: str, connector_name: str) -> bo cache_key = f"{account_name}:{connector_name}" return cache_key in self._connector_cache - def get_connector_state(self, account_name: str, connector_name: str) -> Dict[str, any]: + async def get_connector_state(self, account_name: str, connector_name: str) -> Dict[str, any]: """ Get the current state of a connector (balances, trading rules, etc). @@ -182,7 +182,7 @@ def get_connector_state(self, account_name: str, connector_name: str) -> Dict[st :param connector_name: The name of the connector. :return: Dictionary containing connector state information. """ - connector = self.get_connector(account_name, connector_name) + connector = await self.get_connector(account_name, connector_name) return { "balances": {k: float(v) for k, v in connector.get_all_balances().items()}, @@ -230,19 +230,6 @@ async def _create_and_initialize_connector(self, account_name: str, connector_na logging.info(f"Initialized connector {connector_name} for account {account_name}") return connector - async def initialize_connector_with_tracking(self, account_name: str, connector_name: str, db_manager=None) -> ConnectorBase: - """ - DEPRECATED: Use get_connector() instead. - This method is kept for backward compatibility but just calls get_connector(). - - :param account_name: The name of the account. - :param connector_name: The name of the connector. - :param db_manager: Database manager (ignored, use constructor instead). - :return: The initialized connector instance. - """ - logging.warning(f"initialize_connector_with_tracking is deprecated, use get_connector() instead") - return await self.get_connector(account_name, connector_name) - def _start_network_without_order_book(self, connector: ExchangePyBase): """ Start connector network tasks except the order book tracker. 
From f12815917fbc182e32b53cdc5411eeaa86c51fb1 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 19 Jun 2025 19:29:09 +0200 Subject: [PATCH 105/244] (feat) get connector status --- services/docker_service.py | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/services/docker_service.py b/services/docker_service.py index c9718aed..7dec7e37 100644 --- a/services/docker_service.py +++ b/services/docker_service.py @@ -79,6 +79,21 @@ def start_container(self, container_name): except DockerException as e: return str(e) + def get_container_status(self, container_name): + """Get the status of a container""" + try: + container = self.client.containers.get(container_name) + return { + "success": True, + "state": { + "status": container.status, + "running": container.status == "running", + "exit_code": getattr(container.attrs.get("State", {}), "ExitCode", None) + } + } + except DockerException as e: + return {"success": False, "message": str(e)} + def remove_container(self, container_name, force=True): try: container = self.client.containers.get(container_name) From e8495229571ac6da59ba4b1de892527ad7bb798e Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 19 Jun 2025 19:29:32 +0200 Subject: [PATCH 106/244] (feat) move leverage to accounts --- routers/accounts.py | 107 ++++++++++++++++++++++++++------------------ 1 file changed, 64 insertions(+), 43 deletions(-) diff --git a/routers/accounts.py b/routers/accounts.py index 25458065..bc15f242 100644 --- a/routers/accounts.py +++ b/routers/accounts.py @@ -16,6 +16,11 @@ file_system = FileSystemUtil(base_path="bots/credentials") +class LeverageRequest(BaseModel): + trading_pair: str + leverage: int + + @router.get("/connectors", response_model=List[str]) @@ -43,24 +48,6 @@ async def get_connector_config_map(connector_name: str, accounts_service: Accoun return accounts_service.get_connector_config_map(connector_name) -@router.get("/all-connectors-config-map", response_model=Dict[str, List[str]]) -async def get_all_connectors_config_map(accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Get configuration fields required for all connectors. - - Returns: - Dictionary mapping connector names to their required configuration fields - """ - all_connectors = list(AllConnectorSettings.get_connector_settings().keys()) - config_maps = {} - for connector_name in all_connectors: - try: - config_maps[connector_name] = accounts_service.get_connector_config_map(connector_name) - except Exception as e: - config_maps[connector_name] = [] - return config_maps - - @router.get("/", response_model=List[str]) async def list_accounts(accounts_service: AccountsService = Depends(get_accounts_service)): """ @@ -72,6 +59,29 @@ async def list_accounts(accounts_service: AccountsService = Depends(get_accounts return accounts_service.list_accounts() +@router.get("/{account_name}/credentials", response_model=List[str]) +async def list_account_credentials(account_name: str, + accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get a list of all connectors that have credentials configured for a specific account. 
+ + Args: + account_name: Name of the account to list credentials for + + Returns: + List of connector names that have credentials configured + + Raises: + HTTPException: 404 if account not found + """ + try: + credentials = accounts_service.list_credentials(account_name) + # Remove .yml extension from filenames + return [cred.replace('.yml', '') for cred in credentials] + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) @router.post("/add-account", status_code=status.HTTP_201_CREATED) @@ -164,31 +174,6 @@ async def add_credential(account_name: str, connector_name: str, credentials: Di raise HTTPException(status_code=400, detail=str(e)) -# Account-specific credential management -@router.get("/{account_name}/credentials", response_model=List[str]) -async def list_account_credentials(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Get a list of all connectors that have credentials configured for a specific account. - - Args: - account_name: Name of the account to list credentials for - - Returns: - List of connector names that have credentials configured - - Raises: - HTTPException: 404 if account not found - """ - try: - credentials = accounts_service.list_credentials(account_name) - # Remove .yml extension from filenames - return [cred.replace('.yml', '') for cred in credentials] - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) - - class PositionModeRequest(BaseModel): position_mode: str @@ -256,3 +241,39 @@ async def get_position_mode( except Exception as e: raise HTTPException(status_code=500, detail=str(e)) + +@router.post("/{account_name}/{connector_name}/leverage") +async def set_leverage( + account_name: str, + connector_name: str, + request: LeverageRequest, + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Set leverage for a specific trading pair on a perpetual connector. 
+ + Args: + account_name: Name of the account + connector_name: Name of the perpetual connector + request: Leverage request with trading pair and leverage value + accounts_service: Injected accounts service + + Returns: + Dictionary with success status and message + + Raises: + HTTPException: 400 for invalid parameters or non-perpetual connector, 404 for account/connector not found, 500 for execution errors + """ + try: + result = await accounts_service.set_leverage( + account_name=account_name, + connector_name=connector_name, + trading_pair=request.trading_pair, + leverage=request.leverage + ) + return result + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Unexpected error setting leverage: {str(e)}") + From 743d9dd9511e39dff2bb03cb3e71f9b7f5f6aecf Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 19 Jun 2025 19:29:54 +0200 Subject: [PATCH 107/244] (feat) add method to run the stop in the background --- routers/bot_orchestration.py | 176 +++++++++++++++++++++++------------ 1 file changed, 115 insertions(+), 61 deletions(-) diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py index f7fac1d4..9af54c25 100644 --- a/routers/bot_orchestration.py +++ b/routers/bot_orchestration.py @@ -2,7 +2,7 @@ import os import asyncio from datetime import datetime -from fastapi import APIRouter, HTTPException, Depends +from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks from models import StartBotAction, StopBotAction, HummingbotInstanceConfig, V2ControllerDeployment from services.bots_orchestrator import BotsOrchestrator @@ -151,9 +151,101 @@ async def stop_bot(action: StopBotAction, bots_manager: BotsOrchestrator = Depen return {"status": "success", "response": response} +async def _background_stop_and_archive( + bot_name: str, + actual_bot_name: str, + container_name: str, + bot_name_for_orchestrator: str, + skip_order_cancellation: bool, + archive_locally: bool, + s3_bucket: str, + bots_manager: BotsOrchestrator, + docker_manager: DockerService, + bot_archiver: BotArchiver +): + """Background task to handle the stop and archive process""" + try: + logging.info(f"Starting background stop-and-archive for {bot_name}") + + # Step 1: Stop the bot trading process + logging.info(f"Stopping bot trading process for {bot_name_for_orchestrator}") + stop_response = await bots_manager.stop_bot( + bot_name_for_orchestrator, + skip_order_cancellation=skip_order_cancellation, + async_backend=True # Always use async for background tasks + ) + + if not stop_response or not stop_response.get("success", False): + error_msg = stop_response.get('error', 'Unknown error') if stop_response else 'No response from bot orchestrator' + logging.error(f"Failed to stop bot process: {error_msg}") + return + + # Step 2: Wait for graceful shutdown (15 seconds as requested) + logging.info(f"Waiting 15 seconds for bot {bot_name} to gracefully shutdown") + await asyncio.sleep(15) + + # Step 3: Stop the container with monitoring + max_retries = 10 + retry_interval = 2 + container_stopped = False + + for i in range(max_retries): + logging.info(f"Attempting to stop container {container_name} (attempt {i+1}/{max_retries})") + stop_container_response = docker_manager.stop_container(container_name) + + if stop_container_response.get("success", False): + container_stopped = True + break + + # Check if container is already stopped + container_status = docker_manager.get_container_status(container_name) + if container_status.get("state", 
{}).get("status") == "exited": + container_stopped = True + logging.info(f"Container {container_name} is already stopped") + break + + await asyncio.sleep(retry_interval) + + if not container_stopped: + logging.error(f"Failed to stop container {container_name} after {max_retries} attempts") + return + + # Step 4: Archive the bot data + instance_dir = os.path.join('bots', 'instances', container_name) + logging.info(f"Archiving bot data from {instance_dir}") + + try: + if archive_locally: + bot_archiver.archive_locally(container_name, instance_dir) + else: + bot_archiver.archive_and_upload(container_name, instance_dir, bucket_name=s3_bucket) + logging.info(f"Successfully archived bot data for {container_name}") + except Exception as e: + logging.error(f"Archive failed: {str(e)}") + # Continue with removal even if archive fails + + # Step 5: Remove the container + logging.info(f"Removing container {container_name}") + remove_response = docker_manager.remove_container(container_name, force=False) + + if not remove_response.get("success"): + # If graceful remove fails, try force remove + logging.warning("Graceful container removal failed, attempting force removal") + remove_response = docker_manager.remove_container(container_name, force=True) + + if remove_response.get("success"): + logging.info(f"Successfully completed stop-and-archive for bot {bot_name}") + else: + logging.error(f"Failed to remove container {container_name}") + + except Exception as e: + logging.error(f"Error in background stop-and-archive for {bot_name}: {str(e)}") + + @router.post("/stop-and-archive-bot/{bot_name}") async def stop_and_archive_bot( bot_name: str, + background_tasks: BackgroundTasks, skip_order_cancellation: bool = True, async_backend: bool = True, archive_locally: bool = True, @@ -164,13 +256,15 @@ async def stop_and_archive_bot( bot_archiver: BotArchiver = Depends(get_bot_archiver) ): """ - Gracefully stop a bot and archive its data. - This combines the complete shutdown workflow: + Gracefully stop a bot and archive its data in the background. + This initiates a background task that will: 1. Stop the bot trading process via MQTT - 2. Wait for graceful shutdown - 3. Stop the Docker container + 2. Wait 15 seconds for graceful shutdown + 3. Monitor and stop the Docker container 4. Archive the bot data (locally or to S3) 5. Remove the container + + Returns immediately with a success message while the process continues in the background. 
""" try: # Step 1: Normalize bot name and container name @@ -205,77 +299,37 @@ async def stop_and_archive_bot( } } - # Step 3: Stop the bot trading process # Use the format that's actually stored in active bots bot_name_for_orchestrator = container_name if container_name in active_bots else actual_bot_name - logging.info(f"Stopping bot trading process for {bot_name_for_orchestrator}") - stop_response = await bots_manager.stop_bot( - bot_name_for_orchestrator, + + # Add the background task + background_tasks.add_task( + _background_stop_and_archive, + bot_name=bot_name, + actual_bot_name=actual_bot_name, + container_name=container_name, + bot_name_for_orchestrator=bot_name_for_orchestrator, skip_order_cancellation=skip_order_cancellation, - async_backend=async_backend + archive_locally=archive_locally, + s3_bucket=s3_bucket, + bots_manager=bots_manager, + docker_manager=docker_manager, + bot_archiver=bot_archiver ) - if not stop_response or not stop_response.get("success", False): - error_msg = stop_response.get('error', 'Unknown error') if stop_response else 'No response from bot orchestrator' - return { - "status": "error", - "message": f"Failed to stop bot process: {error_msg}", - "details": { - "input_name": bot_name, - "actual_bot_name": actual_bot_name, - "container_name": container_name, - "stop_response": stop_response - } - } - - # Step 3: Wait a bit for graceful shutdown - await asyncio.sleep(5) # Give the bot time to clean up - - # Step 4: Stop the container - logging.info(f"Stopping container {container_name}") - stop_container_response = docker_manager.stop_container(container_name) - - if not stop_container_response.get("success", True): - logging.warning(f"Container stop returned: {stop_container_response}") - - # Step 5: Archive the bot data - instance_dir = os.path.join('bots', 'instances', container_name) - logging.info(f"Archiving bot data from {instance_dir}") - - try: - if archive_locally: - bot_archiver.archive_locally(container_name, instance_dir) - else: - bot_archiver.archive_and_upload(container_name, instance_dir, bucket_name=s3_bucket) - except Exception as e: - logging.error(f"Archive failed: {str(e)}") - # Continue with removal even if archive fails - - # Step 6: Remove the container - logging.info(f"Removing container {container_name}") - remove_response = docker_manager.remove_container(container_name, force=False) - - if not remove_response.get("success"): - # If graceful remove fails, try force remove - logging.warning("Graceful container removal failed, attempting force removal") - remove_response = docker_manager.remove_container(container_name, force=True) - return { "status": "success", - "message": f"Bot {actual_bot_name} stopped and archived successfully", + "message": f"Stop and archive process started for bot {actual_bot_name}", "details": { "input_name": bot_name, "actual_bot_name": actual_bot_name, "container_name": container_name, - "bot_stopped": True, - "container_stopped": stop_container_response.get("success", True), - "archived": archive_locally or s3_bucket is not None, - "container_removed": remove_response.get("success", False) + "process": "The bot will be gracefully stopped, archived, and removed in the background. This process typically takes 20-30 seconds." 
} } except Exception as e: - logging.error(f"Error in stop_and_archive_bot for {bot_name}: {str(e)}") + logging.error(f"Error initiating stop_and_archive_bot for {bot_name}: {str(e)}") raise HTTPException(status_code=500, detail=str(e)) From 230ebff48cbed257a58feee1989f6a7970eda770 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 19 Jun 2025 19:30:03 +0200 Subject: [PATCH 108/244] (feat) remove set leverage from trading --- routers/trading.py | 30 +----------------------------- 1 file changed, 1 insertion(+), 29 deletions(-) diff --git a/routers/trading.py b/routers/trading.py index 31dcf674..e7825c23 100644 --- a/routers/trading.py +++ b/routers/trading.py @@ -7,7 +7,7 @@ from services.accounts_service import AccountsService from deps import get_accounts_service, get_market_data_feed_manager from models import PaginatedResponse -from models.bot import TradeRequest, TradeResponse, LeverageRequest +from models.bot import TradeRequest, TradeResponse router = APIRouter(tags=["Trading"], prefix="/trading") @@ -171,34 +171,6 @@ async def place_trade(trade_request: TradeRequest, raise HTTPException(status_code=500, detail=f"Unexpected error placing trade: {str(e)}") -@router.post("/leverage", response_model=Dict[str, str], status_code=status.HTTP_200_OK) -async def set_leverage(leverage_request: LeverageRequest, - accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Set leverage for a specific trading pair on a perpetual connector. - - Args: - leverage_request: Leverage request with account, connector, trading pair, and leverage value - accounts_service: Injected accounts service - - Returns: - Dictionary with success status and message - - Raises: - HTTPException: 400 for invalid parameters or non-perpetual connector, 404 for account/connector not found, 500 for execution errors - """ - try: - result = await accounts_service.set_leverage( - account_name=leverage_request.account_name, - connector_name=leverage_request.connector_name, - trading_pair=leverage_request.trading_pair, - leverage=leverage_request.leverage - ) - return result - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Unexpected error setting leverage: {str(e)}") # Order Management From fc0cc34aacde6b21740c4242713563b3be220dda Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 20 Jun 2025 17:35:56 +0200 Subject: [PATCH 109/244] (feat) remove data folder --- bots/data/.gitignore | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 bots/data/.gitignore diff --git a/bots/data/.gitignore b/bots/data/.gitignore deleted file mode 100644 index e69de29b..00000000 From 2f35f941f24d9cd7e066bb82006950e4414dbd2a Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 20 Jun 2025 17:36:02 +0200 Subject: [PATCH 110/244] (feat) update v2 script version --- bots/scripts/v2_with_controllers.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/bots/scripts/v2_with_controllers.py b/bots/scripts/v2_with_controllers.py index 909b3b1f..df94f08a 100644 --- a/bots/scripts/v2_with_controllers.py +++ b/bots/scripts/v2_with_controllers.py @@ -37,7 +37,6 @@ def __init__(self, connectors: Dict[str, ConnectorBase], config: V2WithControlle super().__init__(connectors, config) self.config = config self.max_pnl_by_controller = {} - self.performance_reports = {} self.max_global_pnl = Decimal("0") self.drawdown_exited_controllers = [] self.closed_executors_buffer: int = 30 @@ -64,7 +63,6 @@ async def on_stop(self): def on_tick(self): 
super().on_tick() - self.performance_reports = {controller_id: self.executor_orchestrator.generate_performance_report(controller_id=controller_id).dict() for controller_id in self.controllers.keys()} self.check_manual_kill_switch() self.control_max_drawdown() self.send_performance_report() @@ -79,7 +77,7 @@ def check_max_controller_drawdown(self): for controller_id, controller in self.controllers.items(): if controller.status != RunnableStatus.RUNNING: continue - controller_pnl = self.performance_reports[controller_id]["global_pnl_quote"] + controller_pnl = self.get_performance_report(controller_id).global_pnl_quote last_max_pnl = self.max_pnl_by_controller[controller_id] if controller_pnl > last_max_pnl: self.max_pnl_by_controller[controller_id] = controller_pnl @@ -89,7 +87,7 @@ def check_max_controller_drawdown(self): self.logger().info(f"Controller {controller_id} reached max drawdown. Stopping the controller.") controller.stop() executors_order_placed = self.filter_executors( - executors=self.executors_info[controller_id], + executors=self.get_executors_by_controller(controller_id), filter_func=lambda x: x.is_active and not x.is_trading, ) self.executor_orchestrator.execute_actions( @@ -98,7 +96,7 @@ def check_max_controller_drawdown(self): self.drawdown_exited_controllers.append(controller_id) def check_max_global_drawdown(self): - current_global_pnl = sum([report["global_pnl_quote"] for report in self.performance_reports.values()]) + current_global_pnl = sum([self.get_performance_report(controller_id).global_pnl_quote for controller_id in self.controllers.keys()]) if current_global_pnl > self.max_global_pnl: self.max_global_pnl = current_global_pnl else: @@ -110,7 +108,8 @@ def check_max_global_drawdown(self): def send_performance_report(self): if self.current_timestamp - self._last_performance_report_timestamp >= self.performance_report_interval and self.mqtt_enabled: - self._pub(self.performance_reports) + performance_reports = {controller_id: self.get_performance_report(controller_id).dict() for controller_id in self.controllers.keys()} + self._pub(performance_reports) self._last_performance_report_timestamp = self.current_timestamp def check_manual_kill_switch(self): From bd9cf898515066a145af6d0ae2ae1dd988cbf7a1 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 20 Jun 2025 17:36:18 +0200 Subject: [PATCH 111/244] (feat) refactor bots models --- models/__init__.py | 177 +++++++++++++++++++++++++++++++----- models/bot.py | 108 ---------------------- models/bot_orchestration.py | 113 +++++++++++++++++++++++ 3 files changed, 267 insertions(+), 131 deletions(-) delete mode 100644 models/bot.py create mode 100644 models/bot_orchestration.py diff --git a/models/__init__.py b/models/__init__.py index ce7f213c..296884c9 100644 --- a/models/__init__.py +++ b/models/__init__.py @@ -1,54 +1,185 @@ -# Bot models -from .bot import ( - ControllerType, - Script, - ScriptConfig, - Controller, - ControllerConfig, +""" +Model definitions for the Backend API. + +Each model file corresponds to a router file with the same name. +Models are organized by functional domain to match the API structure. 
+""" + +# Bot orchestration models (bot lifecycle management) +from .bot_orchestration import ( BotAction, StartBotAction, StopBotAction, ImportStrategyAction, ConfigureBotAction, ShortcutAction, + BotStatus, + BotHistoryRequest, + BotHistoryResponse, + MQTTStatus, + AllBotsStatusResponse, + StopAndArchiveRequest, + StopAndArchiveResponse, + V2ScriptDeployment, + V2ControllerDeployment, +) + +# Trading models +from .trading import ( + TradeRequest, + TradeResponse, + TokenInfo, + ConnectorBalance, + AccountBalance, + PortfolioState, + OrderInfo, + ActiveOrdersResponse, + OrderSummary, + TradeInfo, + TradingRulesInfo, + OrderTypesResponse, +) + +# Controller models +from .controllers import ( + ControllerType, + Controller, + ControllerResponse, + ControllerConfig, + ControllerConfigResponse, +) + +# Script models +from .scripts import ( + Script, + ScriptResponse, + ScriptConfig, + ScriptConfigResponse, +) + +# Database models +from .databases import ( + DatabaseInfo, + DatabaseListResponse, + DatabaseReadRequest, + DatabaseReadResponse, + CheckpointRequest, + CheckpointResponse, + CheckpointListResponse, + CheckpointData, + CheckpointLoadRequest, +) + +# Performance models +from .performance import ( + ExecutorInfo, + PerformanceRequest, + PerformanceResults, + PerformanceResponse, +) + +# Market data models +from .market_data import ( + CandleData, + CandlesResponse, + ActiveFeedInfo, + ActiveFeedsResponse, + MarketDataSettings, + TradingRulesResponse, + SupportedOrderTypesResponse, +) + +# Account models +from .accounts import ( + LeverageRequest, + PositionModeRequest, + CredentialRequest, ) -# Deployment models -from .deployment import V2ScriptDeployment, V2ControllerDeployment # Docker models from .docker import DockerImage +# Backtesting models +from .backtesting import BacktestingConfig + # Pagination models from .pagination import PaginatedResponse, PaginationParams, TimeRangePaginationParams -# Backward compatibility aliases -HummingbotInstanceConfig = V2ScriptDeployment # For backward compatibility -ImageName = DockerImage # For backward compatibility - __all__ = [ - # Bot models - "ControllerType", - "Script", - "ScriptConfig", - "Controller", - "ControllerConfig", + # Bot orchestration models "BotAction", "StartBotAction", "StopBotAction", "ImportStrategyAction", "ConfigureBotAction", "ShortcutAction", - # Deployment models + "BotStatus", + "BotHistoryRequest", + "BotHistoryResponse", + "MQTTStatus", + "AllBotsStatusResponse", + "StopAndArchiveRequest", + "StopAndArchiveResponse", "V2ScriptDeployment", "V2ControllerDeployment", + # Trading models + "TradeRequest", + "TradeResponse", + "TokenInfo", + "ConnectorBalance", + "AccountBalance", + "PortfolioState", + "OrderInfo", + "ActiveOrdersResponse", + "OrderSummary", + "TradeInfo", + "TradingRulesInfo", + "OrderTypesResponse", + # Controller models + "ControllerType", + "Controller", + "ControllerResponse", + "ControllerConfig", + "ControllerConfigResponse", + # Script models + "Script", + "ScriptResponse", + "ScriptConfig", + "ScriptConfigResponse", + # Database models + "DatabaseInfo", + "DatabaseListResponse", + "DatabaseReadRequest", + "DatabaseReadResponse", + "CheckpointRequest", + "CheckpointResponse", + "CheckpointListResponse", + "CheckpointData", + "CheckpointLoadRequest", + # Performance models + "ExecutorInfo", + "PerformanceRequest", + "PerformanceResults", + "PerformanceResponse", + # Market data models + "CandleData", + "CandlesResponse", + "ActiveFeedInfo", + "ActiveFeedsResponse", + "MarketDataSettings", + 
"TradingRulesResponse", + "SupportedOrderTypesResponse", + # Account models + "LeverageRequest", + "PositionModeRequest", + "CredentialRequest", # Docker models "DockerImage", + # Backtesting models + "BacktestingConfig", # Pagination models "PaginatedResponse", "PaginationParams", "TimeRangePaginationParams", - # Backward compatibility - "HummingbotInstanceConfig", # Alias for V2ScriptDeployment - "ImageName", # Alias for DockerImage ] \ No newline at end of file diff --git a/models/bot.py b/models/bot.py deleted file mode 100644 index 94854a64..00000000 --- a/models/bot.py +++ /dev/null @@ -1,108 +0,0 @@ -from typing import Any, Dict, Optional -from pydantic import BaseModel, Field -from enum import Enum -from decimal import Decimal -from hummingbot.core.data_type.common import OrderType, TradeType, PositionAction - - -class ControllerType(str, Enum): - DIRECTIONAL_TRADING = "directional_trading" - MARKET_MAKING = "market_making" - GENERIC = "generic" - - -class FileContent(BaseModel): - """Base model for file content""" - content: str = Field(description="File content") - - -class ConfigContent(BaseModel): - """Base model for configuration content""" - content: Dict[str, Any] = Field(description="Configuration content as dictionary") - - -class TypedFileContent(FileContent): - """File content with a type classification""" - type: Optional[ControllerType] = Field(default=None, description="Content category") - - -# Specific models using base classes -class Script(FileContent): - """Python script content""" - pass - - -class ScriptConfig(ConfigContent): - """Script configuration content""" - pass - - -class Controller(TypedFileContent): - """Controller content with optional type (type can come from URL path)""" - pass - - -class ControllerConfig(ConfigContent): - """Controller configuration content""" - pass - - -class TradeRequest(BaseModel): - """Request model for placing trades""" - account_name: str = Field(description="Name of the account to trade with") - connector_name: str = Field(description="Name of the connector/exchange") - trading_pair: str = Field(description="Trading pair (e.g., BTC-USDT)") - trade_type: TradeType = Field(description="Whether to buy or sell") - amount: Decimal = Field(description="Amount to trade", gt=0) - order_type: OrderType = Field(default=OrderType.LIMIT, description="Type of order") - price: Optional[Decimal] = Field(default=None, description="Price for limit orders") - position_action: PositionAction = Field(default=PositionAction.OPEN, description="Position action for perpetual contracts (OPEN/CLOSE)") - - -class TradeResponse(BaseModel): - """Response model for trade execution""" - order_id: str = Field(description="Client order ID assigned by the connector") - account_name: str = Field(description="Account used for the trade") - connector_name: str = Field(description="Connector used for the trade") - trading_pair: str = Field(description="Trading pair") - trade_type: TradeType = Field(description="Trade type") - amount: Decimal = Field(description="Trade amount") - order_type: OrderType = Field(description="Order type") - price: Optional[Decimal] = Field(description="Order price") - status: str = Field(default="submitted", description="Order status") - - -class LeverageRequest(BaseModel): - """Request model for setting leverage on perpetual connectors""" - account_name: str = Field(description="Name of the account") - connector_name: str = Field(description="Name of the perpetual connector") - trading_pair: str = Field(description="Trading pair 
(e.g., BTC-USDT)") - leverage: int = Field(description="Leverage value (typically 1-125)", ge=1, le=125) - - -class BotAction(BaseModel): - bot_name: str = Field(description="Name of the bot instance to act upon") - - -class StartBotAction(BotAction): - log_level: Optional[str] = Field(default=None, description="Logging level (DEBUG, INFO, WARNING, ERROR)") - script: Optional[str] = Field(default=None, description="Script name to run (without .py extension)") - conf: Optional[str] = Field(default=None, description="Configuration file name (without .yml extension)") - async_backend: bool = Field(default=False, description="Whether to run in async backend mode") - - -class StopBotAction(BotAction): - skip_order_cancellation: bool = Field(default=False, description="Whether to skip cancelling open orders when stopping") - async_backend: bool = Field(default=False, description="Whether to run in async backend mode") - - -class ImportStrategyAction(BotAction): - strategy: str = Field(description="Name of the strategy to import") - - -class ConfigureBotAction(BotAction): - params: dict = Field(description="Configuration parameters to update") - - -class ShortcutAction(BotAction): - params: list = Field(description="List of shortcut parameters") \ No newline at end of file diff --git a/models/bot_orchestration.py b/models/bot_orchestration.py new file mode 100644 index 00000000..61ff5341 --- /dev/null +++ b/models/bot_orchestration.py @@ -0,0 +1,113 @@ +from typing import Any, Dict, Optional, List +from pydantic import BaseModel, Field +from enum import Enum + + +class BotAction(BaseModel): + """Base class for bot actions""" + bot_name: str = Field(description="Name of the bot instance to act upon") + + +class StartBotAction(BotAction): + """Action to start a bot""" + log_level: Optional[str] = Field(default=None, description="Logging level (DEBUG, INFO, WARNING, ERROR)") + script: Optional[str] = Field(default=None, description="Script name to run (without .py extension)") + conf: Optional[str] = Field(default=None, description="Configuration file name (without .yml extension)") + async_backend: bool = Field(default=False, description="Whether to run in async backend mode") + + +class StopBotAction(BotAction): + """Action to stop a bot""" + skip_order_cancellation: bool = Field(default=False, description="Whether to skip cancelling open orders when stopping") + async_backend: bool = Field(default=False, description="Whether to run in async backend mode") + + +class ImportStrategyAction(BotAction): + """Action to import a strategy for a bot""" + strategy: str = Field(description="Name of the strategy to import") + + +class ConfigureBotAction(BotAction): + """Action to configure bot parameters""" + params: dict = Field(description="Configuration parameters to update") + + +class ShortcutAction(BotAction): + """Action to execute bot shortcuts""" + params: list = Field(description="List of shortcut parameters") + + +class BotStatus(BaseModel): + """Status information for a bot""" + bot_name: str = Field(description="Bot name") + status: str = Field(description="Bot status (running, stopped, etc.)") + uptime: Optional[float] = Field(None, description="Bot uptime in seconds") + performance: Optional[Dict[str, Any]] = Field(None, description="Performance metrics") + + +class BotHistoryRequest(BaseModel): + """Request for bot trading history""" + bot_name: str = Field(description="Bot name") + days: int = Field(default=0, description="Number of days of history (0 for all)") + verbose: bool = 
Field(default=False, description="Include verbose information") + precision: Optional[int] = Field(None, description="Decimal precision for numbers") + timeout: float = Field(default=30.0, description="Request timeout in seconds") + + +class BotHistoryResponse(BaseModel): + """Response for bot trading history""" + bot_name: str = Field(description="Bot name") + history: Dict[str, Any] = Field(description="Trading history data") + status: str = Field(description="Response status") + + +class MQTTStatus(BaseModel): + """MQTT connection status""" + mqtt_connected: bool = Field(description="Whether MQTT is connected") + discovered_bots: List[str] = Field(description="List of discovered bots") + active_bots: List[str] = Field(description="List of active bots") + broker_host: str = Field(description="MQTT broker host") + broker_port: int = Field(description="MQTT broker port") + broker_username: Optional[str] = Field(None, description="MQTT broker username") + client_state: str = Field(description="MQTT client state") + + +class AllBotsStatusResponse(BaseModel): + """Response for all bots status""" + bots: List[BotStatus] = Field(description="List of bot statuses") + + +class StopAndArchiveRequest(BaseModel): + """Request for stopping and archiving a bot""" + skip_order_cancellation: bool = Field(default=True, description="Skip order cancellation") + async_backend: bool = Field(default=True, description="Use async backend") + archive_locally: bool = Field(default=True, description="Archive locally") + s3_bucket: Optional[str] = Field(None, description="S3 bucket for archiving") + timeout: float = Field(default=30.0, description="Operation timeout") + + +class StopAndArchiveResponse(BaseModel): + """Response for stop and archive operation""" + status: str = Field(description="Operation status") + message: str = Field(description="Status message") + details: Dict[str, Any] = Field(description="Operation details") + + +# Bot deployment models +class V2ScriptDeployment(BaseModel): + """Configuration for deploying a bot with a script""" + instance_name: str = Field(description="Unique name for the bot instance") + credentials_profile: str = Field(description="Name of the credentials profile to use") + image: str = Field(default="hummingbot/hummingbot:latest", description="Docker image for the Hummingbot instance") + script: Optional[str] = Field(default=None, description="Name of the script to run (without .py extension)") + script_config: Optional[str] = Field(default=None, description="Name of the script configuration file (without .yml extension)") + + +class V2ControllerDeployment(BaseModel): + """Configuration for deploying a bot with controllers""" + instance_name: str = Field(description="Unique name for the bot instance") + credentials_profile: str = Field(description="Name of the credentials profile to use") + controllers_config: List[str] = Field(description="List of controller configuration files to use (without .yml extension)") + max_global_drawdown: Optional[float] = Field(default=None, description="Maximum allowed global drawdown percentage (0.0-1.0)") + max_controller_drawdown: Optional[float] = Field(default=None, description="Maximum allowed per-controller drawdown percentage (0.0-1.0)") + image: str = Field(default="hummingbot/hummingbot:latest", description="Docker image for the Hummingbot instance") \ No newline at end of file From 37b7041b488b724bebd56a4b09dc34b95fc7b110 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 20 Jun 2025 17:36:26 +0200 Subject: [PATCH 112/244] 
(feat) remove deployment

---
 models/deployment.py | 19 -------------------
 1 file changed, 19 deletions(-)
 delete mode 100644 models/deployment.py

diff --git a/models/deployment.py b/models/deployment.py
deleted file mode 100644
index c0d60abc..00000000
--- a/models/deployment.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from typing import Optional, List
-from pydantic import BaseModel, Field
-
-
-class V2ScriptDeployment(BaseModel):
-    instance_name: str = Field(description="Unique name for the bot instance")
-    credentials_profile: str = Field(description="Name of the credentials profile to use")
-    image: str = Field(default="hummingbot/hummingbot:latest", description="Docker image for the Hummingbot instance")
-    script: Optional[str] = Field(default=None, description="Name of the script to run (without .py extension)")
-    script_config: Optional[str] = Field(default=None, description="Name of the script configuration file (without .yml extension)")
-
-
-class V2ControllerDeployment(BaseModel):
-    instance_name: str = Field(description="Unique name for the bot instance")
-    credentials_profile: str = Field(description="Name of the credentials profile to use")
-    controllers_config: List[str] = Field(description="List of controller configuration files to use (without .yml extension)")
-    max_global_drawdown: Optional[float] = Field(default=None, description="Maximum allowed global drawdown percentage (0.0-1.0)")
-    max_controller_drawdown: Optional[float] = Field(default=None, description="Maximum allowed per-controller drawdown percentage (0.0-1.0)")
-    image: str = Field(default="hummingbot/hummingbot:latest", description="Docker image for the Hummingbot instance")
\ No newline at end of file

From 555a5dcd52b4bab549252726d1781b177d4c49b2 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Fri, 20 Jun 2025 17:37:02 +0200
Subject: [PATCH 113/244] (feat) add files for controllers and scripts

---
 models/controllers.py | 52 +++++++++++++++++++++++++++++++++++++++++++
 models/scripts.py | 34 ++++++++++++++++++++++++++
 2 files changed, 86 insertions(+)
 create mode 100644 models/controllers.py
 create mode 100644 models/scripts.py

diff --git a/models/controllers.py b/models/controllers.py
new file mode 100644
index 00000000..a2eeff82
--- /dev/null
+++ b/models/controllers.py
@@ -0,0 +1,52 @@
+from typing import Dict, List, Optional, Any
+from pydantic import BaseModel, Field
+from enum import Enum
+
+
+class ControllerType(str, Enum):
+    """Types of controllers available"""
+    DIRECTIONAL_TRADING = "directional_trading"
+    MARKET_MAKING = "market_making"
+    GENERIC = "generic"
+
+
+# Controller file operations
+class Controller(BaseModel):
+    """Controller file content"""
+    content: str = Field(description="Controller source code")
+    type: Optional[ControllerType] = Field(None, description="Controller type (optional for flexibility)")
+
+
+class ControllerResponse(BaseModel):
+    """Response for getting a controller"""
+    name: str = Field(description="Controller name")
+    type: str = Field(description="Controller type")
+    content: str = Field(description="Controller source code")
+
+
+# Controller configuration operations
+class ControllerConfig(BaseModel):
+    """Controller configuration"""
+    controller_name: str = Field(description="Controller name")
+    controller_type: str = Field(description="Controller type")
+    connector_name: Optional[str] = Field(None, description="Connector name")
+    trading_pair: Optional[str] = Field(None, description="Trading pair")
+    total_amount_quote: Optional[float] = Field(None,
description="Total amount in quote currency") + + +class ControllerConfigResponse(BaseModel): + """Response for controller configuration with metadata""" + config_name: str = Field(description="Configuration name") + controller_name: str = Field(description="Controller name") + controller_type: str = Field(description="Controller type") + connector_name: Optional[str] = Field(None, description="Connector name") + trading_pair: Optional[str] = Field(None, description="Trading pair") + total_amount_quote: Optional[float] = Field(None, description="Total amount in quote currency") + error: Optional[str] = Field(None, description="Error message if config is malformed") + + +# Bot-specific controller configurations +class BotControllerConfig(BaseModel): + """Controller configuration for a specific bot""" + config_name: str = Field(description="Configuration name") + config_data: Dict[str, Any] = Field(description="Configuration data") \ No newline at end of file diff --git a/models/scripts.py b/models/scripts.py new file mode 100644 index 00000000..fd60b07d --- /dev/null +++ b/models/scripts.py @@ -0,0 +1,34 @@ +from typing import Dict, List, Optional, Any +from pydantic import BaseModel, Field + + +# Script file operations +class Script(BaseModel): + """Script file content""" + content: str = Field(description="Script source code") + + +class ScriptResponse(BaseModel): + """Response for getting a script""" + name: str = Field(description="Script name") + content: str = Field(description="Script source code") + + +# Script configuration operations +class ScriptConfig(BaseModel): + """Script configuration content""" + config_name: str = Field(description="Configuration name") + script_file_name: str = Field(description="Script file name") + controllers_config: List[str] = Field(default=[], description="List of controller configurations") + candles_config: List[Dict[str, Any]] = Field(default=[], description="Candles configuration") + markets: Dict[str, Any] = Field(default={}, description="Markets configuration") + + +class ScriptConfigResponse(BaseModel): + """Response for script configuration with metadata""" + config_name: str = Field(description="Configuration name") + script_file_name: str = Field(description="Script file name") + controllers_config: List[str] = Field(default=[], description="List of controller configurations") + candles_config: List[Dict[str, Any]] = Field(default=[], description="Candles configuration") + markets: Dict[str, Any] = Field(default={}, description="Markets configuration") + error: Optional[str] = Field(None, description="Error message if config is malformed") \ No newline at end of file From 5a7035d52b8dc0147e9e833b56b9cee7ab0b8a27 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 20 Jun 2025 17:37:17 +0200 Subject: [PATCH 114/244] (feat) add accounts and trading --- models/accounts.py | 18 +++++++ models/trading.py | 118 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 136 insertions(+) create mode 100644 models/accounts.py create mode 100644 models/trading.py diff --git a/models/accounts.py b/models/accounts.py new file mode 100644 index 00000000..4f7b8c2a --- /dev/null +++ b/models/accounts.py @@ -0,0 +1,18 @@ +from pydantic import BaseModel, Field +from typing import Dict, Any + + +class LeverageRequest(BaseModel): + """Request model for setting leverage on perpetual connectors""" + trading_pair: str = Field(description="Trading pair (e.g., BTC-USDT)") + leverage: int = Field(description="Leverage value (typically 1-125)", ge=1, le=125) 
+ + +class PositionModeRequest(BaseModel): + """Request model for setting position mode on perpetual connectors""" + position_mode: str = Field(description="Position mode (HEDGE or ONEWAY)") + + +class CredentialRequest(BaseModel): + """Request model for adding connector credentials""" + credentials: Dict[str, Any] = Field(description="Connector credentials dictionary") \ No newline at end of file diff --git a/models/trading.py b/models/trading.py new file mode 100644 index 00000000..de020e6c --- /dev/null +++ b/models/trading.py @@ -0,0 +1,118 @@ +from typing import Dict, List, Optional, Any +from pydantic import BaseModel, Field +from decimal import Decimal +from datetime import datetime +from hummingbot.core.data_type.common import OrderType, TradeType, PositionAction + + +class TradeRequest(BaseModel): + """Request model for placing trades""" + account_name: str = Field(description="Name of the account to trade with") + connector_name: str = Field(description="Name of the connector/exchange") + trading_pair: str = Field(description="Trading pair (e.g., BTC-USDT)") + trade_type: TradeType = Field(description="Whether to buy or sell") + amount: Decimal = Field(description="Amount to trade", gt=0) + order_type: OrderType = Field(default=OrderType.LIMIT, description="Type of order") + price: Optional[Decimal] = Field(default=None, description="Price for limit orders") + position_action: PositionAction = Field(default=PositionAction.OPEN, description="Position action for perpetual contracts (OPEN/CLOSE)") + + +class TradeResponse(BaseModel): + """Response model for trade execution""" + order_id: str = Field(description="Client order ID assigned by the connector") + account_name: str = Field(description="Account used for the trade") + connector_name: str = Field(description="Connector used for the trade") + trading_pair: str = Field(description="Trading pair") + trade_type: TradeType = Field(description="Trade type") + amount: Decimal = Field(description="Trade amount") + order_type: OrderType = Field(description="Order type") + price: Optional[Decimal] = Field(description="Order price") + status: str = Field(default="submitted", description="Order status") + + +class TokenInfo(BaseModel): + """Information about a token balance""" + token: str = Field(description="Token symbol") + balance: Decimal = Field(description="Token balance") + value_usd: Optional[Decimal] = Field(None, description="USD value of the balance") + + +class ConnectorBalance(BaseModel): + """Balance information for a connector""" + connector_name: str = Field(description="Name of the connector") + tokens: List[TokenInfo] = Field(description="List of token balances") + + +class AccountBalance(BaseModel): + """Balance information for an account""" + account_name: str = Field(description="Name of the account") + connectors: List[ConnectorBalance] = Field(description="List of connector balances") + + +class PortfolioState(BaseModel): + """Complete portfolio state across all accounts""" + accounts: List[AccountBalance] = Field(description="List of account balances") + timestamp: datetime = Field(description="Timestamp of the portfolio state") + + +class OrderInfo(BaseModel): + """Information about an order""" + order_id: str = Field(description="Order ID") + client_order_id: str = Field(description="Client order ID") + account_name: str = Field(description="Account name") + connector_name: str = Field(description="Connector name") + trading_pair: str = Field(description="Trading pair") + order_type: str = 
Field(description="Order type") + trade_type: str = Field(description="Trade type (BUY/SELL)") + amount: Decimal = Field(description="Order amount") + price: Optional[Decimal] = Field(description="Order price") + filled_amount: Decimal = Field(description="Filled amount") + status: str = Field(description="Order status") + creation_timestamp: datetime = Field(description="Order creation time") + last_update_timestamp: datetime = Field(description="Last update time") + + +class ActiveOrdersResponse(BaseModel): + """Response for active orders""" + orders: Dict[str, OrderInfo] = Field(description="Dictionary of active orders") + + +class OrderSummary(BaseModel): + """Summary statistics for orders""" + total_orders: int = Field(description="Total number of orders") + filled_orders: int = Field(description="Number of filled orders") + cancelled_orders: int = Field(description="Number of cancelled orders") + fill_rate: float = Field(description="Order fill rate percentage") + total_volume_base: Decimal = Field(description="Total volume in base currency") + total_volume_quote: Decimal = Field(description="Total volume in quote currency") + avg_fill_time: Optional[float] = Field(description="Average fill time in seconds") + + +class TradeInfo(BaseModel): + """Information about a trade fill""" + trade_id: str = Field(description="Trade ID") + order_id: str = Field(description="Associated order ID") + account_name: str = Field(description="Account name") + connector_name: str = Field(description="Connector name") + trading_pair: str = Field(description="Trading pair") + trade_type: str = Field(description="Trade type (BUY/SELL)") + amount: Decimal = Field(description="Trade amount") + price: Decimal = Field(description="Trade price") + fee: Decimal = Field(description="Trade fee") + timestamp: datetime = Field(description="Trade timestamp") + + +class TradingRulesInfo(BaseModel): + """Trading rules for a trading pair""" + trading_pair: str = Field(description="Trading pair") + min_order_size: Decimal = Field(description="Minimum order size") + max_order_size: Optional[Decimal] = Field(description="Maximum order size") + min_price_increment: Decimal = Field(description="Minimum price increment") + min_base_amount_increment: Decimal = Field(description="Minimum base amount increment") + min_quote_amount_increment: Decimal = Field(description="Minimum quote amount increment") + + +class OrderTypesResponse(BaseModel): + """Response for supported order types""" + connector: str = Field(description="Connector name") + supported_order_types: List[str] = Field(description="List of supported order types") \ No newline at end of file From 7e0e8826d28f7604d8a8328cdbc346133aacd18d Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 20 Jun 2025 17:37:49 +0200 Subject: [PATCH 115/244] (feat) add extra models --- models/backtesting.py | 10 ++++++++ models/databases.py | 55 +++++++++++++++++++++++++++++++++++++++++++ models/market_data.py | 50 +++++++++++++++++++++++++++++++++++++++ models/performance.py | 33 ++++++++++++++++++++++++++ 4 files changed, 148 insertions(+) create mode 100644 models/backtesting.py create mode 100644 models/databases.py create mode 100644 models/market_data.py create mode 100644 models/performance.py diff --git a/models/backtesting.py b/models/backtesting.py new file mode 100644 index 00000000..c3cb5bd5 --- /dev/null +++ b/models/backtesting.py @@ -0,0 +1,10 @@ +from typing import Dict, Union +from pydantic import BaseModel + + +class BacktestingConfig(BaseModel): + start_time: int = 
1735689600 # 2025-01-01 00:00:00 + end_time: int = 1738368000 # 2025-02-01 00:00:00 + backtesting_resolution: str = "1m" + trade_cost: float = 0.0006 + config: Union[Dict, str] \ No newline at end of file diff --git a/models/databases.py b/models/databases.py new file mode 100644 index 00000000..8dc9c1d5 --- /dev/null +++ b/models/databases.py @@ -0,0 +1,55 @@ +from typing import Dict, List, Optional, Any +from pydantic import BaseModel, Field + + +class DatabaseInfo(BaseModel): + """Information about a database""" + db_name: str = Field(description="Database name") + db_path: str = Field(description="Database file path") + healthy: bool = Field(description="Whether the database is healthy") + status: Dict[str, Any] = Field(description="Database status information") + tables: Dict[str, str] = Field(description="Database tables data (JSON strings)") + + +class DatabaseListResponse(BaseModel): + """Response for listing databases""" + databases: List[str] = Field(description="List of database file paths") + + +class DatabaseReadRequest(BaseModel): + """Request for reading databases""" + db_paths: List[str] = Field(description="List of database paths to read") + + +class DatabaseReadResponse(BaseModel): + """Response for reading databases""" + databases: List[DatabaseInfo] = Field(description="List of database information") + + +class CheckpointRequest(BaseModel): + """Request for creating a checkpoint""" + db_paths: List[str] = Field(description="List of database paths to include in checkpoint") + + +class CheckpointResponse(BaseModel): + """Response for checkpoint operations""" + message: str = Field(description="Operation result message") + success: bool = Field(default=True, description="Whether the operation was successful") + + +class CheckpointListResponse(BaseModel): + """Response for listing checkpoints""" + checkpoints: List[str] = Field(description="List of checkpoint file paths") + + +class CheckpointData(BaseModel): + """Data loaded from a checkpoint""" + executors: str = Field(description="Executors data (JSON string)") + orders: str = Field(description="Orders data (JSON string)") + trade_fill: str = Field(description="Trade fill data (JSON string)") + controllers: str = Field(description="Controllers data (JSON string)") + + +class CheckpointLoadRequest(BaseModel): + """Request for loading a checkpoint""" + checkpoint_path: str = Field(description="Path to the checkpoint file to load") \ No newline at end of file diff --git a/models/market_data.py b/models/market_data.py new file mode 100644 index 00000000..e8f21a38 --- /dev/null +++ b/models/market_data.py @@ -0,0 +1,50 @@ +from typing import Dict, List, Optional, Any +from pydantic import BaseModel, Field +from datetime import datetime + + +class CandleData(BaseModel): + """Single candle data point""" + timestamp: datetime = Field(description="Candle timestamp") + open: float = Field(description="Opening price") + high: float = Field(description="Highest price") + low: float = Field(description="Lowest price") + close: float = Field(description="Closing price") + volume: float = Field(description="Trading volume") + + +class CandlesResponse(BaseModel): + """Response for candles data""" + candles: List[CandleData] = Field(description="List of candle data") + + +class ActiveFeedInfo(BaseModel): + """Information about an active market data feed""" + connector: str = Field(description="Connector name") + trading_pair: str = Field(description="Trading pair") + interval: str = Field(description="Candle interval") + last_access: 
datetime = Field(description="Last access time") + expires_at: datetime = Field(description="Expiration time") + + +class ActiveFeedsResponse(BaseModel): + """Response for active market data feeds""" + feeds: List[ActiveFeedInfo] = Field(description="List of active feeds") + + +class MarketDataSettings(BaseModel): + """Market data configuration settings""" + cleanup_interval: int = Field(description="Cleanup interval in seconds") + feed_timeout: int = Field(description="Feed timeout in seconds") + description: str = Field(description="Settings description") + + +class TradingRulesResponse(BaseModel): + """Response for trading rules""" + trading_pairs: Dict[str, Dict[str, Any]] = Field(description="Trading rules by pair") + + +class SupportedOrderTypesResponse(BaseModel): + """Response for supported order types""" + connector: str = Field(description="Connector name") + supported_order_types: List[str] = Field(description="List of supported order types") \ No newline at end of file diff --git a/models/performance.py b/models/performance.py new file mode 100644 index 00000000..81cb6c9c --- /dev/null +++ b/models/performance.py @@ -0,0 +1,33 @@ +from typing import Dict, List, Any +from pydantic import BaseModel, Field + + +class ExecutorInfo(BaseModel): + """Information about an executor""" + id: str = Field(description="Executor ID") + trades: List[Dict[str, Any]] = Field(description="List of executor trades") + orders: List[Dict[str, Any]] = Field(description="List of executor orders") + + +class PerformanceRequest(BaseModel): + """Request for performance analysis""" + executors: List[ExecutorInfo] = Field(description="List of executor data for analysis") + + +class PerformanceResults(BaseModel): + """Performance analysis results""" + total_pnl: float = Field(description="Total PnL") + total_pnl_pct: float = Field(description="Total PnL percentage") + total_volume: float = Field(description="Total trading volume") + total_trades: int = Field(description="Total number of trades") + win_rate: float = Field(description="Win rate percentage") + profit_factor: float = Field(description="Profit factor") + sharpe_ratio: float = Field(description="Sharpe ratio") + max_drawdown: float = Field(description="Maximum drawdown") + avg_trade_pnl: float = Field(description="Average trade PnL") + + +class PerformanceResponse(BaseModel): + """Response for performance analysis""" + executors: List[ExecutorInfo] = Field(description="Original executor data") + results: PerformanceResults = Field(description="Performance analysis results") \ No newline at end of file From f6720f8c2b6625456f839a8b15aeb726c71f5db0 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 20 Jun 2025 17:37:59 +0200 Subject: [PATCH 116/244] (feat) remove patches and let it fail --- patches/__init__.py | 6 ---- patches/config_helpers_patch.py | 49 --------------------------------- 2 files changed, 55 deletions(-) delete mode 100644 patches/__init__.py delete mode 100644 patches/config_helpers_patch.py diff --git a/patches/__init__.py b/patches/__init__.py deleted file mode 100644 index 0d281fbf..00000000 --- a/patches/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -""" -Patches for third-party libraries used in the backend API -""" -from .config_helpers_patch import apply_config_helpers_patch, remove_config_helpers_patch - -__all__ = ['apply_config_helpers_patch', 'remove_config_helpers_patch'] \ No newline at end of file diff --git a/patches/config_helpers_patch.py b/patches/config_helpers_patch.py deleted file mode 100644 index 
914a12ce..00000000 --- a/patches/config_helpers_patch.py +++ /dev/null @@ -1,49 +0,0 @@ -""" -Patch for Hummingbot's config_helpers.py to handle missing directories -""" -import logging -from pathlib import Path - -from hummingbot.client.config.config_helpers import ClientConfigAdapter - - -def patched_save_to_yml(yml_path: Path, cm: ClientConfigAdapter): - """ - Patched version of save_to_yml that creates directories if they don't exist - """ - try: - # Ensure the parent directory exists - yml_path.parent.mkdir(parents=True, exist_ok=True) - - cm_yml_str = cm.generate_yml_output_str_with_comments() - with open(yml_path, "w", encoding="utf-8") as outfile: - outfile.write(cm_yml_str) - except Exception as e: - logging.getLogger().error("Error writing configs: %s" % (str(e),), exc_info=True) - - -def apply_config_helpers_patch(): - """ - Apply the patch to hummingbot.client.config.config_helpers - """ - import hummingbot.client.config.config_helpers as config_helpers - - # Store the original function in case we need it - config_helpers._original_save_to_yml = config_helpers.save_to_yml - - # Replace with our patched version - config_helpers.save_to_yml = patched_save_to_yml - - logging.info("Applied config_helpers patch: save_to_yml now creates missing directories") - - -def remove_config_helpers_patch(): - """ - Remove the patch and restore original functionality - """ - import hummingbot.client.config.config_helpers as config_helpers - - if hasattr(config_helpers, '_original_save_to_yml'): - config_helpers.save_to_yml = config_helpers._original_save_to_yml - delattr(config_helpers, '_original_save_to_yml') - logging.info("Removed config_helpers patch: restored original save_to_yml") \ No newline at end of file From e7022e59fc17f17d1d3df060f2a5ecc6b566d301 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 20 Jun 2025 17:38:17 +0200 Subject: [PATCH 117/244] (feat) final version of accounts router --- routers/accounts.py | 262 +++++++++++++++++++++++++------------------- 1 file changed, 152 insertions(+), 110 deletions(-) diff --git a/routers/accounts.py b/routers/accounts.py index bc15f242..0450568a 100644 --- a/routers/accounts.py +++ b/routers/accounts.py @@ -3,31 +3,175 @@ from fastapi import APIRouter, HTTPException, Depends, Query from hummingbot.client.settings import AllConnectorSettings -from hummingbot.core.data_type.common import PositionMode -from pydantic import BaseModel from starlette import status from services.accounts_service import AccountsService -from utils.file_system import FileSystemUtil from deps import get_accounts_service from models import PaginatedResponse router = APIRouter(tags=["Accounts"], prefix="/accounts") -file_system = FileSystemUtil(base_path="bots/credentials") -class LeverageRequest(BaseModel): - trading_pair: str - leverage: int +# Portfolio & Account State Monitoring +@router.get("/portfolio/state", response_model=Dict[str, Dict[str, List[Dict]]]) +async def get_portfolio_state(accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get the current state of all accounts portfolio. 
+ + Returns: + Dict containing all account states with connector balances and token information + """ + return accounts_service.get_accounts_state() +@router.get("/portfolio/history", response_model=PaginatedResponse) +async def get_portfolio_history( + limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), + cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), + start_time: datetime = Query(default=None, description="Start time for filtering"), + end_time: datetime = Query(default=None, description="End time for filtering"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get the historical state of all accounts portfolio with pagination. + """ + try: + data, next_cursor, has_more = await accounts_service.load_account_state_history( + limit=limit, + cursor=cursor, + start_time=start_time, + end_time=end_time + ) + + return PaginatedResponse( + data=data, + pagination={ + "limit": limit, + "has_more": has_more, + "next_cursor": next_cursor, + "current_cursor": cursor + } + ) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/portfolio/state/{account_name}", response_model=Dict[str, List[Dict]]) +async def get_account_portfolio_state(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get current portfolio state of a specific account. + + Args: + account_name: Name of the account to get portfolio state for + + Returns: + Dictionary mapping connector names to lists of token information + + Raises: + HTTPException: 404 if account not found + """ + state = await accounts_service.get_account_current_state(account_name) + if not state: + raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") + return state + + +@router.get("/portfolio/history/{account_name}", response_model=PaginatedResponse) +async def get_account_portfolio_history( + account_name: str, + limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), + cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), + start_time: datetime = Query(default=None, description="Start time for filtering"), + end_time: datetime = Query(default=None, description="End time for filtering"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get historical portfolio state of a specific account with pagination. 
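Both history endpoints share the same cursor contract: feed the `next_cursor` from one page back in as `cursor` until `has_more` comes back false. A minimal client loop, with base URL and HTTP Basic credentials assumed rather than taken from this patch:

import httpx

BASE_URL = "http://localhost:8000"  # assumed deployment URL
AUTH = ("admin", "admin")           # assumed HTTP Basic credentials

cursor = None
while True:
    params = {"limit": 100}
    if cursor:
        params["cursor"] = cursor  # ISO timestamp returned by the previous page
    page = httpx.get(f"{BASE_URL}/accounts/portfolio/history", params=params, auth=AUTH).json()
    for snapshot in page["data"]:
        print(snapshot)  # each item is one historical portfolio snapshot
    if not page["pagination"]["has_more"]:
        break
    cursor = page["pagination"]["next_cursor"]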
+ + Args: + account_name: Name of the account to get history for + limit: Number of items per page (1-1000) + cursor: Cursor for pagination (ISO timestamp) + start_time: Start time for filtering results + end_time: End time for filtering results + + Returns: + Paginated response with historical account portfolio data + """ + data, next_cursor, has_more = await accounts_service.get_account_state_history( + account_name=account_name, + limit=limit, + cursor=cursor, + start_time=start_time, + end_time=end_time + ) + + return PaginatedResponse( + data=data, + pagination={ + "limit": limit, + "has_more": has_more, + "next_cursor": next_cursor, + "current_cursor": cursor, + "filters": { + "account_name": account_name, + "start_time": start_time.isoformat() if start_time else None, + "end_time": end_time.isoformat() if end_time else None + } + } + ) + + +@router.get("/portfolio/distribution") +async def get_portfolio_distribution(accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get portfolio distribution by tokens with percentages across all accounts. + + Returns: + Dictionary with token distribution including percentages, values, and breakdown by accounts/connectors + """ + return accounts_service.get_portfolio_distribution() + + +@router.get("/portfolio/distribution/{account_name}") +async def get_account_portfolio_distribution(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get portfolio distribution by tokens with percentages for a specific account. + + Args: + account_name: Name of the account to get distribution for + + Returns: + Dictionary with token distribution for the specified account + + Raises: + HTTPException: 404 if account not found + """ + result = accounts_service.get_portfolio_distribution(account_name) + + # Check if account exists by looking at the distribution + if result.get("token_count", 0) == 0 and not result.get("error") and account_name not in accounts_service.get_accounts_state(): + raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") + + return result + + +@router.get("/portfolio/accounts-distribution") +async def get_accounts_distribution(accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get portfolio distribution by accounts with percentages. + + Returns: + Dictionary with account distribution including percentages, values, and breakdown by connectors + """ + return accounts_service.get_account_distribution() @router.get("/connectors", response_model=List[str]) async def available_connectors(): """ Get a list of all available connectors. - + Returns: List of connector names supported by the system """ @@ -174,106 +318,4 @@ async def add_credential(account_name: str, connector_name: str, credentials: Di raise HTTPException(status_code=400, detail=str(e)) -class PositionModeRequest(BaseModel): - position_mode: str - -@router.post("/{account_name}/{connector_name}/position-mode") -async def set_position_mode( - account_name: str, - connector_name: str, - request: PositionModeRequest, - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Set position mode for a perpetual connector. 
- - Args: - account_name: Name of the account - connector_name: Name of the perpetual connector - position_mode: Position mode to set (HEDGE or ONEWAY) - - Returns: - Success message with status - - Raises: - HTTPException: 400 if not a perpetual connector or invalid position mode - """ - try: - # Convert string to PositionMode enum - mode = PositionMode[request.position_mode.upper()] - result = await accounts_service.set_position_mode(account_name, connector_name, mode) - return result - except KeyError: - raise HTTPException( - status_code=400, - detail=f"Invalid position mode '{request.position_mode}'. Must be 'HEDGE' or 'ONEWAY'" - ) - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) - - -@router.get("/{account_name}/{connector_name}/position-mode") -async def get_position_mode( - account_name: str, - connector_name: str, - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get current position mode for a perpetual connector. - - Args: - account_name: Name of the account - connector_name: Name of the perpetual connector - - Returns: - Dictionary with current position mode, connector name, and account name - - Raises: - HTTPException: 400 if not a perpetual connector - """ - try: - result = await accounts_service.get_position_mode(account_name, connector_name) - return result - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) - - -@router.post("/{account_name}/{connector_name}/leverage") -async def set_leverage( - account_name: str, - connector_name: str, - request: LeverageRequest, - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Set leverage for a specific trading pair on a perpetual connector. 
- - Args: - account_name: Name of the account - connector_name: Name of the perpetual connector - request: Leverage request with trading pair and leverage value - accounts_service: Injected accounts service - - Returns: - Dictionary with success status and message - - Raises: - HTTPException: 400 for invalid parameters or non-perpetual connector, 404 for account/connector not found, 500 for execution errors - """ - try: - result = await accounts_service.set_leverage( - account_name=account_name, - connector_name=connector_name, - trading_pair=request.trading_pair, - leverage=request.leverage - ) - return result - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Unexpected error setting leverage: {str(e)}") From 7a53e71c793eb693dcb7385696f2087ab262f744 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 20 Jun 2025 17:38:32 +0200 Subject: [PATCH 118/244] (feat) refactor backtesting and databases --- routers/backtesting.py | 12 +----------- routers/databases.py | 7 +++---- 2 files changed, 4 insertions(+), 15 deletions(-) diff --git a/routers/backtesting.py b/routers/backtesting.py index 410bea14..3d68ee9b 100644 --- a/routers/backtesting.py +++ b/routers/backtesting.py @@ -1,25 +1,15 @@ -from typing import Dict, Union - from fastapi import APIRouter from hummingbot.data_feed.candles_feed.candles_factory import CandlesFactory from hummingbot.strategy_v2.backtesting.backtesting_engine_base import BacktestingEngineBase -from pydantic import BaseModel from config import settings +from models.backtesting import BacktestingConfig router = APIRouter(tags=["Backtesting"], prefix="/backtesting") candles_factory = CandlesFactory() backtesting_engine = BacktestingEngineBase() -class BacktestingConfig(BaseModel): - start_time: int = 1735689600 # 2025-01-01 00:00:00 - end_time: int = 1738368000 # 2025-02-01 00:00:00 - backtesting_resolution: str = "1m" - trade_cost: float = 0.0006 - config: Union[Dict, str] - - @router.post("/run-backtesting") async def run_backtesting(backtesting_config: BacktestingConfig): """ diff --git a/routers/databases.py b/routers/databases.py index 40f253fc..1cf9f120 100644 --- a/routers/databases.py +++ b/routers/databases.py @@ -8,10 +8,9 @@ from utils.etl_databases import HummingbotDatabase, ETLPerformance from fastapi import APIRouter -from utils.file_system import FileSystemUtil +from utils.file_system import fs_util router = APIRouter(tags=["Databases"], prefix="/databases") -file_system = FileSystemUtil() @router.get("/", response_model=List[str]) @@ -22,7 +21,7 @@ async def list_databases(): Returns: List of database file paths """ - return file_system.list_databases() + return fs_util.list_databases() @router.post("/read", response_model=List[Dict[str, Any]]) @@ -111,7 +110,7 @@ async def list_checkpoints(full_path: bool): Returns: List of checkpoint file paths or names """ - return file_system.list_checkpoints(full_path) + return fs_util.list_checkpoints(full_path) @router.post("/checkpoints/load") From 9df69895038cee622bcbc3537eaa9eca40ab2de3 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 20 Jun 2025 17:38:44 +0200 Subject: [PATCH 119/244] (feat) final version of scripts and controllers --- routers/controllers.py | 37 ++++++++++++++++++------------------- routers/scripts.py | 23 +++++++++++------------ 2 files changed, 29 insertions(+), 31 deletions(-) diff --git a/routers/controllers.py b/routers/controllers.py index 49b9a310..c94e57a9 100644 --- a/routers/controllers.py +++ b/routers/controllers.py @@ -5,11 +5,10
@@ from fastapi import APIRouter, HTTPException from starlette import status -from models import Controller, ControllerConfig, ControllerType -from utils.file_system import FileSystemUtil +from models import Controller, ControllerType +from utils.file_system import fs_util router = APIRouter(tags=["Controllers"], prefix="/controllers") -file_system = FileSystemUtil() @router.get("/", response_model=Dict[str, List[str]]) @@ -23,7 +22,7 @@ async def list_controllers(): result = {} for controller_type in ControllerType: try: - files = file_system.list_files(f'controllers/{controller_type.value}') + files = fs_util.list_files(f'controllers/{controller_type.value}') result[controller_type.value] = [ f.replace('.py', '') for f in files if f.endswith('.py') and f != "__init__.py" @@ -43,13 +42,13 @@ async def list_controller_configs(): List of controller configuration objects with name, controller_name, controller_type, and other metadata """ try: - config_files = [f for f in file_system.list_files('conf/controllers') if f.endswith('.yml')] + config_files = [f for f in fs_util.list_files('conf/controllers') if f.endswith('.yml')] configs = [] for config_file in config_files: config_name = config_file.replace('.yml', '') try: - config = file_system.read_yaml_file(f"conf/controllers/{config_file}") + config = fs_util.read_yaml_file(f"conf/controllers/{config_file}") configs.append({ "config_name": config_name, "controller_name": config.get("controller_name", "unknown"), @@ -87,7 +86,7 @@ async def get_controller_config(config_name: str): HTTPException: 404 if configuration not found """ try: - config = file_system.read_yaml_file(f"conf/controllers/{config_name}.yml") + config = fs_util.read_yaml_file(f"conf/controllers/{config_name}.yml") return config except FileNotFoundError: raise HTTPException(status_code=404, detail=f"Configuration '{config_name}' not found") @@ -110,7 +109,7 @@ async def create_or_update_controller_config(config_name: str, config: Dict): """ try: yaml_content = yaml.dump(config, default_flow_style=False) - file_system.add_file('conf/controllers', f"{config_name}.yml", yaml_content, override=True) + fs_util.add_file('conf/controllers', f"{config_name}.yml", yaml_content, override=True) return {"message": f"Configuration '{config_name}' saved successfully"} except Exception as e: raise HTTPException(status_code=400, detail=str(e)) @@ -131,7 +130,7 @@ async def delete_controller_config(config_name: str): HTTPException: 404 if configuration not found """ try: - file_system.delete_file('conf/controllers', f"{config_name}.yml") + fs_util.delete_file('conf/controllers', f"{config_name}.yml") return {"message": f"Configuration '{config_name}' deleted successfully"} except FileNotFoundError: raise HTTPException(status_code=404, detail=f"Configuration '{config_name}' not found") @@ -153,7 +152,7 @@ async def get_controller(controller_type: ControllerType, controller_name: str): HTTPException: 404 if controller not found """ try: - content = file_system.read_file(f"controllers/{controller_type.value}/{controller_name}.py") + content = fs_util.read_file(f"controllers/{controller_type.value}/{controller_name}.py") return { "name": controller_name, "type": controller_type.value, @@ -190,7 +189,7 @@ async def create_or_update_controller(controller_type: ControllerType, controlle ) try: - file_system.add_file( + fs_util.add_file( f'controllers/{controller_type.value}', f"{controller_name}.py", controller.content, @@ -217,7 +216,7 @@ async def delete_controller(controller_type: 
ControllerType, controller_name: st HTTPException: 404 if controller not found """ try: - file_system.delete_file(f'controllers/{controller_type.value}', f"{controller_name}.py") + fs_util.delete_file(f'controllers/{controller_type.value}', f"{controller_name}.py") return {"message": f"Controller '{controller_name}' deleted successfully from '{controller_type.value}'"} except FileNotFoundError: raise HTTPException( @@ -241,7 +240,7 @@ async def get_controller_config_template(controller_type: ControllerType, contro Raises: HTTPException: 404 if controller configuration class not found """ - config_class = file_system.load_controller_config_class(controller_type.value, controller_name) + config_class = fs_util.load_controller_config_class(controller_type.value, controller_name) if config_class is None: raise HTTPException( status_code=404, @@ -271,13 +270,13 @@ async def get_bot_controller_configs(bot_name: str): HTTPException: 404 if bot not found """ bots_config_path = f"instances/{bot_name}/conf/controllers" - if not file_system.path_exists(bots_config_path): + if not fs_util.path_exists(bots_config_path): raise HTTPException(status_code=404, detail=f"Bot '{bot_name}' not found") configs = [] - for controller_file in file_system.list_files(bots_config_path): + for controller_file in fs_util.list_files(bots_config_path): if controller_file.endswith('.yml'): - config = file_system.read_yaml_file(f"{bots_config_path}/{controller_file}") + config = fs_util.read_yaml_file(f"{bots_config_path}/{controller_file}") config['_config_name'] = controller_file.replace('.yml', '') configs.append(config) return configs @@ -300,13 +299,13 @@ async def update_bot_controller_config(bot_name: str, controller_name: str, conf HTTPException: 404 if bot or controller not found, 400 if update error """ bots_config_path = f"instances/{bot_name}/conf/controllers" - if not file_system.path_exists(bots_config_path): + if not fs_util.path_exists(bots_config_path): raise HTTPException(status_code=404, detail=f"Bot '{bot_name}' not found") try: - current_config = file_system.read_yaml_file(f"{bots_config_path}/{controller_name}.yml") + current_config = fs_util.read_yaml_file(f"{bots_config_path}/{controller_name}.yml") current_config.update(config) - file_system.dump_dict_to_yaml(f"{bots_config_path}/{controller_name}.yml", current_config) + fs_util.dump_dict_to_yaml(f"{bots_config_path}/{controller_name}.yml", current_config) return {"message": f"Controller configuration for bot '{bot_name}' updated successfully"} except FileNotFoundError: raise HTTPException( diff --git a/routers/scripts.py b/routers/scripts.py index 8a87ee34..c4c18409 100644 --- a/routers/scripts.py +++ b/routers/scripts.py @@ -6,10 +6,9 @@ from starlette import status from models import Script, ScriptConfig -from utils.file_system import FileSystemUtil +from utils.file_system import fs_util router = APIRouter(tags=["Scripts"], prefix="/scripts") -file_system = FileSystemUtil() @router.get("/", response_model=List[str]) @@ -20,7 +19,7 @@ async def list_scripts(): Returns: List of script names (without .py extension) """ - return [f.replace('.py', '') for f in file_system.list_files('scripts') if f.endswith('.py')] + return [f.replace('.py', '') for f in fs_util.list_files('scripts') if f.endswith('.py')] # Script Configuration endpoints (must come before script name routes) @@ -33,13 +32,13 @@ async def list_script_configs(): List of script configuration objects with name, script_file_name, and other metadata """ try: - config_files = [f for f in 
file_system.list_files('conf/scripts') if f.endswith('.yml')] + config_files = [f for f in fs_util.list_files('conf/scripts') if f.endswith('.yml')] configs = [] for config_file in config_files: config_name = config_file.replace('.yml', '') try: - config = file_system.read_yaml_file(f"conf/scripts/{config_file}") + config = fs_util.read_yaml_file(f"conf/scripts/{config_file}") configs.append({ "config_name": config_name, "script_file_name": config.get("script_file_name", "unknown"), @@ -75,7 +74,7 @@ async def get_script_config(config_name: str): HTTPException: 404 if configuration not found """ try: - config = file_system.read_yaml_file(f"conf/scripts/{config_name}.yml") + config = fs_util.read_yaml_file(f"conf/scripts/{config_name}.yml") return config except FileNotFoundError: raise HTTPException(status_code=404, detail=f"Configuration '{config_name}' not found") @@ -98,7 +97,7 @@ async def create_or_update_script_config(config_name: str, config: Dict): """ try: yaml_content = yaml.dump(config, default_flow_style=False) - file_system.add_file('conf/scripts', f"{config_name}.yml", yaml_content, override=True) + fs_util.add_file('conf/scripts', f"{config_name}.yml", yaml_content, override=True) return {"message": f"Configuration '{config_name}' saved successfully"} except Exception as e: raise HTTPException(status_code=400, detail=str(e)) @@ -119,7 +118,7 @@ async def delete_script_config(config_name: str): HTTPException: 404 if configuration not found """ try: - file_system.delete_file('conf/scripts', f"{config_name}.yml") + fs_util.delete_file('conf/scripts', f"{config_name}.yml") return {"message": f"Configuration '{config_name}' deleted successfully"} except FileNotFoundError: raise HTTPException(status_code=404, detail=f"Configuration '{config_name}' not found") @@ -140,7 +139,7 @@ async def get_script(script_name: str): HTTPException: 404 if script not found """ try: - content = file_system.read_file(f"scripts/{script_name}.py") + content = fs_util.read_file(f"scripts/{script_name}.py") return { "name": script_name, "content": content @@ -165,7 +164,7 @@ async def create_or_update_script(script_name: str, script: Script): HTTPException: 400 if save error occurs """ try: - file_system.add_file('scripts', f"{script_name}.py", script.content, override=True) + fs_util.add_file('scripts', f"{script_name}.py", script.content, override=True) return {"message": f"Script '{script_name}' saved successfully"} except Exception as e: raise HTTPException(status_code=400, detail=str(e)) @@ -186,7 +185,7 @@ async def delete_script(script_name: str): HTTPException: 404 if script not found """ try: - file_system.delete_file('scripts', f"{script_name}.py") + fs_util.delete_file('scripts', f"{script_name}.py") return {"message": f"Script '{script_name}' deleted successfully"} except FileNotFoundError: raise HTTPException(status_code=404, detail=f"Script '{script_name}' not found") @@ -206,7 +205,7 @@ async def get_script_config_template(script_name: str): Raises: HTTPException: 404 if script configuration class not found """ - config_class = file_system.load_script_config_class(script_name) + config_class = fs_util.load_script_config_class(script_name) if config_class is None: raise HTTPException(status_code=404, detail=f"Script configuration class for '{script_name}' not found") From dd86a7bdfe2bb7066cf4fbbb93941cec5f4c4322 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 20 Jun 2025 17:38:56 +0200 Subject: [PATCH 120/244] (feat) add background image pulling --- routers/docker.py | 250 
+++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 238 insertions(+), 12 deletions(-) diff --git a/routers/docker.py b/routers/docker.py index 6c0f03fb..a003a720 100644 --- a/routers/docker.py +++ b/routers/docker.py @@ -1,15 +1,98 @@ import logging import os +import time +from typing import Dict -from fastapi import APIRouter, HTTPException, Depends +from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks -from models import ImageName +from models import DockerImage from utils.bot_archiver import BotArchiver from services.docker_service import DockerService from deps import get_docker_service, get_bot_archiver router = APIRouter(tags=["Docker"], prefix="/docker") +# Global state to track image pulls (in production, consider using Redis or database) +_pull_status: Dict[str, Dict] = {} + +# Configuration for cleanup +PULL_STATUS_MAX_AGE_SECONDS = 3600 # Keep status for 1 hour +PULL_STATUS_MAX_ENTRIES = 100 # Maximum number of entries to keep + + +def _cleanup_old_pull_status(): + """Remove old entries to prevent memory growth""" + current_time = time.time() + to_remove = [] + + # Find entries older than max age + for image_name, status_info in _pull_status.items(): + # Skip ongoing pulls + if status_info["status"] == "pulling": + continue + + # Check age of completed/failed operations + end_time = status_info.get("completed_at") or status_info.get("failed_at") + if end_time and (current_time - end_time > PULL_STATUS_MAX_AGE_SECONDS): + to_remove.append(image_name) + + # Remove old entries + for image_name in to_remove: + del _pull_status[image_name] + logging.info(f"Cleaned up old pull status for {image_name}") + + # If still over limit, remove oldest completed/failed entries + if len(_pull_status) > PULL_STATUS_MAX_ENTRIES: + completed_entries = [ + (name, info) for name, info in _pull_status.items() + if info["status"] in ["completed", "failed"] + ] + # Sort by end time (oldest first) + completed_entries.sort( + key=lambda x: x[1].get("completed_at") or x[1].get("failed_at") or 0 + ) + + # Remove oldest entries to get under limit + excess_count = len(_pull_status) - PULL_STATUS_MAX_ENTRIES + for i in range(min(excess_count, len(completed_entries))): + del _pull_status[completed_entries[i][0]] + logging.info(f"Cleaned up excess pull status for {completed_entries[i][0]}") + + +def _background_pull_image(image_name: str, docker_manager: DockerService): + """Background task to pull Docker image""" + try: + _pull_status[image_name] = { + "status": "pulling", + "started_at": time.time(), + "progress": "Starting pull..." 
+ } + + # Use the synchronous pull method in background + result = docker_manager.pull_image_sync(image_name) + + if result.get("success"): + _pull_status[image_name] = { + "status": "completed", + "started_at": _pull_status[image_name]["started_at"], + "completed_at": time.time(), + "result": result + } + else: + _pull_status[image_name] = { + "status": "failed", + "started_at": _pull_status[image_name]["started_at"], + "failed_at": time.time(), + "error": result.get("error", "Unknown error") + } + except Exception as e: + _pull_status[image_name] = { + "status": "failed", + "started_at": _pull_status[image_name].get("started_at", time.time()), + "failed_at": time.time(), + "error": str(e) + } + @router.get("/running") async def is_docker_running(docker_manager: DockerService = Depends(get_docker_service)): @@ -149,22 +232,165 @@ async def start_container(container_name: str, docker_manager: DockerService = D @router.post("/pull-image/") -async def pull_image(image: ImageName, docker_manager: DockerService = Depends(get_docker_service)): +async def pull_image(image: DockerImage, background_tasks: BackgroundTasks, + docker_manager: DockerService = Depends(get_docker_service)): """ - Pull a Docker image from a registry. + Initiate Docker image pull as background task. + Returns immediately with task status for monitoring. Args: - image: ImageName object containing the image name to pull + image: DockerImage object containing the image name to pull + background_tasks: FastAPI background tasks docker_manager: Docker service dependency Returns: - Result of the image pull operation + Status of the pull operation initiation + """ + image_name = image.image_name + + # Run cleanup before starting new pull + _cleanup_old_pull_status() + + # Check if pull is already in progress + if image_name in _pull_status: + current_status = _pull_status[image_name] + if current_status["status"] == "pulling": + return { + "message": f"Pull already in progress for {image_name}", + "status": "in_progress", + "started_at": current_status["started_at"], + "image_name": image_name + } + + # Start background pull + background_tasks.add_task(_background_pull_image, image_name, docker_manager) + + return { + "message": f"Pull started for {image_name}", + "status": "started", + "image_name": image_name, + "note": "Use GET /docker/pull-status/{image_name} to check progress" + } + + +@router.get("/pull-status/{image_name}") +async def get_pull_status(image_name: str): + """ + Get status of image pull operation. 
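Because the pull itself runs in a background task, callers are expected to poll the status route that follows. A sketch of the round trip, with URL and credentials assumed; note that image names containing a slash (e.g. `hummingbot/hummingbot`) will not match a plain `{image_name}` path parameter, so they would need URL encoding or a `{image_name:path}` converter:

import time

import httpx

BASE_URL = "http://localhost:8000"  # assumed
AUTH = ("admin", "admin")           # assumed

image = "redis"  # a slash-free name sidesteps the path-parameter caveat above
httpx.post(f"{BASE_URL}/docker/pull-image/", json={"image_name": image}, auth=AUTH)

while True:
    info = httpx.get(f"{BASE_URL}/docker/pull-status/{image}", auth=AUTH).json()
    if info["status"] in ("completed", "failed"):
        print(info)
        break
    time.sleep(2)  # poll every couple of seconds until the pull settles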
+ + Args: + image_name: Name of the image to check pull status for + + Returns: + Dictionary with pull status, timing, and result information Raises: - HTTPException: 400 if pull operation fails + HTTPException: 404 if no pull operation found for this image """ - try: - result = docker_manager.pull_image(image.image_name) - return result - except Exception as e: - raise HTTPException(status_code=400, detail=str(e)) + if image_name not in _pull_status: + raise HTTPException(status_code=404, detail=f"No pull operation found for image '{image_name}'") + + status_info = _pull_status[image_name].copy() + + # Add duration information + start_time = status_info.get("started_at") + if start_time: + if status_info["status"] == "pulling": + status_info["duration_seconds"] = round(time.time() - start_time, 2) + elif "completed_at" in status_info: + status_info["duration_seconds"] = round(status_info["completed_at"] - start_time, 2) + elif "failed_at" in status_info: + status_info["duration_seconds"] = round(status_info["failed_at"] - start_time, 2) + + return { + "image_name": image_name, + **status_info + } + + +@router.get("/pull-status/") +async def list_pull_operations(): + """ + List all current and recent pull operations. + + Returns: + Dictionary with all pull operations and their statuses + """ + operations = {} + for image_name, status_info in _pull_status.items(): + status_copy = status_info.copy() + + # Add duration for each operation + start_time = status_copy.get("started_at") + if start_time: + if status_copy["status"] == "pulling": + status_copy["duration_seconds"] = round(time.time() - start_time, 2) + elif "completed_at" in status_copy: + status_copy["duration_seconds"] = round(status_copy["completed_at"] - start_time, 2) + elif "failed_at" in status_copy: + status_copy["duration_seconds"] = round(status_copy["failed_at"] - start_time, 2) + + operations[image_name] = status_copy + + return { + "pull_operations": operations, + "total_operations": len(operations) + } + + +@router.delete("/pull-status/{image_name}") +async def clear_pull_status(image_name: str): + """ + Clear pull status for completed or failed operations. + + Args: + image_name: Name of the image to clear status for + + Returns: + Success message when status is cleared + + Raises: + HTTPException: 400 if trying to clear ongoing operation, 404 if operation not found + """ + if image_name not in _pull_status: + raise HTTPException(status_code=404, detail=f"Pull operation for '{image_name}' not found") + + status = _pull_status[image_name]["status"] + if status == "pulling": + raise HTTPException( + status_code=400, + detail=f"Cannot clear status for ongoing pull operation. Current status: {status}" + ) + + del _pull_status[image_name] + return {"message": f"Cleared pull status for '{image_name}'"} + + +@router.delete("/pull-status/") +async def clear_all_completed_pull_status(): + """ + Clear all completed and failed pull operations from status tracking. 
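The hand-rolled age and size limits are one reasonable way to bound this registry; a `cachetools.TTLCache` could replace most of `_cleanup_old_pull_status` in a few lines, at the cost of also evicting entries whose pulls are still running (the custom cleanup deliberately skips those). A sketch, assuming the extra dependency is acceptable:

from cachetools import TTLCache

# Entries expire an hour after being written and the registry is capped at 100
# items, mirroring PULL_STATUS_MAX_AGE_SECONDS and PULL_STATUS_MAX_ENTRIES.
_pull_status = TTLCache(maxsize=100, ttl=3600)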
+ + Returns: + Summary of cleared operations + """ + cleared_count = 0 + cleared_images = [] + + # Create a list of items to remove to avoid modifying dict during iteration + to_remove = [] + for image_name, status_info in _pull_status.items(): + if status_info["status"] in ["completed", "failed"]: + to_remove.append(image_name) + + # Remove the completed/failed operations + for image_name in to_remove: + del _pull_status[image_name] + cleared_images.append(image_name) + cleared_count += 1 + + return { + "message": f"Cleared {cleared_count} completed/failed pull operations", + "cleared_images": cleared_images, + "remaining_operations": len(_pull_status) + } From abcd5050ca79471491261e3a8327a3015755c994 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 20 Jun 2025 17:39:06 +0200 Subject: [PATCH 121/244] (feat) use models --- routers/bot_orchestration.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py index 9af54c25..6e9dbb79 100644 --- a/routers/bot_orchestration.py +++ b/routers/bot_orchestration.py @@ -4,7 +4,7 @@ from datetime import datetime from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks -from models import StartBotAction, StopBotAction, HummingbotInstanceConfig, V2ControllerDeployment +from models import StartBotAction, StopBotAction, V2ScriptDeployment, V2ControllerDeployment from services.bots_orchestrator import BotsOrchestrator from services.docker_service import DockerService from deps import get_bots_orchestrator, get_docker_service, get_bot_archiver @@ -335,7 +335,7 @@ async def stop_and_archive_bot( @router.post("/create-hummingbot-instance") async def create_hummingbot_instance( - config: HummingbotInstanceConfig, + config: V2ScriptDeployment, docker_manager: DockerService = Depends(get_docker_service) ): """ @@ -408,8 +408,8 @@ async def deploy_v2_controllers( logging.info(f"Generated script config: {script_config_filename} with content: {script_config_content}") - # Create the HummingbotInstanceConfig with the generated script config - instance_config = HummingbotInstanceConfig( + # Create the V2ScriptDeployment with the generated script config + instance_config = V2ScriptDeployment( instance_name=deployment.instance_name, credentials_profile=deployment.credentials_profile, image=deployment.image, From c7fdd0f60aafa4a1e49a40272f27eab671bf7fa6 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 20 Jun 2025 17:39:15 +0200 Subject: [PATCH 122/244] (feat) final trading features --- routers/trading.py | 215 ++++++++++++++++++++++----------------------- 1 file changed, 104 insertions(+), 111 deletions(-) diff --git a/routers/trading.py b/routers/trading.py index e7825c23..58fd4677 100644 --- a/routers/trading.py +++ b/routers/trading.py @@ -2,125 +2,17 @@ from datetime import datetime from fastapi import APIRouter, HTTPException, Depends, Query +from hummingbot.core.data_type.common import PositionMode from starlette import status from services.accounts_service import AccountsService from deps import get_accounts_service, get_market_data_feed_manager -from models import PaginatedResponse -from models.bot import TradeRequest, TradeResponse +from models import PaginatedResponse, TradeRequest, TradeResponse +from models.accounts import PositionModeRequest, LeverageRequest router = APIRouter(tags=["Trading"], prefix="/trading") -# Portfolio & Account State Monitoring -@router.get("/portfolio/state", response_model=Dict[str, Dict[str, List[Dict]]]) -async def 
get_portfolio_state(accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Get the current state of all accounts portfolio. - - Returns: - Dict containing all account states with connector balances and token information - """ - return accounts_service.get_accounts_state() - - -@router.get("/portfolio/history", response_model=PaginatedResponse) -async def get_portfolio_history( - limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), - cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), - start_time: datetime = Query(default=None, description="Start time for filtering"), - end_time: datetime = Query(default=None, description="End time for filtering"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get the historical state of all accounts portfolio with pagination. - """ - try: - data, next_cursor, has_more = await accounts_service.load_account_state_history( - limit=limit, - cursor=cursor, - start_time=start_time, - end_time=end_time - ) - - return PaginatedResponse( - data=data, - pagination={ - "limit": limit, - "has_more": has_more, - "next_cursor": next_cursor, - "current_cursor": cursor - } - ) - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) - - -@router.get("/{account_name}/balance", response_model=Dict[str, List[Dict]]) -async def get_account_balance(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Get current balance state of a specific account. - - Args: - account_name: Name of the account to get balance for - - Returns: - Dictionary mapping connector names to lists of token information - - Raises: - HTTPException: 404 if account not found - """ - state = await accounts_service.get_account_current_state(account_name) - if not state: - raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - return state - - -@router.get("/{account_name}/balance/history", response_model=PaginatedResponse) -async def get_account_balance_history( - account_name: str, - limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), - cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), - start_time: datetime = Query(default=None, description="Start time for filtering"), - end_time: datetime = Query(default=None, description="End time for filtering"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get historical balance state of a specific account with pagination. 
- - Args: - account_name: Name of the account to get history for - limit: Number of items per page (1-1000) - cursor: Cursor for pagination (ISO timestamp) - start_time: Start time for filtering results - end_time: End time for filtering results - - Returns: - Paginated response with historical account balance data - """ - data, next_cursor, has_more = await accounts_service.get_account_state_history( - account_name=account_name, - limit=limit, - cursor=cursor, - start_time=start_time, - end_time=end_time - ) - - return PaginatedResponse( - data=data, - pagination={ - "limit": limit, - "has_more": has_more, - "next_cursor": next_cursor, - "current_cursor": cursor, - "filters": { - "account_name": account_name, - "start_time": start_time.isoformat() if start_time else None, - "end_time": end_time.isoformat() if end_time else None - } - } - ) - # Trade Execution @router.post("/orders", response_model=TradeResponse, status_code=status.HTTP_201_CREATED) @@ -537,6 +429,107 @@ async def get_account_trades( # Trading Rules & Configuration +@router.post("/{account_name}/{connector_name}/position-mode") +async def set_position_mode( + account_name: str, + connector_name: str, + request: PositionModeRequest, + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Set position mode for a perpetual connector. + + Args: + account_name: Name of the account + connector_name: Name of the perpetual connector + position_mode: Position mode to set (HEDGE or ONEWAY) + + Returns: + Success message with status + + Raises: + HTTPException: 400 if not a perpetual connector or invalid position mode + """ + try: + # Convert string to PositionMode enum + mode = PositionMode[request.position_mode.upper()] + result = await accounts_service.set_position_mode(account_name, connector_name, mode) + return result + except KeyError: + raise HTTPException( + status_code=400, + detail=f"Invalid position mode '{request.position_mode}'. Must be 'HEDGE' or 'ONEWAY'" + ) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/{account_name}/{connector_name}/position-mode") +async def get_position_mode( + account_name: str, + connector_name: str, + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get current position mode for a perpetual connector. + + Args: + account_name: Name of the account + connector_name: Name of the perpetual connector + + Returns: + Dictionary with current position mode, connector name, and account name + + Raises: + HTTPException: 400 if not a perpetual connector + """ + try: + result = await accounts_service.get_position_mode(account_name, connector_name) + return result + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.post("/{account_name}/{connector_name}/leverage") +async def set_leverage( + account_name: str, + connector_name: str, + request: LeverageRequest, + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Set leverage for a specific trading pair on a perpetual connector. 
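Together with the two position-mode routes above, this endpoint completes the usual setup sequence for a perpetual connector before any order is placed. A hedged usage sketch (account and connector names are placeholders; URL and credentials assumed):

import httpx

BASE_URL = "http://localhost:8000"  # assumed
AUTH = ("admin", "admin")           # assumed

# Switch the connector to hedge mode, then set 10x leverage on one pair.
httpx.post(f"{BASE_URL}/trading/master_account/binance_perpetual/position-mode",
           json={"position_mode": "HEDGE"}, auth=AUTH)
httpx.post(f"{BASE_URL}/trading/master_account/binance_perpetual/leverage",
           json={"trading_pair": "BTC-USDT", "leverage": 10}, auth=AUTH)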
+ + Args: + account_name: Name of the account + connector_name: Name of the perpetual connector + request: Leverage request with trading pair and leverage value + accounts_service: Injected accounts service + + Returns: + Dictionary with success status and message + + Raises: + HTTPException: 400 for invalid parameters or non-perpetual connector, 404 for account/connector not found, 500 for execution errors + """ + try: + result = await accounts_service.set_leverage( + account_name=account_name, + connector_name=connector_name, + trading_pair=request.trading_pair, + leverage=request.leverage + ) + return result + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Unexpected error setting leverage: {str(e)}") + + @router.get("/{account_name}/{connector_name}/order-types") async def get_supported_order_types(account_name: str, connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): From 92520602493be3a0bd099984942cebc79f125b2f Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 20 Jun 2025 17:39:29 +0200 Subject: [PATCH 123/244] (feat) simplify file system interactions --- services/accounts_service.py | 221 ++++++++++++++++++++++++++++++----- services/docker_service.py | 17 ++- utils/connector_manager.py | 51 +++----- utils/file_system.py | 16 ++- utils/security.py | 14 +-- 5 files changed, 238 insertions(+), 81 deletions(-) diff --git a/services/accounts_service.py b/services/accounts_service.py index 6b47b334..837947db 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -12,9 +12,8 @@ from database import AsyncDatabaseManager, AccountRepository, OrderRepository, TradeRepository from services.market_data_feed_manager import MarketDataFeedManager from utils.connector_manager import ConnectorManager -from utils.file_system import FileSystemUtil -file_system = FileSystemUtil() +from utils.file_system import fs_util class AccountsService: @@ -180,9 +179,7 @@ async def _ensure_account_connectors_initialized(self, account_name: str): # Only initialize if connector doesn't exist if not self.connector_manager.is_connector_initialized(account_name, connector_name): # Get connector will now handle all initialization - connector = await self.connector_manager.get_connector(account_name, connector_name) - # Force initial balance update to ensure first dump has data - await connector._update_balances() + await self.connector_manager.get_connector(account_name, connector_name) except Exception as e: logging.error(f"Error initializing connector {connector_name} for account {account_name}: {e}") @@ -260,18 +257,13 @@ async def add_credentials(self, account_name: str, connector_name: str, credenti # Update the connector keys (this saves the credentials to file) await self.connector_manager.update_connector_keys(account_name, connector_name, credentials) - # Initialize the connector with tracking - # Get connector will now handle all initialization - connector = await self.connector_manager.get_connector(account_name, connector_name) - # Force initial balance update to ensure first dump has data - await connector._update_balances() @staticmethod def list_accounts(): """ List all the accounts that are connected to the trading system. :return: List of accounts.
""" - return file_system.list_folders('credentials') + return fs_util.list_folders('credentials') @staticmethod def list_credentials(account_name: str): @@ -281,7 +273,7 @@ def list_credentials(account_name: str): :return: List of credentials. """ try: - return [file for file in file_system.list_files(f'credentials/{account_name}/connectors') if + return [file for file in fs_util.list_files(f'credentials/{account_name}/connectors') if file.endswith('.yml')] except FileNotFoundError as e: raise HTTPException(status_code=404, detail=str(e)) @@ -293,8 +285,8 @@ async def delete_credentials(self, account_name: str, connector_name: str): :param connector_name: :return: """ - if file_system.path_exists(f"credentials/{account_name}/connectors/{connector_name}.yml"): - file_system.delete_file(directory=f"credentials/{account_name}/connectors", file_name=f"{connector_name}.yml") + if fs_util.path_exists(f"credentials/{account_name}/connectors/{connector_name}.yml"): + fs_util.delete_file(directory=f"credentials/{account_name}/connectors", file_name=f"{connector_name}.yml") # Stop the connector if it's running await self.connector_manager.stop_connector(account_name, connector_name) @@ -317,10 +309,10 @@ def add_account(self, account_name: str): raise HTTPException(status_code=400, detail="Account already exists.") files_to_copy = ["conf_client.yml", "conf_fee_overrides.yml", "hummingbot_logs.yml", ".password_verification"] - file_system.create_folder('credentials', account_name) - file_system.create_folder(f'credentials/{account_name}', "connectors") + fs_util.create_folder('credentials', account_name) + fs_util.create_folder(f'credentials/{account_name}', "connectors") for file in files_to_copy: - file_system.copy_file(f"credentials/master_account/{file}", f"credentials/{account_name}/{file}") + fs_util.copy_file(f"credentials/master_account/{file}", f"credentials/{account_name}/{file}") # Initialize account state self.accounts_state[account_name] = {} @@ -336,7 +328,7 @@ async def delete_account(self, account_name: str): await self.connector_manager.stop_connector(account_name, connector_name) # Delete account folder - file_system.delete_folder('credentials', account_name) + fs_util.delete_folder('credentials', account_name) # Remove from account state if account_name in self.accounts_state: @@ -489,6 +481,175 @@ async def get_portfolio_value(self, account_name: Optional[str] = None) -> Dict[ return portfolio + def get_portfolio_distribution(self, account_name: Optional[str] = None) -> Dict[str, any]: + """ + Get portfolio distribution by tokens with percentages. 
+ """ + try: + # Get accounts to process + accounts_to_process = [account_name] if account_name else list(self.accounts_state.keys()) + + # Aggregate all tokens across accounts and connectors + token_values = {} + total_value = 0 + + for acc_name in accounts_to_process: + if acc_name in self.accounts_state: + for connector_name, connector_data in self.accounts_state[acc_name].items(): + for token_info in connector_data: + token = token_info.get("token", "") + value = token_info.get("value", 0) + + if token not in token_values: + token_values[token] = { + "token": token, + "total_value": 0, + "total_units": 0, + "accounts": {} + } + + token_values[token]["total_value"] += value + token_values[token]["total_units"] += token_info.get("units", 0) + total_value += value + + # Track by account + if acc_name not in token_values[token]["accounts"]: + token_values[token]["accounts"][acc_name] = { + "value": 0, + "units": 0, + "connectors": {} + } + + token_values[token]["accounts"][acc_name]["value"] += value + token_values[token]["accounts"][acc_name]["units"] += token_info.get("units", 0) + + # Track by connector within account + if connector_name not in token_values[token]["accounts"][acc_name]["connectors"]: + token_values[token]["accounts"][acc_name]["connectors"][connector_name] = { + "value": 0, + "units": 0 + } + + token_values[token]["accounts"][acc_name]["connectors"][connector_name]["value"] += value + token_values[token]["accounts"][acc_name]["connectors"][connector_name]["units"] += token_info.get("units", 0) + + # Calculate percentages + distribution = [] + for token_data in token_values.values(): + percentage = (token_data["total_value"] / total_value * 100) if total_value > 0 else 0 + + token_dist = { + "token": token_data["token"], + "total_value": round(token_data["total_value"], 6), + "total_units": token_data["total_units"], + "percentage": round(percentage, 4), + "accounts": {} + } + + # Add account-level percentages + for acc_name, acc_data in token_data["accounts"].items(): + acc_percentage = (acc_data["value"] / total_value * 100) if total_value > 0 else 0 + token_dist["accounts"][acc_name] = { + "value": round(acc_data["value"], 6), + "units": acc_data["units"], + "percentage": round(acc_percentage, 4), + "connectors": {} + } + + # Add connector-level data + for conn_name, conn_data in acc_data["connectors"].items(): + token_dist["accounts"][acc_name]["connectors"][conn_name] = { + "value": round(conn_data["value"], 6), + "units": conn_data["units"] + } + + distribution.append(token_dist) + + # Sort by value (descending) + distribution.sort(key=lambda x: x["total_value"], reverse=True) + + return { + "total_portfolio_value": round(total_value, 6), + "token_count": len(distribution), + "distribution": distribution, + "account_filter": account_name if account_name else "all_accounts" + } + + except Exception as e: + logging.error(f"Error calculating portfolio distribution: {e}") + return { + "total_portfolio_value": 0, + "token_count": 0, + "distribution": [], + "account_filter": account_name if account_name else "all_accounts", + "error": str(e) + } + + def get_account_distribution(self) -> Dict[str, any]: + """ + Get portfolio distribution by accounts with percentages. 
+ """ + try: + account_values = {} + total_value = 0 + + for acc_name, account_data in self.accounts_state.items(): + account_value = 0 + connector_values = {} + + for connector_name, connector_data in account_data.items(): + connector_value = 0 + for token_info in connector_data: + value = token_info.get("value", 0) + connector_value += value + account_value += value + + connector_values[connector_name] = round(connector_value, 6) + + account_values[acc_name] = { + "total_value": round(account_value, 6), + "connectors": connector_values + } + total_value += account_value + + # Calculate percentages + distribution = [] + for acc_name, acc_data in account_values.items(): + percentage = (acc_data["total_value"] / total_value * 100) if total_value > 0 else 0 + + connector_dist = {} + for conn_name, conn_value in acc_data["connectors"].items(): + conn_percentage = (conn_value / total_value * 100) if total_value > 0 else 0 + connector_dist[conn_name] = { + "value": conn_value, + "percentage": round(conn_percentage, 4) + } + + distribution.append({ + "account": acc_name, + "total_value": acc_data["total_value"], + "percentage": round(percentage, 4), + "connectors": connector_dist + }) + + # Sort by value (descending) + distribution.sort(key=lambda x: x["total_value"], reverse=True) + + return { + "total_portfolio_value": round(total_value, 6), + "account_count": len(distribution), + "distribution": distribution + } + + except Exception as e: + logging.error(f"Error calculating account distribution: {e}") + return { + "total_portfolio_value": 0, + "account_count": 0, + "distribution": [], + "error": str(e) + } + async def place_trade(self, account_name: str, connector_name: str, trading_pair: str, trade_type: TradeType, amount: Decimal, order_type: OrderType = OrderType.LIMIT, price: Optional[Decimal] = None, position_action: PositionAction = PositionAction.OPEN, @@ -522,7 +683,7 @@ async def place_trade(self, account_name: str, connector_name: str, trading_pair raise HTTPException(status_code=404, detail=f"Connector '{connector_name}' not found for account '{account_name}'") # Get the connector instance - connector = self.connector_manager.get_connector(account_name, connector_name) + connector = await self.connector_manager.get_connector(account_name, connector_name) # Validate price for limit orders if order_type in [OrderType.LIMIT, OrderType.LIMIT_MAKER] and price is None: @@ -567,9 +728,15 @@ async def place_trade(self, account_name: str, connector_name: str, trading_pair quantized_price = connector.quantize_order_price(trading_pair, price) notional_size = quantized_price * quantized_amount else: - # For market orders, use current price - current_price = connector.get_price(trading_pair, False) - notional_size = current_price * quantized_amount + # For market orders without price, get current market price for validation + if market_data_manager: + try: + prices = await market_data_manager.get_prices(connector_name, [trading_pair]) + if trading_pair in prices and "error" not in prices: + price = Decimal(str(prices[trading_pair])) + except Exception as e: + logging.error(f"Error getting market price for {trading_pair}: {e}") + notional_size = price * quantized_amount if notional_size < trading_rule.min_notional_size: raise HTTPException( @@ -578,15 +745,7 @@ async def place_trade(self, account_name: str, connector_name: str, trading_pair f"Increase the amount or price to meet the minimum requirement." 
) - # For market orders without price, get current market price for validation - if order_type == OrderType.MARKET and price is None: - if market_data_manager: - try: - prices = await market_data_manager.get_prices(connector_name, [trading_pair]) - if trading_pair in prices and "error" not in prices: - price = Decimal(str(prices[trading_pair])) - except Exception as e: - logging.error(f"Error getting market price for {trading_pair}: {e}") + try: # Place the order using the connector with quantized values diff --git a/services/docker_service.py b/services/docker_service.py index 7dec7e37..34ff6303 100644 --- a/services/docker_service.py +++ b/services/docker_service.py @@ -7,10 +7,7 @@ from docker.types import LogConfig from config import settings -from models import HummingbotInstanceConfig -from utils.file_system import FileSystemUtil - -file_system = FileSystemUtil() +from models import V2ScriptDeployment class DockerService: @@ -39,10 +36,18 @@ def get_available_images(self): def pull_image(self, image_name): try: - self.client.images.pull(image_name) + return self.client.images.pull(image_name) except DockerException as e: return str(e) + def pull_image_sync(self, image_name): + """Synchronous pull operation for background tasks""" + try: + result = self.client.images.pull(image_name) + return {"success": True, "image": image_name, "result": str(result)} + except DockerException as e: + return {"success": False, "error": str(e)} + def get_exited_containers(self): try: containers_info = [{"id": container.id, "name": container.name, "status": container.status} for @@ -102,7 +107,7 @@ def remove_container(self, container_name, force=True): except DockerException as e: return {"success": False, "message": str(e)} - def create_hummingbot_instance(self, config: HummingbotInstanceConfig): + def create_hummingbot_instance(self, config: V2ScriptDeployment): bots_path = os.environ.get('BOTS_PATH', self.SOURCE_PATH) # Default to 'SOURCE_PATH' if BOTS_PATH is not set instance_name = f"hummingbot-{config.instance_name}" instance_dir = os.path.join("bots", 'instances', instance_name) diff --git a/utils/connector_manager.py b/utils/connector_manager.py index 33aabb5c..657368a3 100644 --- a/utils/connector_manager.py +++ b/utils/connector_manager.py @@ -12,8 +12,8 @@ from hummingbot.core.utils.async_utils import safe_ensure_future from utils.backend_api_config_adapter import BackendAPIConfigAdapter +from utils.file_system import FileSystemUtil, fs_util from utils.security import BackendAPISecurity -from utils.file_system import FileSystemUtil class ConnectorManager: @@ -28,7 +28,6 @@ def __init__(self, secrets_manager: ETHKeyFileSecretManger, db_manager=None): self.db_manager = db_manager self._connector_cache: Dict[str, ConnectorBase] = {} self._orders_recorders: Dict[str, any] = {} - self._file_system = FileSystemUtil() async def get_connector(self, account_name: str, connector_name: str): """ @@ -66,6 +65,11 @@ def _create_connector(self, account_name: str, connector_name: str): client_config_map = ClientConfigAdapter(ClientConfigMap()) conn_setting = AllConnectorSettings.get_connector_settings()[connector_name] keys = BackendAPISecurity.api_keys(connector_name) + + # Debug logging + logging.info(f"Creating connector {connector_name} for account {account_name}") + logging.info(f"API keys retrieved: {list(keys.keys()) if keys else 'None'}") + read_only_config = ReadOnlyClientConfigAdapter.lock_config(client_config_map) init_params = conn_setting.conn_init_parameters( @@ -75,6 +79,9 @@ def 
_create_connector(self, account_name: str, connector_name: str): client_config_map=read_only_config, ) + # Debug logging + logging.info(f"Init params keys: {list(init_params.keys())}") + connector_class = get_connector_class(connector_name) connector = connector_class(**init_params) return connector @@ -126,12 +133,14 @@ async def update_connector_keys(self, account_name: str, connector_name: str, ke BackendAPISecurity.update_connector_keys(account_name, connector_config) + # Re-decrypt all credentials to ensure the new keys are available + BackendAPISecurity.decrypt_all(account_name=account_name) + # Clear the cache for this connector to force recreation with new keys self.clear_cache(account_name, connector_name) # Create and return new connector instance - new_connector = self.get_connector(account_name, connector_name) - await new_connector._update_balances() + new_connector = await self.get_connector(account_name, connector_name) return new_connector @@ -225,8 +234,10 @@ async def _create_and_initialize_connector(self, account_name: str, connector_na await connector._update_balances() # Set default position mode to HEDGE for perpetual connectors - await self._set_default_position_mode(connector) - + if "_perpetual" in connector_name: + if PositionMode.HEDGE in connector.supported_position_modes(): + connector.set_position_mode(PositionMode.HEDGE) + logging.info(f"Initialized connector {connector_name} for account {account_name}") return connector @@ -249,32 +260,6 @@ def _start_network_without_order_book(self, connector: ExchangePyBase): except Exception as e: logging.error(f"Error starting connector network without order book: {e}") - async def _set_default_position_mode(self, connector): - """ - Set default position mode to HEDGE for perpetual connectors that support position modes. - - :param connector: The connector instance - """ - try: - # Check if this is a perpetual connector - if "_perpetual" in connector.name and hasattr(connector, 'set_position_mode'): - # Check if HEDGE mode is supported - if hasattr(connector, 'supported_position_modes'): - supported_modes = connector.supported_position_modes() - if PositionMode.HEDGE in supported_modes: - # Try to call the method - it might be sync or async - result = connector.set_position_mode(PositionMode.HEDGE) - # If it's a coroutine, await it - if asyncio.iscoroutine(result): - await result - logging.info(f"Set default position mode to HEDGE for {connector.name}") - else: - logging.info(f"HEDGE mode not supported for {connector.name}, skipping position mode setup") - else: - logging.info(f"Position modes not supported for {connector.name}, skipping position mode setup") - except Exception as e: - logging.warning(f"Failed to set default position mode for {connector.name}: {e}") - async def stop_connector(self, account_name: str, connector_name: str): """ Stop a connector and its associated services. @@ -321,7 +306,7 @@ def list_available_credentials(self, account_name: str) -> List[str]: :return: List of connector names that have credentials. 
""" try: - files = self._file_system.list_files(f'credentials/{account_name}/connectors') + files = fs_util.list_files(f'credentials/{account_name}/connectors') return [file.replace('.yml', '') for file in files if file.endswith('.yml')] except FileNotFoundError: return [] \ No newline at end of file diff --git a/utils/file_system.py b/utils/file_system.py index cfbc4d61..c516eb29 100644 --- a/utils/file_system.py +++ b/utils/file_system.py @@ -21,16 +21,24 @@ class FileSystemUtil: as well as dynamic loading of script configurations. All file operations are performed relative to the base_path unless an absolute path is provided. + Implements singleton pattern to ensure the same instance is reused. """ + _instance = None base_path: str = "bots" # Default base path + def __new__(cls, base_path: Optional[str] = None): + if cls._instance is None: + cls._instance = super(FileSystemUtil, cls).__new__(cls) + cls._instance.base_path = base_path if base_path else "bots" + return cls._instance + def __init__(self, base_path: Optional[str] = None): """ Initializes the FileSystemUtil with a base path. :param base_path: The base directory path for file operations. """ - if base_path: - self.base_path = base_path + # Singleton pattern - instance already configured in __new__ + pass def _get_full_path(self, path: str) -> str: """ @@ -321,7 +329,7 @@ def get_connector_keys_path(self, account_name: str, connector_name: str) -> Pat :param connector_name: Name of the connector. :return: Path to the connector credentials file. """ - return Path(self.base_path) / "credentials" / account_name / "connectors" / f"{connector_name}.yml" + return Path("credentials") / account_name / "connectors" / f"{connector_name}.yml" def save_model_to_yml(self, yml_path: str, cm: ClientConfigAdapter) -> None: """ @@ -438,3 +446,5 @@ def list_checkpoints(self, full_path: bool = False) -> List[str]: except (OSError, PermissionError) as e: logging.warning(f"Error listing checkpoints in '{dir_path}': {e}") return [] + +fs_util = FileSystemUtil() \ No newline at end of file diff --git a/utils/security.py b/utils/security.py index ff697b42..f715c54d 100644 --- a/utils/security.py +++ b/utils/security.py @@ -11,13 +11,11 @@ from hummingbot.client.config.security import Security from config import settings -from utils.file_system import FileSystemUtil from utils.backend_api_config_adapter import BackendAPIConfigAdapter +from utils.file_system import fs_util class BackendAPISecurity(Security): - fs_util = FileSystemUtil(base_path="bots/credentials") - @classmethod def login_account(cls, account_name: str, secrets_manager: BaseSecretsManager) -> bool: if not cls.validate_password(secrets_manager): @@ -30,10 +28,10 @@ def login_account(cls, account_name: str, secrets_manager: BaseSecretsManager) - def decrypt_all(cls, account_name: str = "master_account"): cls._secure_configs.clear() cls._decryption_done.clear() - encrypted_files = [file for file in cls.fs_util.list_files(directory=f"{account_name}/connectors") if + encrypted_files = [file for file in fs_util.list_files(directory=f"credentials/{account_name}/connectors") if file.endswith(".yml")] for file in encrypted_files: - path = Path(cls.fs_util.base_path + f"/{account_name}/connectors/" + file) + path = Path(fs_util.base_path + f"/credentials/{account_name}/connectors/" + file) cls.decrypt_connector_config(path) cls._decryption_done.set() @@ -54,9 +52,9 @@ def load_connector_config_map_from_file(cls, yml_path: Path) -> BackendAPIConfig @classmethod def update_connector_keys(cls, 
account_name: str, connector_config: ClientConfigAdapter): connector_name = connector_config.connector - file_path = cls.fs_util.get_connector_keys_path(account_name=account_name, connector_name=connector_name) + file_path = fs_util.get_connector_keys_path(account_name=account_name, connector_name=connector_name) cm_yml_str = connector_config.generate_yml_output_str_with_comments() - cls.fs_util.ensure_file_and_dump_text(file_path, cm_yml_str) + fs_util.ensure_file_and_dump_text(str(file_path), cm_yml_str) update_connector_hb_config(connector_config) cls._secure_configs[connector_name] = connector_config @@ -67,7 +65,7 @@ def new_password_required() -> bool: @staticmethod def store_password_verification(secrets_manager: BaseSecretsManager): encrypted_word = secrets_manager.encrypt_secret_value(PASSWORD_VERIFICATION_WORD, PASSWORD_VERIFICATION_WORD) - FileSystemUtil.ensure_file_and_dump_text(settings.app.password_verification_path, encrypted_word) + fs_util.ensure_file_and_dump_text(settings.app.password_verification_path, encrypted_word) @staticmethod def validate_password(secrets_manager: BaseSecretsManager) -> bool: From f6fd426a51e9d56f34790d7613d6739ead19f022 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 20 Jun 2025 17:39:35 +0200 Subject: [PATCH 124/244] (feat) remove patches --- main.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/main.py b/main.py index ff5aeb14..39293dc1 100644 --- a/main.py +++ b/main.py @@ -45,10 +45,6 @@ # Load environment variables early load_dotenv() -# Apply patches for third-party libraries -from patches import apply_config_helpers_patch -apply_config_helpers_patch() - # Get settings from Pydantic Settings username = settings.security.username password = settings.security.password @@ -64,7 +60,7 @@ async def lifespan(app: FastAPI): Lifespan context manager for the FastAPI application. Handles startup and shutdown events. 
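The lifespan change above makes first-boot bootstrapping explicit: if no password-verification file exists yet, one is created from CONFIG_PASSWORD before anything touches encrypted credentials. The sequence in isolation; the final store call is an assumption inferred from the store_password_verification helper shown earlier in this series:

    if BackendAPISecurity.new_password_required():
        secrets_manager = ETHKeyFileSecretManger(password=settings.security.config_password)
        BackendAPISecurity.store_password_verification(secrets_manager)  # assumed follow-up step
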
""" - # Ensure password verification file exists FIRST + # Ensure password verification file exists if BackendAPISecurity.new_password_required(): # Create secrets manager with CONFIG_PASSWORD secrets_manager = ETHKeyFileSecretManger(password=settings.security.config_password) From 86f0ebaad2c6f200375bdf45b42857b72ae250bc Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 20 Jun 2025 19:31:47 +0200 Subject: [PATCH 125/244] (feat) improve fs_util --- routers/bot_orchestration.py | 18 +++++------------- services/docker_service.py | 12 ++++++++---- 2 files changed, 13 insertions(+), 17 deletions(-) diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py index 6e9dbb79..cfebc7b3 100644 --- a/routers/bot_orchestration.py +++ b/routers/bot_orchestration.py @@ -8,7 +8,7 @@ from services.bots_orchestrator import BotsOrchestrator from services.docker_service import DockerService from deps import get_bots_orchestrator, get_docker_service, get_bot_archiver -from utils.file_system import FileSystemUtil +from utils.file_system import fs_util from utils.bot_archiver import BotArchiver router = APIRouter(tags=["Bot Orchestration"], prefix="/bot-orchestration") @@ -153,7 +153,6 @@ async def stop_bot(action: StopBotAction, bots_manager: BotsOrchestrator = Depen async def _background_stop_and_archive( bot_name: str, - actual_bot_name: str, container_name: str, bot_name_for_orchestrator: str, skip_order_cancellation: bool, @@ -191,11 +190,7 @@ async def _background_stop_and_archive( for i in range(max_retries): logging.info(f"Attempting to stop container {container_name} (attempt {i+1}/{max_retries})") - stop_container_response = docker_manager.stop_container(container_name) - - if stop_container_response.get("success", False): - container_stopped = True - break + docker_manager.stop_container(container_name) # Check if container is already stopped container_status = docker_manager.get_container_status(container_name) @@ -247,10 +242,8 @@ async def stop_and_archive_bot( bot_name: str, background_tasks: BackgroundTasks, skip_order_cancellation: bool = True, - async_backend: bool = True, archive_locally: bool = True, s3_bucket: str = None, - timeout: float = 30.0, bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator), docker_manager: DockerService = Depends(get_docker_service), bot_archiver: BotArchiver = Depends(get_bot_archiver) @@ -305,8 +298,7 @@ async def stop_and_archive_bot( # Add the background task background_tasks.add_task( _background_stop_and_archive, - bot_name=bot_name, - actual_bot_name=actual_bot_name, + bot_name=actual_bot_name, container_name=container_name, bot_name_for_orchestrator=bot_name_for_orchestrator, skip_order_cancellation=skip_order_cancellation, @@ -400,11 +392,11 @@ async def deploy_v2_controllers( script_config_content["max_controller_drawdown"] = deployment.max_controller_drawdown # Save the script config to the scripts directory - scripts_dir = os.path.join("bots", "conf", "scripts") + scripts_dir = os.path.join("conf", "scripts") os.makedirs(scripts_dir, exist_ok=True) script_config_path = os.path.join(scripts_dir, script_config_filename) - FileSystemUtil.dump_dict_to_yaml(script_config_path, script_config_content) + fs_util.dump_dict_to_yaml(script_config_path, script_config_content) logging.info(f"Generated script config: {script_config_filename} with content: {script_config_content}") diff --git a/services/docker_service.py b/services/docker_service.py index 34ff6303..563f3246 100644 --- a/services/docker_service.py +++ 
b/services/docker_service.py @@ -8,6 +8,7 @@ from config import settings from models import V2ScriptDeployment +from utils.file_system import fs_util class DockerService: @@ -145,7 +146,9 @@ def create_hummingbot_instance(self, config: V2ScriptDeployment): # Load the script config to find referenced controllers try: - script_config_content = FileSystemUtil.read_yaml_file(source_script_config_file) + # Path relative to fs_util base_path (which is "bots") + script_config_relative_path = f"conf/scripts/{config.script_config}" + script_config_content = fs_util.read_yaml_file(script_config_relative_path) controllers_list = script_config_content.get('controllers_config', []) # If there are controllers referenced, copy them @@ -166,10 +169,11 @@ def create_hummingbot_instance(self, config: V2ScriptDeployment): logging.error(f"Error reading script config file {config.script_config}: {e}") else: logging.warning(f"Script config file {config.script_config} not found in {script_config_dir}") - conf_file_path = f"{instance_dir}/conf/conf_client.yml" - client_config = FileSystemUtil.read_yaml_file(conf_file_path) + # Path relative to fs_util base_path (which is "bots") + conf_file_path = f"instances/{instance_name}/conf/conf_client.yml" + client_config = fs_util.read_yaml_file(conf_file_path) client_config['instance_id'] = instance_name - FileSystemUtil.dump_dict_to_yaml(conf_file_path, client_config) + fs_util.dump_dict_to_yaml(conf_file_path, client_config) # Set up Docker volumes volumes = { From 10f92feb70a914e05844f5ac5630e2ce2911fa4d Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 21 Jun 2025 09:55:51 +0200 Subject: [PATCH 126/244] (feat) add funding recorder --- services/funding_recorder.py | 142 +++++++++++++++++++++++++++++++++++ 1 file changed, 142 insertions(+) create mode 100644 services/funding_recorder.py diff --git a/services/funding_recorder.py b/services/funding_recorder.py new file mode 100644 index 00000000..2be61a90 --- /dev/null +++ b/services/funding_recorder.py @@ -0,0 +1,142 @@ +import asyncio +import logging +from datetime import datetime +from decimal import Decimal, InvalidOperation +from typing import Dict, Optional + +from hummingbot.connector.connector_base import ConnectorBase +from hummingbot.core.event.event_forwarder import SourceInfoEventForwarder +from hummingbot.core.event.events import MarketEvent, FundingPaymentCompletedEvent + +from database import AsyncDatabaseManager, FundingRepository + + +class FundingRecorder: + """ + Records funding payment events and associates them with position data. + Follows the same pattern as OrdersRecorder for consistency. 
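The path changes above are the practical payoff of the fs_util singleton: callers pass paths relative to the shared base_path ("bots") and never join directories by hand. Roughly, with placeholder file names:

    script_cfg = fs_util.read_yaml_file("conf/scripts/my_script.yml")  # resolves to bots/conf/scripts/my_script.yml
    client_cfg = fs_util.read_yaml_file("instances/hummingbot-demo/conf/conf_client.yml")
    client_cfg["instance_id"] = "hummingbot-demo"
    fs_util.dump_dict_to_yaml("instances/hummingbot-demo/conf/conf_client.yml", client_cfg)
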
+ """ + + def __init__(self, db_manager: AsyncDatabaseManager, account_name: str, connector_name: str): + self.db_manager = db_manager + self.account_name = account_name + self.connector_name = connector_name + self._connector: Optional[ConnectorBase] = None + self.logger = logging.getLogger(__name__) + + # Create event forwarder for funding payments + self._funding_payment_forwarder = SourceInfoEventForwarder(self._did_funding_payment) + + # Event pairs mapping events to forwarders + self._event_pairs = [ + (MarketEvent.FundingPaymentCompleted, self._funding_payment_forwarder), + ] + + def start(self, connector: ConnectorBase): + """Start recording funding payments for the given connector""" + self._connector = connector + + # Subscribe to funding payment events + for event, forwarder in self._event_pairs: + connector.add_listener(event, forwarder) + + logging.info(f"FundingRecorder started for {self.account_name}/{self.connector_name}") + + async def stop(self): + """Stop recording funding payments""" + if self._connector: + for event, forwarder in self._event_pairs: + self._connector.remove_listener(event, forwarder) + logging.info(f"FundingRecorder stopped for {self.account_name}/{self.connector_name}") + + def _did_funding_payment(self, event_tag: int, market: ConnectorBase, event: FundingPaymentCompletedEvent): + """Handle funding payment events - called by SourceInfoEventForwarder""" + try: + asyncio.create_task(self._handle_funding_payment(event)) + except Exception as e: + logging.error(f"Error in _did_funding_payment: {e}") + + async def _handle_funding_payment(self, event: FundingPaymentCompletedEvent): + """Handle funding payment events""" + # Get current position data if available + position_data = None + if self._connector and hasattr(self._connector, 'account_positions'): + try: + positions = self._connector.account_positions + if positions: + for position in positions.values(): + if position.trading_pair == event.trading_pair: + position_data = { + "size": float(position.amount), + "side": position.position_side.name if hasattr(position.position_side, 'name') else str(position.position_side), + } + break + except Exception as e: + logging.warning(f"Could not get position data for funding payment: {e}") + + # Record the funding payment + await self.record_funding_payment(event, self.account_name, self.connector_name, position_data) + + async def record_funding_payment(self, event: FundingPaymentCompletedEvent, + account_name: str, connector_name: str, + position_data: Optional[Dict] = None): + """ + Record a funding payment event with optional position association. 
+ + Args: + event: FundingPaymentCompletedEvent from Hummingbot + account_name: Account name + connector_name: Connector name + position_data: Optional position data at time of payment + """ + try: + # Validate and convert funding data + funding_rate = Decimal(str(event.funding_rate)) + funding_payment = Decimal(str(event.amount)) + + # Create funding payment record + funding_data = { + "funding_payment_id": f"{connector_name}_{event.trading_pair}_{event.timestamp.timestamp()}", + "timestamp": event.timestamp, + "account_name": account_name, + "connector_name": connector_name, + "trading_pair": event.trading_pair, + "funding_rate": float(funding_rate), + "funding_payment": float(funding_payment), + "fee_currency": getattr(event, 'fee_currency', 'USDT'), # Default to USDT if not provided + "exchange_funding_id": getattr(event, 'exchange_funding_id', None), + } + + # Add position data if provided + if position_data: + funding_data.update({ + "position_size": float(position_data.get("size", 0)), + "position_side": position_data.get("side"), + }) + + # Save to database + async with self.db_manager.get_session() as session: + funding_repo = FundingRepository(session) + + # Check if funding payment already exists + if await funding_repo.funding_payment_exists(funding_data["funding_payment_id"]): + self.logger.info(f"Funding payment {funding_data['funding_payment_id']} already exists, skipping") + return + + funding_payment = await funding_repo.create_funding_payment(funding_data) + await session.commit() + + self.logger.info( + f"Recorded funding payment for {account_name}/{connector_name}: " + f"{event.trading_pair} - Rate: {funding_rate}, Payment: {funding_payment} " + f"{funding_data['fee_currency']}" + ) + + return funding_payment + + except (ValueError, InvalidOperation) as e: + self.logger.error(f"Error processing funding payment for {event.trading_pair}: {e}, skipping update") + return + except Exception as e: + self.logger.error(f"Unexpected error recording funding payment: {e}") + return \ No newline at end of file From e36dbe65f6e12cfbdb166b82b086d6911e0e83f4 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 21 Jun 2025 09:56:05 +0200 Subject: [PATCH 127/244] (feat) fix orders recorder when null --- services/orders_recorder.py | 62 ++++++++++++++++++++++++------------- 1 file changed, 40 insertions(+), 22 deletions(-) diff --git a/services/orders_recorder.py b/services/orders_recorder.py index 288cfc9d..458d4f74 100644 --- a/services/orders_recorder.py +++ b/services/orders_recorder.py @@ -1,8 +1,10 @@ import asyncio import logging +import math +import time from typing import Any, Optional, Union from datetime import datetime -from decimal import Decimal +from decimal import Decimal, InvalidOperation from hummingbot.core.event.event_forwarder import SourceInfoEventForwarder from hummingbot.core.event.events import ( @@ -189,29 +191,45 @@ async def _handle_order_filled(self, event: OrderFilledEvent): trade_fee_paid = 0 trade_fee_currency = None - # Update order with fill information - order = await order_repo.update_order_fill( - client_order_id=event.order_id, - filled_amount=Decimal(str(event.amount)), - average_fill_price=Decimal(str(event.price)), - fee_paid=Decimal(str(trade_fee_paid)) if trade_fee_paid else None, - fee_currency=trade_fee_currency - ) + # Update order with fill information (handle potential NaN values like Hummingbot does) + try: + filled_amount = Decimal(str(event.amount)) + average_fill_price = Decimal(str(event.price)) + fee_paid_decimal = 
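Worth noting in record_funding_payment above: the funding_payment_id is built deterministically from connector, trading pair, and event timestamp, so a replayed or double-delivered event reduces to a no-op once funding_payment_exists matches. The dedupe contract, reduced to its core:

    payment_id = f"{connector_name}_{event.trading_pair}_{event.timestamp.timestamp()}"
    if await funding_repo.funding_payment_exists(payment_id):
        return  # same payment delivered twice, recorded once
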
Decimal(str(trade_fee_paid)) if trade_fee_paid else None + + order = await order_repo.update_order_fill( + client_order_id=event.order_id, + filled_amount=filled_amount, + average_fill_price=average_fill_price, + fee_paid=fee_paid_decimal, + fee_currency=trade_fee_currency + ) + except (ValueError, InvalidOperation) as e: + logging.error(f"Error processing order fill for {event.order_id}: {e}, skipping update") + return - # Create trade record + # Create trade record using validated values if order: - trade_data = { - "order_id": order.id, - "trade_id": f"{event.order_id}_{event.timestamp}", - "timestamp": datetime.fromtimestamp(event.timestamp), - "trading_pair": event.trading_pair, - "trade_type": event.trade_type.name, - "amount": float(event.amount), - "price": float(event.price), - "fee_paid": trade_fee_paid, - "fee_currency": trade_fee_currency - } - await trade_repo.create_trade(trade_data) + try: + # Validate all values before creating trade record + validated_timestamp = event.timestamp if event.timestamp and not math.isnan(event.timestamp) else time.time() + validated_fee = trade_fee_paid if trade_fee_paid and not math.isnan(trade_fee_paid) else 0 + + trade_data = { + "order_id": order.id, + "trade_id": f"{event.order_id}_{validated_timestamp}", + "timestamp": datetime.fromtimestamp(validated_timestamp), + "trading_pair": event.trading_pair, + "trade_type": event.trade_type.name, + "amount": float(filled_amount), # Use validated amount + "price": float(average_fill_price), # Use validated price + "fee_paid": validated_fee, + "fee_currency": trade_fee_currency + } + await trade_repo.create_trade(trade_data) + except (ValueError, TypeError) as e: + logging.error(f"Error creating trade record for {event.order_id}: {e}") + logging.error(f"Trade data that failed: timestamp={event.timestamp}, amount={event.amount}, price={event.price}, fee={trade_fee_paid}") logging.debug(f"Recorded order fill: {event.order_id} - {event.amount} @ {event.price}") except Exception as e: From 5b5e6ec51fa9bbc3151b36806d546c14c0b5e7f4 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 21 Jun 2025 09:56:22 +0200 Subject: [PATCH 128/244] (feat) save positions snapshot --- services/accounts_service.py | 215 ++++++++++++++++++++++++++++++++++- 1 file changed, 214 insertions(+), 1 deletion(-) diff --git a/services/accounts_service.py b/services/accounts_service.py index 837947db..5b53420b 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -9,7 +9,7 @@ from hummingbot.core.data_type.common import OrderType, TradeType, PositionAction, PositionMode from config import settings -from database import AsyncDatabaseManager, AccountRepository, OrderRepository, TradeRepository +from database import AsyncDatabaseManager, AccountRepository, OrderRepository, TradeRepository, PositionRepository, FundingRepository from services.market_data_feed_manager import MarketDataFeedManager from utils.connector_manager import ConnectorManager @@ -1049,3 +1049,216 @@ async def get_trades(self, account_name: Optional[str] = None, market: Optional[ except Exception as e: logging.error(f"Error getting trades: {e}") return [] + + async def get_account_positions(self, account_name: str, connector_name: str) -> List[Dict]: + """ + Get current positions for a specific perpetual connector. 
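The math.isnan guards above exist because fill events can arrive with NaN timestamps or fees; the same defensive conversion can be factored into a helper. A close analogue of the patch's checks (the helper name is ours; note the patch additionally treats falsy values such as 0 as missing):

    import math
    import time

    def nan_safe(value, fallback):
        # return fallback when value is None or NaN, else the value itself
        if value is None or (isinstance(value, float) and math.isnan(value)):
            return fallback
        return value

    validated_timestamp = nan_safe(event.timestamp, time.time())
    validated_fee = nan_safe(trade_fee_paid, 0)
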
+ + Args: + account_name: Name of the account + connector_name: Name of the connector (must be perpetual) + + Returns: + List of position dictionaries + + Raises: + HTTPException: If account/connector not found or not perpetual + """ + # Validate this is a perpetual connector + if "_perpetual" not in connector_name: + raise HTTPException(status_code=400, detail=f"Connector '{connector_name}' is not a perpetual connector") + + connector = await self.get_connector_instance(account_name, connector_name) + + # Check if connector has account_positions property + if not hasattr(connector, 'account_positions'): + raise HTTPException(status_code=400, detail=f"Connector '{connector_name}' does not support position tracking") + + try: + positions = [] + raw_positions = connector.account_positions + + for trading_pair, position_info in raw_positions.items(): + # Convert position data to dict format + position_dict = { + "account_name": account_name, + "connector_name": connector_name, + "trading_pair": trading_pair, + "side": position_info.position_side.name if hasattr(position_info, 'position_side') else "UNKNOWN", + "amount": float(position_info.amount) if hasattr(position_info, 'amount') else 0.0, + "entry_price": float(position_info.entry_price) if hasattr(position_info, 'entry_price') else None, + "mark_price": float(position_info.mark_price) if hasattr(position_info, 'mark_price') else None, + "unrealized_pnl": float(position_info.unrealized_pnl) if hasattr(position_info, 'unrealized_pnl') else None, + "percentage_pnl": float(position_info.unrealized_pnl_percentage) if hasattr(position_info, 'unrealized_pnl_percentage') else None, + "leverage": float(position_info.leverage) if hasattr(position_info, 'leverage') else None, + "margin": float(position_info.initial_margin) if hasattr(position_info, 'initial_margin') else None, + "maintenance_margin": float(position_info.maintenance_margin) if hasattr(position_info, 'maintenance_margin') else None, + "funding_fees": float(position_info.cumulative_funding_fee) if hasattr(position_info, 'cumulative_funding_fee') else 0.0, + } + + # Only include positions with non-zero amounts + if position_dict["amount"] != 0: + positions.append(position_dict) + + return positions + + except Exception as e: + logging.error(f"Failed to get positions for {connector_name}: {e}") + raise HTTPException(status_code=500, detail=f"Failed to get positions: {str(e)}") + + async def save_position_snapshot(self, account_name: str, connector_name: str) -> Dict[str, int]: + """ + Save current positions as snapshots in the database for historical tracking. 
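get_account_positions above reads every field through hasattr because Position objects differ across connector implementations, so a missing attribute degrades to None or 0 instead of raising. The pattern collapses to a one-line helper; opt_float is a name invented for this sketch:

    def opt_float(obj, attr, default=None):
        # mirrors the hasattr-guarded float(...) conversions in the patch
        return float(getattr(obj, attr)) if hasattr(obj, attr) else default

    entry_price = opt_float(position_info, "entry_price")
    leverage = opt_float(position_info, "leverage")
    funding_fees = opt_float(position_info, "cumulative_funding_fee", 0.0)
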
+ + Args: + account_name: Name of the account + connector_name: Name of the connector + + Returns: + Dictionary with count of snapshots saved + """ + await self.ensure_db_initialized() + + try: + # Get current positions from connector + positions = await self.get_account_positions(account_name, connector_name) + + if not positions: + return {"snapshots_saved": 0, "message": "No active positions to save"} + + async with self.db_manager.get_session_context() as session: + position_repo = PositionRepository(session) + snapshots_saved = 0 + + for position in positions: + # Create snapshot data + snapshot_data = { + "account_name": account_name, + "connector_name": connector_name, + "trading_pair": position["trading_pair"], + "side": position["side"], + "exchange_size": position["amount"], + "entry_price": position["entry_price"], + "mark_price": position["mark_price"], + "unrealized_pnl": position["unrealized_pnl"], + "percentage_pnl": position["percentage_pnl"], + "leverage": position["leverage"], + "initial_margin": position["margin"], + "maintenance_margin": position["maintenance_margin"], + "cumulative_funding_fees": position["funding_fees"], + "fee_currency": "USDT", # Most perpetuals use USDT + "is_reconciled": "PENDING" + } + + await position_repo.create_position_snapshot(snapshot_data) + snapshots_saved += 1 + + return { + "snapshots_saved": snapshots_saved, + "message": f"Saved {snapshots_saved} position snapshots for {account_name}/{connector_name}" + } + + except Exception as e: + logging.error(f"Error saving position snapshots: {e}") + raise HTTPException(status_code=500, detail=f"Failed to save position snapshots: {str(e)}") + + async def get_position_snapshots(self, account_name: str, connector_name: str = None) -> List[Dict]: + """ + Get latest position snapshots from database. + + Args: + account_name: Name of the account + connector_name: Optional connector name filter + + Returns: + List of latest position snapshots + """ + await self.ensure_db_initialized() + + try: + async with self.db_manager.get_session_context() as session: + position_repo = PositionRepository(session) + + if connector_name: + positions = await position_repo.get_latest_positions(account_name, connector_name) + return [position_repo.to_dict(pos) for pos in positions] + else: + # Get for all perpetual connectors + all_positions = [] + all_connectors = self.connector_manager.get_all_connectors() + + if account_name in all_connectors: + for conn_name in all_connectors[account_name].keys(): + if "_perpetual" in conn_name: + positions = await position_repo.get_latest_positions(account_name, conn_name) + all_positions.extend([position_repo.to_dict(pos) for pos in positions]) + + return all_positions + + except Exception as e: + logging.error(f"Error getting position snapshots: {e}") + return [] + + async def get_funding_payments(self, account_name: str, connector_name: str = None, + trading_pair: str = None, limit: int = 100) -> List[Dict]: + """ + Get funding payment history for an account. 
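save_position_snapshot above records a point-in-time view, so something has to call it on a schedule to build the history the docstring promises. One hedged way to drive it; the interval and the task wiring are assumptions, not part of the patch:

    import asyncio
    import logging

    async def snapshot_loop(accounts_service, account_name, connector_name, interval_s=300):
        while True:
            try:
                result = await accounts_service.save_position_snapshot(account_name, connector_name)
                logging.info(result["message"])
            except Exception as e:
                logging.warning(f"Position snapshot failed: {e}")
            await asyncio.sleep(interval_s)
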
+ + Args: + account_name: Name of the account + connector_name: Optional connector name filter + trading_pair: Optional trading pair filter + limit: Maximum number of records to return + + Returns: + List of funding payment dictionaries + """ + await self.ensure_db_initialized() + + try: + async with self.db_manager.get_session_context() as session: + funding_repo = FundingRepository(session) + funding_payments = await funding_repo.get_funding_payments( + account_name=account_name, + connector_name=connector_name, + trading_pair=trading_pair, + limit=limit + ) + return [funding_repo.to_dict(payment) for payment in funding_payments] + + except Exception as e: + logging.error(f"Error getting funding payments: {e}") + return [] + + async def get_total_funding_fees(self, account_name: str, connector_name: str, + trading_pair: str) -> Dict: + """ + Get total funding fees for a specific trading pair. + + Args: + account_name: Name of the account + connector_name: Name of the connector + trading_pair: Trading pair to get fees for + + Returns: + Dictionary with total funding fees information + """ + await self.ensure_db_initialized() + + try: + async with self.db_manager.get_session_context() as session: + funding_repo = FundingRepository(session) + return await funding_repo.get_total_funding_fees( + account_name=account_name, + connector_name=connector_name, + trading_pair=trading_pair + ) + + except Exception as e: + logging.error(f"Error getting total funding fees: {e}") + return { + "total_funding_fees": 0, + "payment_count": 0, + "fee_currency": None, + "error": str(e) + } From 8c01a43e975da09dfb47d280ade12d6ee0961c89 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 21 Jun 2025 09:56:33 +0200 Subject: [PATCH 129/244] (feat) add funding rates recorder --- utils/connector_manager.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/utils/connector_manager.py b/utils/connector_manager.py index 657368a3..ba8082d4 100644 --- a/utils/connector_manager.py +++ b/utils/connector_manager.py @@ -28,6 +28,7 @@ def __init__(self, secrets_manager: ETHKeyFileSecretManger, db_manager=None): self.db_manager = db_manager self._connector_cache: Dict[str, ConnectorBase] = {} self._orders_recorders: Dict[str, any] = {} + self._funding_recorders: Dict[str, any] = {} async def get_connector(self, account_name: str, connector_name: str): """ @@ -226,6 +227,16 @@ async def _create_and_initialize_connector(self, account_name: str, connector_na orders_recorder = OrdersRecorder(self.db_manager, account_name, connector_name) orders_recorder.start(connector) self._orders_recorders[cache_key] = orders_recorder + + # Start funding tracking for perpetual connectors + if "_perpetual" in connector_name and cache_key not in self._funding_recorders: + # Import FundingRecorder dynamically to avoid circular imports + from services.funding_recorder import FundingRecorder + + # Create and start funding recorder + funding_recorder = FundingRecorder(self.db_manager, account_name, connector_name) + funding_recorder.start(connector) + self._funding_recorders[cache_key] = funding_recorder # Start the connector's network without order book tracker self._start_network_without_order_book(connector) @@ -278,6 +289,15 @@ async def stop_connector(self, account_name: str, connector_name: str): except Exception as e: logging.error(f"Error stopping order recorder for {account_name}/{connector_name}: {e}") + # Stop funding recorder if exists + if cache_key in self._funding_recorders: + try: + await 
self._funding_recorders[cache_key].stop() + del self._funding_recorders[cache_key] + logging.info(f"Stopped funding recorder for {account_name}/{connector_name}") + except Exception as e: + logging.error(f"Error stopping funding recorder for {account_name}/{connector_name}: {e}") + # Stop connector network if exists if cache_key in self._connector_cache: try: From f4b81684eb321a62da4ce9623bdb5cafd141048b Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 21 Jun 2025 09:57:37 +0200 Subject: [PATCH 130/244] (feat) add routes to get funding and positions --- routers/accounts.py | 254 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 254 insertions(+) diff --git a/routers/accounts.py b/routers/accounts.py index 0450568a..894bd827 100644 --- a/routers/accounts.py +++ b/routers/accounts.py @@ -318,4 +318,258 @@ async def add_credential(account_name: str, connector_name: str, credentials: Di raise HTTPException(status_code=400, detail=str(e)) +# Position Management Endpoints + +@router.get("/{account_name}/{connector_name}/positions", response_model=List[Dict]) +async def get_account_positions( + account_name: str, + connector_name: str, + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get current positions for a specific perpetual connector. + + This endpoint fetches real-time position data directly from the connector, + including unrealized PnL, leverage, funding fees, and margin information. + + Args: + account_name: Name of the account + connector_name: Name of the perpetual connector + + Returns: + List of current position dictionaries with real-time data + + Raises: + HTTPException: 400 if connector is not perpetual or doesn't support positions + HTTPException: 404 if account or connector not found + HTTPException: 500 if there's an error fetching positions + """ + try: + return await accounts_service.get_account_positions(account_name, connector_name) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching positions: {str(e)}") + +@router.get("/{account_name}/positions/snapshots", response_model=List[Dict]) +async def get_position_snapshots( + account_name: str, + connector_name: Optional[str] = Query(default=None, description="Filter by specific connector"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get latest position snapshots from database for historical analysis. + + Returns the most recent position snapshots for the specified account, + optionally filtered by connector. Useful for tracking position history + and performance over time. + + Args: + account_name: Name of the account + connector_name: Optional connector name to filter results + + Returns: + List of latest position snapshot dictionaries from database + + Raises: + HTTPException: 404 if account not found + HTTPException: 500 if there's an error fetching snapshots + """ + try: + return await accounts_service.get_position_snapshots(account_name, connector_name) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching position snapshots: {str(e)}") + + +@router.get("/{account_name}/positions", response_model=List[Dict]) +async def get_all_account_positions( + account_name: str, + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get current positions across all perpetual connectors for an account. 
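For reference, each element returned by the positions endpoints above carries the fields assembled in get_account_positions; the values below are illustrative only:

    {
        "account_name": "master_account",
        "connector_name": "binance_perpetual",
        "trading_pair": "BTC-USDT",
        "side": "LONG",
        "amount": 0.5,
        "entry_price": 60000.0,
        "mark_price": 60500.0,
        "unrealized_pnl": 250.0,
        "percentage_pnl": 0.83,
        "leverage": 10.0,
        "margin": 3000.0,
        "maintenance_margin": 150.0,
        "funding_fees": -1.25,
    }
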
+ + This endpoint aggregates real-time position data from all perpetual connectors + associated with the specified account, providing a complete portfolio view. + + Args: + account_name: Name of the account + + Returns: + List of position dictionaries from all perpetual connectors + + Raises: + HTTPException: 404 if account not found + HTTPException: 500 if there's an error fetching positions + """ + try: + all_positions = [] + + # Get all connectors for the account + all_connectors = accounts_service.connector_manager.get_all_connectors() + + if account_name in all_connectors: + for connector_name in all_connectors[account_name].keys(): + # Only fetch positions from perpetual connectors + if "_perpetual" in connector_name: + try: + positions = await accounts_service.get_account_positions(account_name, connector_name) + all_positions.extend(positions) + except Exception as e: + # Log error but continue with other connectors + import logging + logging.warning(f"Failed to get positions for {connector_name}: {e}") + + return all_positions + + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching account positions: {str(e)}") + + +# Funding Fee Management Endpoints + +@router.get("/{account_name}/{connector_name}/funding-payments", response_model=List[Dict]) +async def get_funding_payments( + account_name: str, + connector_name: str, + trading_pair: Optional[str] = Query(default=None, description="Filter by trading pair"), + limit: int = Query(default=100, ge=1, le=1000, description="Maximum number of records"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get funding payment history for a specific perpetual connector. + + This endpoint retrieves historical funding payment records including + funding rates, payment amounts, and position data at time of payment. + + Args: + account_name: Name of the account + connector_name: Name of the perpetual connector + trading_pair: Optional trading pair filter + limit: Maximum number of records to return + + Returns: + List of funding payment records with rates, amounts, and position data + + Raises: + HTTPException: 400 if connector is not perpetual + HTTPException: 404 if account or connector not found + HTTPException: 500 if there's an error fetching funding payments + """ + try: + # Validate this is a perpetual connector + if "_perpetual" not in connector_name: + raise HTTPException(status_code=400, detail=f"Connector '{connector_name}' is not a perpetual connector") + + return await accounts_service.get_funding_payments( + account_name=account_name, + connector_name=connector_name, + trading_pair=trading_pair, + limit=limit + ) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching funding payments: {str(e)}") + + +@router.get("/{account_name}/{connector_name}/funding-fees/{trading_pair}", response_model=Dict) +async def get_total_funding_fees( + account_name: str, + connector_name: str, + trading_pair: str, + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get total funding fees summary for a specific trading pair. + + This endpoint provides aggregated funding fee information including + total fees paid/received, payment count, and fee currency. 
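A hedged example of querying the funding-payment history endpoint above; the mount point, port, and credentials are assumptions about the deployment:

    import requests

    resp = requests.get(
        "http://localhost:8000/accounts/master_account/binance_perpetual/funding-payments",
        params={"trading_pair": "BTC-USDT", "limit": 50},
        auth=("admin", "admin"),
    )
    for payment in resp.json():
        print(payment["timestamp"], payment["funding_rate"], payment["funding_payment"])
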
+ + Args: + account_name: Name of the account + connector_name: Name of the perpetual connector + trading_pair: Trading pair to get fees for + + Returns: + Dictionary with total funding fees summary + + Raises: + HTTPException: 400 if connector is not perpetual + HTTPException: 404 if account or connector not found + HTTPException: 500 if there's an error calculating fees + """ + try: + # Validate this is a perpetual connector + if "_perpetual" not in connector_name: + raise HTTPException(status_code=400, detail=f"Connector '{connector_name}' is not a perpetual connector") + + return await accounts_service.get_total_funding_fees( + account_name=account_name, + connector_name=connector_name, + trading_pair=trading_pair + ) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error calculating funding fees: {str(e)}") + + +@router.get("/{account_name}/funding-payments", response_model=List[Dict]) +async def get_all_account_funding_payments( + account_name: str, + limit: int = Query(default=100, ge=1, le=1000, description="Maximum number of records"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get funding payment history across all perpetual connectors for an account. + + This endpoint aggregates funding payment data from all perpetual connectors + associated with the specified account, providing a complete funding fee view. + + Args: + account_name: Name of the account + limit: Maximum number of records to return + + Returns: + List of funding payment records from all perpetual connectors + + Raises: + HTTPException: 404 if account not found + HTTPException: 500 if there's an error fetching funding payments + """ + try: + all_funding_payments = [] + + # Get all connectors for the account + all_connectors = accounts_service.connector_manager.get_all_connectors() + + if account_name in all_connectors: + for connector_name in all_connectors[account_name].keys(): + # Only fetch funding payments from perpetual connectors + if "_perpetual" in connector_name: + try: + payments = await accounts_service.get_funding_payments( + account_name=account_name, + connector_name=connector_name, + limit=limit + ) + all_funding_payments.extend(payments) + except Exception as e: + # Log error but continue with other connectors + import logging + logging.warning(f"Failed to get funding payments for {connector_name}: {e}") + + # Sort by timestamp (most recent first) + all_funding_payments.sort(key=lambda x: x.get("timestamp", ""), reverse=True) + + # Apply limit to the combined results + return all_funding_payments[:limit] + + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching account funding payments: {str(e)}") + + From cc64428b61b433614983715085335d9b1b74d028 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 21 Jun 2025 09:58:05 +0200 Subject: [PATCH 131/244] (feat) fix order repository null values --- database/repositories/order_repository.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/database/repositories/order_repository.py b/database/repositories/order_repository.py index d3e06009..3bf7ee21 100644 --- a/database/repositories/order_repository.py +++ b/database/repositories/order_repository.py @@ -49,19 +49,27 @@ async def update_order_fill(self, client_order_id: str, filled_amount: Decimal, ) order = result.scalar_one_or_none() if order: - order.filled_amount = float(filled_amount) + # Add to existing filled amount instead of replacing + 
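One subtlety in the cross-connector aggregation above: payments are sorted by their timestamp field, which to_dict emits as an isoformat() string. Because those strings are zero-padded, lexicographic order matches chronological order as long as every row carries the same UTC offset, which is the assumption this sort relies on:

    # zero-padded ISO-8601 strings with a uniform offset sort chronologically
    assert "2025-06-21T09:58:05+00:00" > "2025-06-20T17:39:35+00:00"
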
previous_filled = Decimal(str(order.filled_amount or 0)) + order.filled_amount = float(previous_filled + filled_amount) + + # Update average price (simplified - use latest fill price) order.average_fill_price = float(average_fill_price) + + # Add to existing fees if fee_paid is not None: - order.fee_paid = float(fee_paid) + previous_fee = Decimal(str(order.fee_paid or 0)) + order.fee_paid = float(previous_fee + fee_paid) if fee_currency: order.fee_currency = fee_currency if exchange_order_id: order.exchange_order_id = exchange_order_id - # Update status based on fill amount - if filled_amount >= Decimal(str(order.amount)): + # Update status based on total filled amount + total_filled = Decimal(str(order.filled_amount)) + if total_filled >= Decimal(str(order.amount)): order.status = "FILLED" - else: + elif total_filled > 0: order.status = "PARTIALLY_FILLED" await self.session.flush() From 26f4effdae3abe447c779ff050b4fce2ce49eac9 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 21 Jun 2025 09:58:14 +0200 Subject: [PATCH 132/244] (feat) add funding and position repositories --- database/repositories/funding_repository.py | 84 ++++++++++++ database/repositories/position_repository.py | 133 +++++++++++++++++++ 2 files changed, 217 insertions(+) create mode 100644 database/repositories/funding_repository.py create mode 100644 database/repositories/position_repository.py diff --git a/database/repositories/funding_repository.py b/database/repositories/funding_repository.py new file mode 100644 index 00000000..e9b8dd42 --- /dev/null +++ b/database/repositories/funding_repository.py @@ -0,0 +1,84 @@ +from datetime import datetime +from typing import Dict, List, Optional +from decimal import Decimal + +from sqlalchemy import desc, select +from sqlalchemy.ext.asyncio import AsyncSession + +from database.models import FundingPayment + + +class FundingRepository: + def __init__(self, session: AsyncSession): + self.session = session + + async def create_funding_payment(self, funding_data: Dict) -> FundingPayment: + """Create a new funding payment record.""" + funding = FundingPayment(**funding_data) + self.session.add(funding) + await self.session.flush() # Get the ID + return funding + + async def get_funding_payments(self, account_name: str, connector_name: str = None, + trading_pair: str = None, limit: int = 100) -> List[FundingPayment]: + """Get funding payments with optional filters.""" + query = select(FundingPayment).where(FundingPayment.account_name == account_name) + + if connector_name: + query = query.where(FundingPayment.connector_name == connector_name) + if trading_pair: + query = query.where(FundingPayment.trading_pair == trading_pair) + + query = query.order_by(FundingPayment.timestamp.desc()).limit(limit) + + result = await self.session.execute(query) + return result.scalars().all() + + async def get_total_funding_fees(self, account_name: str, connector_name: str, + trading_pair: str) -> Dict: + """Get total funding fees for a specific trading pair.""" + query = select(FundingPayment).where( + FundingPayment.account_name == account_name, + FundingPayment.connector_name == connector_name, + FundingPayment.trading_pair == trading_pair + ) + + result = await self.session.execute(query) + payments = result.scalars().all() + + total_funding = Decimal('0') + payment_count = 0 + + for payment in payments: + total_funding += Decimal(str(payment.funding_payment)) + payment_count += 1 + + return { + "total_funding_fees": float(total_funding), + "payment_count": payment_count, + 
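The accumulation fix above changes update_order_fill from "set the filled amount" to "add this fill to the running total", which is what makes multi-fill orders come out right. A worked example mirroring the repository logic:

    from decimal import Decimal

    def apply_fill(order, fill_amount: Decimal, fee: Decimal):
        # accumulate, then derive status from the running total
        order["filled"] = order.get("filled", Decimal("0")) + fill_amount
        order["fee_paid"] = order.get("fee_paid", Decimal("0")) + fee
        if order["filled"] >= order["amount"]:
            order["status"] = "FILLED"
        elif order["filled"] > 0:
            order["status"] = "PARTIALLY_FILLED"
        return order

    o = {"amount": Decimal("1.0")}
    apply_fill(o, Decimal("0.4"), Decimal("0.01"))   # PARTIALLY_FILLED, filled 0.4
    apply_fill(o, Decimal("0.6"), Decimal("0.015"))  # FILLED, fee_paid 0.025
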
"fee_currency": payments[0].fee_currency if payments else None + } + + async def funding_payment_exists(self, funding_payment_id: str) -> bool: + """Check if a funding payment already exists.""" + result = await self.session.execute( + select(FundingPayment).where(FundingPayment.funding_payment_id == funding_payment_id) + ) + return result.scalar_one_or_none() is not None + + def to_dict(self, funding: FundingPayment) -> Dict: + """Convert FundingPayment model to dictionary format.""" + return { + "id": funding.id, + "funding_payment_id": funding.funding_payment_id, + "timestamp": funding.timestamp.isoformat(), + "account_name": funding.account_name, + "connector_name": funding.connector_name, + "trading_pair": funding.trading_pair, + "funding_rate": float(funding.funding_rate), + "funding_payment": float(funding.funding_payment), + "fee_currency": funding.fee_currency, + "position_size": float(funding.position_size) if funding.position_size else None, + "position_side": funding.position_side, + "exchange_funding_id": funding.exchange_funding_id, + } \ No newline at end of file diff --git a/database/repositories/position_repository.py b/database/repositories/position_repository.py new file mode 100644 index 00000000..8ffaaee8 --- /dev/null +++ b/database/repositories/position_repository.py @@ -0,0 +1,133 @@ +from datetime import datetime +from typing import Dict, List, Optional +from decimal import Decimal + +from sqlalchemy import desc, select, func +from sqlalchemy.ext.asyncio import AsyncSession + +from database.models import PositionSnapshot + + +class PositionRepository: + def __init__(self, session: AsyncSession): + self.session = session + + async def create_position_snapshot(self, position_data: Dict) -> PositionSnapshot: + """Create a new position snapshot record.""" + position = PositionSnapshot(**position_data) + self.session.add(position) + await self.session.flush() # Get the ID + return position + + async def get_latest_positions(self, account_name: str, connector_name: str) -> List[PositionSnapshot]: + """Get the latest position snapshots for an account-connector pair.""" + # Get the latest snapshot for each trading pair + subquery = ( + select(PositionSnapshot.trading_pair, + func.max(PositionSnapshot.timestamp).label('max_timestamp')) + .where( + PositionSnapshot.account_name == account_name, + PositionSnapshot.connector_name == connector_name, + PositionSnapshot.exchange_size != 0 # Only active positions + ) + .group_by(PositionSnapshot.trading_pair) + .subquery() + ) + + query = ( + select(PositionSnapshot) + .join(subquery, + (PositionSnapshot.trading_pair == subquery.c.trading_pair) & + (PositionSnapshot.timestamp == subquery.c.max_timestamp)) + .where( + PositionSnapshot.account_name == account_name, + PositionSnapshot.connector_name == connector_name + ) + ) + + result = await self.session.execute(query) + return result.scalars().all() + + async def get_position_history(self, account_name: str, connector_name: str, + trading_pair: str, limit: int = 100) -> List[PositionSnapshot]: + """Get position history for a specific trading pair.""" + query = ( + select(PositionSnapshot) + .where( + PositionSnapshot.account_name == account_name, + PositionSnapshot.connector_name == connector_name, + PositionSnapshot.trading_pair == trading_pair + ) + .order_by(PositionSnapshot.timestamp.desc()) + .limit(limit) + ) + + result = await self.session.execute(query) + return result.scalars().all() + + async def update_position_reconciliation(self, position_id: int, + calculated_size: 
Decimal, + calculated_entry_price: Decimal = None) -> Optional[PositionSnapshot]: + """Update position with calculated values for reconciliation.""" + result = await self.session.execute( + select(PositionSnapshot).where(PositionSnapshot.id == position_id) + ) + position = result.scalar_one_or_none() + + if position: + position.calculated_size = float(calculated_size) + if calculated_entry_price: + position.calculated_entry_price = float(calculated_entry_price) + + # Calculate difference and reconciliation status + size_diff = abs(calculated_size - Decimal(str(position.exchange_size))) + position.size_difference = float(size_diff) + + # Set reconciliation status (within 0.1% tolerance) + tolerance = Decimal(str(position.exchange_size)) * Decimal('0.001') + if size_diff <= tolerance: + position.is_reconciled = "RECONCILED" + else: + position.is_reconciled = "MISMATCH" + + await self.session.flush() + + return position + + async def get_reconciliation_mismatches(self, account_name: str = None) -> List[PositionSnapshot]: + """Get positions with reconciliation mismatches.""" + query = select(PositionSnapshot).where(PositionSnapshot.is_reconciled == "MISMATCH") + + if account_name: + query = query.where(PositionSnapshot.account_name == account_name) + + query = query.order_by(PositionSnapshot.timestamp.desc()) + + result = await self.session.execute(query) + return result.scalars().all() + + def to_dict(self, position: PositionSnapshot) -> Dict: + """Convert PositionSnapshot model to dictionary format.""" + return { + "id": position.id, + "account_name": position.account_name, + "connector_name": position.connector_name, + "trading_pair": position.trading_pair, + "timestamp": position.timestamp.isoformat(), + "side": position.side, + "exchange_size": float(position.exchange_size), + "entry_price": float(position.entry_price) if position.entry_price else None, + "mark_price": float(position.mark_price) if position.mark_price else None, + "unrealized_pnl": float(position.unrealized_pnl) if position.unrealized_pnl else None, + "percentage_pnl": float(position.percentage_pnl) if position.percentage_pnl else None, + "leverage": float(position.leverage) if position.leverage else None, + "initial_margin": float(position.initial_margin) if position.initial_margin else None, + "maintenance_margin": float(position.maintenance_margin) if position.maintenance_margin else None, + "cumulative_funding_fees": float(position.cumulative_funding_fees), + "fee_currency": position.fee_currency, + "calculated_size": float(position.calculated_size) if position.calculated_size else None, + "calculated_entry_price": float(position.calculated_entry_price) if position.calculated_entry_price else None, + "size_difference": float(position.size_difference) if position.size_difference else None, + "exchange_position_id": position.exchange_position_id, + "is_reconciled": position.is_reconciled, + } \ No newline at end of file From a9dd6ba91e452fd209cf46e6716ebc818a22d2cd Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 21 Jun 2025 09:58:25 +0200 Subject: [PATCH 133/244] (feat) add default to fee paid --- database/__init__.py | 6 ++-- database/models.py | 73 +++++++++++++++++++++++++++++++++++++++++++- 2 files changed, 76 insertions(+), 3 deletions(-) diff --git a/database/__init__.py b/database/__init__.py index 13b3fd4d..e7f49783 100644 --- a/database/__init__.py +++ b/database/__init__.py @@ -1,7 +1,9 @@ -from .models import AccountState, TokenState, Order, Trade, Base +from .models import AccountState, TokenState, 
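The reconciliation threshold above is relative, 0.1% of the exchange-reported size, so larger positions tolerate proportionally larger drift before being flagged. Concretely:

    from decimal import Decimal

    exchange_size = Decimal("2.0")
    tolerance = exchange_size * Decimal("0.001")                    # 0.002
    assert abs(Decimal("1.9995") - exchange_size) <= tolerance      # within 0.1% -> RECONCILED
    assert not abs(Decimal("1.9900") - exchange_size) <= tolerance  # outside     -> MISMATCH
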
Order, Trade, PositionSnapshot, FundingPayment, Base from .connection import AsyncDatabaseManager from .repositories import AccountRepository from .repositories.order_repository import OrderRepository from .repositories.trade_repository import TradeRepository +from .repositories.position_repository import PositionRepository +from .repositories.funding_repository import FundingRepository -__all__ = ["AccountState", "TokenState", "Order", "Trade", "Base", "AsyncDatabaseManager", "AccountRepository", "OrderRepository", "TradeRepository"] \ No newline at end of file +__all__ = ["AccountState", "TokenState", "Order", "Trade", "PositionSnapshot", "FundingPayment", "Base", "AsyncDatabaseManager", "AccountRepository", "OrderRepository", "TradeRepository", "PositionRepository", "FundingRepository"] \ No newline at end of file diff --git a/database/models.py b/database/models.py index 3c682459..35928416 100644 --- a/database/models.py +++ b/database/models.py @@ -68,7 +68,7 @@ class Order(Base): average_fill_price = Column(Numeric(precision=30, scale=18), nullable=True) # Fee information - fee_paid = Column(Numeric(precision=30, scale=18), nullable=True) + fee_paid = Column(Numeric(precision=30, scale=18), default=0, nullable=True) fee_currency = Column(String, nullable=True) # Additional metadata @@ -104,3 +104,74 @@ class Trade(Base): order = relationship("Order", back_populates="trades") +class PositionSnapshot(Base): + __tablename__ = "position_snapshots" + + id = Column(Integer, primary_key=True, index=True) + + # Position identification + account_name = Column(String, nullable=False, index=True) + connector_name = Column(String, nullable=False, index=True) + trading_pair = Column(String, nullable=False, index=True) + + # Timestamps + timestamp = Column(TIMESTAMP(timezone=True), server_default=func.now(), nullable=False, index=True) + + # Real-time exchange data (from connector.account_positions) + side = Column(String, nullable=False) # LONG, SHORT + exchange_size = Column(Numeric(precision=30, scale=18), nullable=False) # Size from exchange + entry_price = Column(Numeric(precision=30, scale=18), nullable=True) # Average entry price + mark_price = Column(Numeric(precision=30, scale=18), nullable=True) # Current mark price + + # Real-time PnL data (can't be derived from trades alone) + unrealized_pnl = Column(Numeric(precision=30, scale=18), nullable=True) # From exchange + percentage_pnl = Column(Numeric(precision=10, scale=6), nullable=True) # PnL percentage + + # Leverage and margin info + leverage = Column(Numeric(precision=10, scale=2), nullable=True) # Position leverage + initial_margin = Column(Numeric(precision=30, scale=18), nullable=True) # Initial margin + maintenance_margin = Column(Numeric(precision=30, scale=18), nullable=True) # Maintenance margin + + # Fee tracking (exchange provides cumulative data) + cumulative_funding_fees = Column(Numeric(precision=30, scale=18), nullable=False, default=0) # Funding fees + fee_currency = Column(String, nullable=True) # Fee currency (usually USDT) + + # Reconciliation fields (calculated from our trade data) + calculated_size = Column(Numeric(precision=30, scale=18), nullable=True) # Size from our trades + calculated_entry_price = Column(Numeric(precision=30, scale=18), nullable=True) # Entry from our trades + size_difference = Column(Numeric(precision=30, scale=18), nullable=True) # Difference for reconciliation + + # Additional metadata + exchange_position_id = Column(String, nullable=True, index=True) # Exchange position ID + is_reconciled 
= Column(String, nullable=False, default="PENDING") # RECONCILED, MISMATCH, PENDING + + +class FundingPayment(Base): + __tablename__ = "funding_payments" + + id = Column(Integer, primary_key=True, index=True) + + # Payment identification + funding_payment_id = Column(String, nullable=False, unique=True, index=True) + + # Timestamps + timestamp = Column(TIMESTAMP(timezone=True), nullable=False, index=True) + + # Account and connector info + account_name = Column(String, nullable=False, index=True) + connector_name = Column(String, nullable=False, index=True) + + # Funding details + trading_pair = Column(String, nullable=False, index=True) + funding_rate = Column(Numeric(precision=20, scale=18), nullable=False) # Funding rate + funding_payment = Column(Numeric(precision=30, scale=18), nullable=False) # Payment amount + fee_currency = Column(String, nullable=False) # Payment currency (usually USDT) + + # Position association + position_size = Column(Numeric(precision=30, scale=18), nullable=True) # Position size at time of payment + position_side = Column(String, nullable=True) # LONG, SHORT + + # Additional metadata + exchange_funding_id = Column(String, nullable=True, index=True) # Exchange funding ID + + From 9981bbc3bd496e9315931c4c7569c1a8178b2b6b Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 23 Jun 2025 19:47:02 +0200 Subject: [PATCH 134/244] (feat) update models --- models/__init__.py | 35 --------------------------- models/databases.py | 55 ------------------------------------------- models/performance.py | 33 -------------------------- models/trading.py | 44 ++++++++++++++++++++++++++++------ 4 files changed, 37 insertions(+), 130 deletions(-) delete mode 100644 models/databases.py delete mode 100644 models/performance.py diff --git a/models/__init__.py b/models/__init__.py index 296884c9..9f91005c 100644 --- a/models/__init__.py +++ b/models/__init__.py @@ -57,26 +57,6 @@ ScriptConfigResponse, ) -# Database models -from .databases import ( - DatabaseInfo, - DatabaseListResponse, - DatabaseReadRequest, - DatabaseReadResponse, - CheckpointRequest, - CheckpointResponse, - CheckpointListResponse, - CheckpointData, - CheckpointLoadRequest, -) - -# Performance models -from .performance import ( - ExecutorInfo, - PerformanceRequest, - PerformanceResults, - PerformanceResponse, -) # Market data models from .market_data import ( @@ -147,21 +127,6 @@ "ScriptResponse", "ScriptConfig", "ScriptConfigResponse", - # Database models - "DatabaseInfo", - "DatabaseListResponse", - "DatabaseReadRequest", - "DatabaseReadResponse", - "CheckpointRequest", - "CheckpointResponse", - "CheckpointListResponse", - "CheckpointData", - "CheckpointLoadRequest", - # Performance models - "ExecutorInfo", - "PerformanceRequest", - "PerformanceResults", - "PerformanceResponse", # Market data models "CandleData", "CandlesResponse", diff --git a/models/databases.py b/models/databases.py deleted file mode 100644 index 8dc9c1d5..00000000 --- a/models/databases.py +++ /dev/null @@ -1,55 +0,0 @@ -from typing import Dict, List, Optional, Any -from pydantic import BaseModel, Field - - -class DatabaseInfo(BaseModel): - """Information about a database""" - db_name: str = Field(description="Database name") - db_path: str = Field(description="Database file path") - healthy: bool = Field(description="Whether the database is healthy") - status: Dict[str, Any] = Field(description="Database status information") - tables: Dict[str, str] = Field(description="Database tables data (JSON strings)") - - -class 
DatabaseListResponse(BaseModel): - """Response for listing databases""" - databases: List[str] = Field(description="List of database file paths") - - -class DatabaseReadRequest(BaseModel): - """Request for reading databases""" - db_paths: List[str] = Field(description="List of database paths to read") - - -class DatabaseReadResponse(BaseModel): - """Response for reading databases""" - databases: List[DatabaseInfo] = Field(description="List of database information") - - -class CheckpointRequest(BaseModel): - """Request for creating a checkpoint""" - db_paths: List[str] = Field(description="List of database paths to include in checkpoint") - - -class CheckpointResponse(BaseModel): - """Response for checkpoint operations""" - message: str = Field(description="Operation result message") - success: bool = Field(default=True, description="Whether the operation was successful") - - -class CheckpointListResponse(BaseModel): - """Response for listing checkpoints""" - checkpoints: List[str] = Field(description="List of checkpoint file paths") - - -class CheckpointData(BaseModel): - """Data loaded from a checkpoint""" - executors: str = Field(description="Executors data (JSON string)") - orders: str = Field(description="Orders data (JSON string)") - trade_fill: str = Field(description="Trade fill data (JSON string)") - controllers: str = Field(description="Controllers data (JSON string)") - - -class CheckpointLoadRequest(BaseModel): - """Request for loading a checkpoint""" - checkpoint_path: str = Field(description="Path to the checkpoint file to load") \ No newline at end of file diff --git a/models/performance.py b/models/performance.py deleted file mode 100644 index 81cb6c9c..00000000 --- a/models/performance.py +++ /dev/null @@ -1,33 +0,0 @@ -from typing import Dict, List, Any -from pydantic import BaseModel, Field - - -class ExecutorInfo(BaseModel): - """Information about an executor""" - id: str = Field(description="Executor ID") - trades: List[Dict[str, Any]] = Field(description="List of executor trades") - orders: List[Dict[str, Any]] = Field(description="List of executor orders") - - -class PerformanceRequest(BaseModel): - """Request for performance analysis""" - executors: List[ExecutorInfo] = Field(description="List of executor data for analysis") - - -class PerformanceResults(BaseModel): - """Performance analysis results""" - total_pnl: float = Field(description="Total PnL") - total_pnl_pct: float = Field(description="Total PnL percentage") - total_volume: float = Field(description="Total trading volume") - total_trades: int = Field(description="Total number of trades") - win_rate: float = Field(description="Win rate percentage") - profit_factor: float = Field(description="Profit factor") - sharpe_ratio: float = Field(description="Sharpe ratio") - max_drawdown: float = Field(description="Maximum drawdown") - avg_trade_pnl: float = Field(description="Average trade PnL") - - -class PerformanceResponse(BaseModel): - """Response for performance analysis""" - executors: List[ExecutorInfo] = Field(description="Original executor data") - results: PerformanceResults = Field(description="Performance analysis results") \ No newline at end of file diff --git a/models/trading.py b/models/trading.py index de020e6c..da5e8f11 100644 --- a/models/trading.py +++ b/models/trading.py @@ -1,5 +1,5 @@ -from typing import Dict, List, Optional, Any -from pydantic import BaseModel, Field +from typing import Dict, List, Optional, Any, Literal +from pydantic import BaseModel, Field, field_validator from decimal 
import Decimal from datetime import datetime from hummingbot.core.data_type.common import OrderType, TradeType, PositionAction @@ -10,11 +10,41 @@ class TradeRequest(BaseModel): account_name: str = Field(description="Name of the account to trade with") connector_name: str = Field(description="Name of the connector/exchange") trading_pair: str = Field(description="Trading pair (e.g., BTC-USDT)") - trade_type: TradeType = Field(description="Whether to buy or sell") + trade_type: Literal["BUY", "SELL"] = Field(description="Whether to buy or sell") amount: Decimal = Field(description="Amount to trade", gt=0) - order_type: OrderType = Field(default=OrderType.LIMIT, description="Type of order") + order_type: Literal["LIMIT", "MARKET", "LIMIT_MAKER"] = Field(default="LIMIT", description="Type of order") price: Optional[Decimal] = Field(default=None, description="Price for limit orders") - position_action: PositionAction = Field(default=PositionAction.OPEN, description="Position action for perpetual contracts (OPEN/CLOSE)") + position_action: Literal["OPEN", "CLOSE"] = Field(default="OPEN", description="Position action for perpetual contracts (OPEN/CLOSE)") + + @field_validator('trade_type') + @classmethod + def validate_trade_type(cls, v): + """Validate that trade_type is a valid TradeType enum name.""" + try: + return TradeType[v].name + except KeyError: + valid_types = [t.name for t in TradeType] + raise ValueError(f"Invalid trade_type '{v}'. Must be one of: {valid_types}") + + @field_validator('order_type') + @classmethod + def validate_order_type(cls, v): + """Validate that order_type is a valid OrderType enum name.""" + try: + return OrderType[v].name + except KeyError: + valid_types = [t.name for t in OrderType] + raise ValueError(f"Invalid order_type '{v}'. Must be one of: {valid_types}") + + @field_validator('position_action') + @classmethod + def validate_position_action(cls, v): + """Validate that position_action is a valid PositionAction enum name.""" + try: + return PositionAction[v].name + except KeyError: + valid_actions = [a.name for a in PositionAction] + raise ValueError(f"Invalid position_action '{v}'. 
Must be one of: {valid_actions}")
 
 
 class TradeResponse(BaseModel):
@@ -23,9 +53,9 @@ class TradeResponse(BaseModel):
     account_name: str = Field(description="Account used for the trade")
     connector_name: str = Field(description="Connector used for the trade")
     trading_pair: str = Field(description="Trading pair")
-    trade_type: TradeType = Field(description="Trade type")
+    trade_type: str = Field(description="Trade type")
     amount: Decimal = Field(description="Trade amount")
-    order_type: OrderType = Field(description="Order type")
+    order_type: str = Field(description="Order type")
     price: Optional[Decimal] = Field(description="Order price")
     status: str = Field(default="submitted", description="Order status")
 
From 9183e5eb4d9616d97354d3266b381aee2524032f Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Mon, 23 Jun 2025 19:47:17 +0200
Subject: [PATCH 135/244] (feat) migrate endpoints to archived bots

---
 routers/archived_bots.py | 258 +++++++++++++++++++++++++++++++++++++++
 routers/databases.py     | 141 ---------------------
 routers/performance.py   |  37 ------
 3 files changed, 258 insertions(+), 178 deletions(-)
 create mode 100644 routers/archived_bots.py
 delete mode 100644 routers/databases.py
 delete mode 100644 routers/performance.py

diff --git a/routers/archived_bots.py b/routers/archived_bots.py
new file mode 100644
index 00000000..123aa0ca
--- /dev/null
+++ b/routers/archived_bots.py
@@ -0,0 +1,258 @@
+from typing import List, Dict, Any, Optional
+from fastapi import APIRouter, HTTPException, Query
+
+from utils.file_system import fs_util
+from utils.hummingbot_database_reader import HummingbotDatabase, PerformanceDataSource
+from hummingbot.strategy_v2.backtesting.backtesting_engine_base import BacktestingEngineBase
+
+router = APIRouter(tags=["Archived Bots"], prefix="/archived-bots")
+
+
+@router.get("/", response_model=List[str])
+async def list_databases():
+    """
+    List all available database files in the system.
+
+    Returns:
+        List of database file paths
+    """
+    return fs_util.list_databases()
+
+
+@router.get("/{db_path:path}/status")
+async def get_database_status(db_path: str):
+    """
+    Get status information for a specific database.
+
+    Args:
+        db_path: Path to the database file
+
+    Returns:
+        Database status including table health
+    """
+    try:
+        db = HummingbotDatabase(db_path)
+        return {
+            "db_path": db_path,
+            "status": db.status,
+            "healthy": db.status["general_status"]
+        }
+    except Exception as e:
+        raise HTTPException(status_code=404, detail=f"Database not found or error: {str(e)}")
+
+
+@router.get("/{db_path:path}/summary")
+async def get_database_summary(db_path: str):
+    """
+    Get a summary of database contents including basic statistics.
+
+    Args:
+        db_path: Full path to the database file
+
+    Returns:
+        Summary statistics of the database contents
+    """
+    try:
+        db = HummingbotDatabase(db_path)
+
+        # Get basic counts
+        orders = db.get_orders()
+        trades = db.get_trade_fills()
+        executors = db.get_executors_data()
+
+        return {
+            "db_path": db_path,
+            "total_orders": len(orders),
+            "total_trades": len(trades),
+            "total_executors": len(executors),
+            "trading_pairs": orders["symbol"].unique().tolist() if len(orders) > 0 else [],
+            "exchanges": orders["market"].unique().tolist() if len(orders) > 0 else [],
+        }
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"Error analyzing database: {str(e)}")
+
+
+@router.get("/{db_path:path}/performance")
+async def get_database_performance(db_path: str):
+    """
+    Get detailed performance analysis for a bot database.
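+
+    Example (hypothetical path; use any entry returned by GET /archived-bots/):
+        GET /archived-bots/bots/archived/my_bot/data/my_bot.sqlite/performance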
+ + Args: + db_path: Full path to the database file + + Returns: + Detailed performance metrics including PnL, sharpe ratio, etc. + """ + try: + db = HummingbotDatabase(db_path) + + # Get executors data + executors = db.get_executors_data() + + if len(executors) == 0: + return { + "db_path": db_path, + "error": "No executors found in database", + "results": {} + } + + # Convert to performance data source + executors_dict = executors.to_dict('records') + data_source = PerformanceDataSource(executors_dict) + + # Calculate performance + backtesting_engine = BacktestingEngineBase() + executor_info_list = data_source.executor_info_list + results = backtesting_engine.summarize_results(executor_info_list) + + # Clean up results + results["sharpe_ratio"] = results["sharpe_ratio"] if results["sharpe_ratio"] is not None else 0 + + return { + "db_path": db_path, + "results": results, + "executor_count": len(executor_info_list) + } + + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error calculating performance: {str(e)}") + + +@router.get("/{db_path:path}/trades") +async def get_database_trades( + db_path: str, + limit: int = Query(default=100, description="Limit number of trades returned"), + offset: int = Query(default=0, description="Offset for pagination") +): + """ + Get trade history from a database. + + Args: + db_path: Full path to the database file + limit: Maximum number of trades to return + offset: Offset for pagination + + Returns: + List of trades with pagination info + """ + try: + db = HummingbotDatabase(db_path) + trades = db.get_trade_fills() + + # Apply pagination + total_trades = len(trades) + trades_page = trades.iloc[offset:offset + limit] + + return { + "db_path": db_path, + "trades": trades_page.to_dict('records'), + "pagination": { + "total": total_trades, + "limit": limit, + "offset": offset, + "has_more": offset + limit < total_trades + } + } + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching trades: {str(e)}") + + +@router.get("/{db_path:path}/orders") +async def get_database_orders( + db_path: str, + limit: int = Query(default=100, description="Limit number of orders returned"), + offset: int = Query(default=0, description="Offset for pagination"), + status: Optional[str] = Query(default=None, description="Filter by order status") +): + """ + Get order history from a database. + + Args: + db_path: Full path to the database file + limit: Maximum number of orders to return + offset: Offset for pagination + status: Optional status filter + + Returns: + List of orders with pagination info + """ + try: + db = HummingbotDatabase(db_path) + orders = db.get_orders() + + # Apply status filter if provided + if status: + orders = orders[orders["last_status"] == status] + + # Apply pagination + total_orders = len(orders) + orders_page = orders.iloc[offset:offset + limit] + + return { + "db_path": db_path, + "orders": orders_page.to_dict('records'), + "pagination": { + "total": total_orders, + "limit": limit, + "offset": offset, + "has_more": offset + limit < total_orders + } + } + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching orders: {str(e)}") + + +@router.get("/{db_path:path}/executors") +async def get_database_executors(db_path: str): + """ + Get executor data from a database. 
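+
+    Example response shape (a sketch; executor record fields vary by Hummingbot version):
+        {"db_path": "...", "executors": [{"id": "...", "net_pnl_quote": 0.0}], "total": 1}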
+ + Args: + db_path: Full path to the database file + + Returns: + List of executors with their configurations and results + """ + try: + db = HummingbotDatabase(db_path) + executors = db.get_executors_data() + + return { + "db_path": db_path, + "executors": executors.to_dict('records'), + "total": len(executors) + } + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching executors: {str(e)}") + + +@router.post("/read", response_model=List[Dict[str, Any]]) +async def read_databases(db_paths: List[str]): + """ + Read and extract basic information from multiple database files. + + Args: + db_paths: List of database file paths to read + + Returns: + List of database status information + """ + results = [] + for db_path in db_paths: + try: + db = HummingbotDatabase(db_path) + db_info = { + "db_name": db.db_name, + "db_path": db.db_path, + "healthy": db.status["general_status"], + "status": db.status, + } + except Exception as e: + db_info = { + "db_name": "", + "db_path": db_path, + "healthy": False, + "error": str(e) + } + results.append(db_info) + return results \ No newline at end of file diff --git a/routers/databases.py b/routers/databases.py deleted file mode 100644 index 1cf9f120..00000000 --- a/routers/databases.py +++ /dev/null @@ -1,141 +0,0 @@ -import json -import time - -from typing import List, Dict, Any - -import pandas as pd - -from utils.etl_databases import HummingbotDatabase, ETLPerformance -from fastapi import APIRouter - -from utils.file_system import fs_util - -router = APIRouter(tags=["Databases"], prefix="/databases") - - -@router.get("/", response_model=List[str]) -async def list_databases(): - """ - List all available database files in the system. - - Returns: - List of database file paths - """ - return fs_util.list_databases() - - -@router.post("/read", response_model=List[Dict[str, Any]]) -async def read_databases(db_paths: List[str] = None): - """ - Read and extract data from multiple database files. - - Args: - db_paths: List of database file paths to read - - Returns: - List of database contents with tables and status information - """ - dbs = [] - for db_path in db_paths: - db = HummingbotDatabase(db_path) - try: - db_content = { - "db_name": db.db_name, - "db_path": db.db_path, - "healthy": db.status["general_status"], - "status": db.status, - "tables": { - "orders": json.dumps(db.get_orders().to_dict()), - "trade_fill": json.dumps(db.get_trade_fills().to_dict()), - "executors": json.dumps(db.get_executors_data().to_dict()), - "order_status": json.dumps(db.get_order_status().to_dict()), - "controllers": json.dumps(db.get_controllers_data().to_dict()) - } - } - except Exception as e: - print(f"Error reading database {db_path}: {str(e)}") - db_content = { - "db_name": "", - "db_path": db_path, - "healthy": False, - "status": db.status, - "tables": {} - } - dbs.append(db_content) - return dbs - - -@router.post("/checkpoint", response_model=Dict[str, Any]) -async def create_checkpoint(db_paths: List[str]): - """ - Create a checkpoint by consolidating data from multiple databases. 
- - Args: - db_paths: List of database paths to include in checkpoint - - Returns: - Dictionary with checkpoint creation status - """ - try: - dbs = await read_databases(db_paths) - - healthy_dbs = [db for db in dbs if db["healthy"]] - - table_names = ["trade_fill", "orders", "order_status", "executors", "controllers"] - tables_dict = {name: pd.DataFrame() for name in table_names} - - for db in healthy_dbs: - for table_name in table_names: - new_data = pd.DataFrame(json.loads(db["tables"][table_name])) - new_data["db_path"] = db["db_path"] - new_data["db_name"] = db["db_name"] - tables_dict[table_name] = pd.concat([tables_dict[table_name], new_data]) - - etl = ETLPerformance(db_path=f"bots/data/checkpoint_{str(int(time.time()))}.sqlite") - etl.create_tables() - etl.insert_data(tables_dict) - return {"message": "Checkpoint created successfully."} - except Exception as e: - return {"message": f"Error: {str(e)}"} - - -@router.get("/checkpoints", response_model=List[str]) -async def list_checkpoints(full_path: bool): - """ - List all available checkpoint files. - - Args: - full_path: Whether to return full file paths or just filenames - - Returns: - List of checkpoint file paths or names - """ - return fs_util.list_checkpoints(full_path) - - -@router.post("/checkpoints/load") -async def load_checkpoint(checkpoint_path: str): - """ - Load data from a checkpoint file. - - Args: - checkpoint_path: Path to the checkpoint file to load - - Returns: - Dictionary with checkpoint data including executors, orders, trades, and controllers - """ - try: - etl = ETLPerformance(checkpoint_path) - executor = etl.load_executors() - order = etl.load_orders() - trade_fill = etl.load_trade_fill() - controllers = etl.load_controllers() - checkpoint_data = { - "executors": json.dumps(executor.to_dict()), - "orders": json.dumps(order.to_dict()), - "trade_fill": json.dumps(trade_fill.to_dict()), - "controllers": json.dumps(controllers.to_dict()) - } - return checkpoint_data - except Exception as e: - return {"message": f"Error: {str(e)}"} \ No newline at end of file diff --git a/routers/performance.py b/routers/performance.py deleted file mode 100644 index 4469458d..00000000 --- a/routers/performance.py +++ /dev/null @@ -1,37 +0,0 @@ -from fastapi import APIRouter -from typing import Any, Dict - -from hummingbot.strategy_v2.backtesting.backtesting_engine_base import BacktestingEngineBase - -from utils.etl_databases import PerformanceDataSource - -router = APIRouter(tags=["Performance"], prefix="/performance") - - -@router.post("/results") -async def get_performance_results(payload: Dict[str, Any]): - """ - Calculate performance results from executor data. 
-
-    Args:
-        payload: Dictionary containing executors data for performance analysis
-
-    Returns:
-        Dictionary with executors and calculated performance results
-    """
-    executors = payload.get("executors")
-    data_source = PerformanceDataSource(executors)
-    performance_results = {}
-    try:
-        backtesting_engine = BacktestingEngineBase()
-        executor_info_list = data_source.executor_info_list
-        performance_results["results"] = backtesting_engine.summarize_results(executor_info_list)
-        results = performance_results["results"]
-        results["sharpe_ratio"] = results["sharpe_ratio"] if results["sharpe_ratio"] is not None else 0
-        return {
-            "executors": executors,
-            "results": performance_results["results"],
-        }
-
-    except Exception as e:
-        return {"error": str(e)}
\ No newline at end of file

From cbb4318d2b2696fc1c785f81050a0df10596e929 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Mon, 23 Jun 2025 19:47:36 +0200
Subject: [PATCH 136/244] (feat) simplify order type, side and position with enum str

---
 routers/trading.py | 13 +++++++++----
 1 file changed, 9 insertions(+), 4 deletions(-)

diff --git a/routers/trading.py b/routers/trading.py
index 58fd4677..b3acab01 100644
--- a/routers/trading.py
+++ b/routers/trading.py
@@ -2,7 +2,7 @@
 from datetime import datetime
 
 from fastapi import APIRouter, HTTPException, Depends, Query
-from hummingbot.core.data_type.common import PositionMode
+from hummingbot.core.data_type.common import PositionMode, TradeType, OrderType, PositionAction
 from starlette import status
 
 from services.accounts_service import AccountsService
@@ -34,15 +34,20 @@ async def place_trade(trade_request: TradeRequest,
         HTTPException: 400 for invalid parameters, 404 for account/connector not found, 500 for trade execution errors
     """
     try:
+        # Convert string names to enum instances
+        trade_type_enum = TradeType[trade_request.trade_type]
+        order_type_enum = OrderType[trade_request.order_type]
+        position_action_enum = PositionAction[trade_request.position_action]
+
         order_id = await accounts_service.place_trade(
             account_name=trade_request.account_name,
             connector_name=trade_request.connector_name,
             trading_pair=trade_request.trading_pair,
-            trade_type=trade_request.trade_type,
+            trade_type=trade_type_enum,
             amount=trade_request.amount,
-            order_type=trade_request.order_type,
+            order_type=order_type_enum,
             price=trade_request.price,
-            position_action=trade_request.position_action,
+            position_action=position_action_enum,
             market_data_manager=market_data_manager
         )
 
From 563b0074fca18c41f53c3a9a967184caa5f947eb Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Mon, 23 Jun 2025 19:48:51 +0200
Subject: [PATCH 137/244] (feat) replace etl_databases with hummingbot database reader

---
 ...bases.py => hummingbot_database_reader.py} | 211 ------------------
 1 file changed, 211 deletions(-)
 rename utils/{etl_databases.py => hummingbot_database_reader.py} (50%)

diff --git a/utils/etl_databases.py b/utils/hummingbot_database_reader.py
similarity index 50%
rename from utils/etl_databases.py
rename to utils/hummingbot_database_reader.py
index 85efe9ee..8ece1f61 100644
--- a/utils/etl_databases.py
+++ b/utils/hummingbot_database_reader.py
@@ -88,217 +88,6 @@ def get_controllers_data(self) -> pd.DataFrame:
         return controllers
 
-
-class ETLPerformance:
-    def __init__(self,
-                 db_path: str):
-        self.db_path = f'sqlite:///{os.path.join(db_path)}'
-        self.engine = create_engine(self.db_path, connect_args={'check_same_thread': False})
-        self.session_maker = sessionmaker(bind=self.engine)
-        self.metadata = MetaData()
-
-    @property
-    def 
executors_table(self): - return Table('executors', - MetaData(), - Column('id', String), - Column('timestamp', Integer), - Column('type', String), - Column('close_type', Integer), - Column('close_timestamp', Integer), - Column('status', String), - Column('config', String), - Column('net_pnl_pct', Float), - Column('net_pnl_quote', Float), - Column('cum_fees_quote', Float), - Column('filled_amount_quote', Float), - Column('is_active', Integer), - Column('is_trading', Integer), - Column('custom_info', String), - Column('controller_id', String)) - - @property - def trade_fill_table(self): - return Table( - 'trades', MetaData(), - Column('config_file_path', VARCHAR(255)), - Column('strategy', VARCHAR(255)), - Column('market', VARCHAR(255)), - Column('symbol', VARCHAR(255)), - Column('base_asset', VARCHAR(255)), - Column('quote_asset', VARCHAR(255)), - Column('timestamp', INT), - Column('order_id', VARCHAR(255)), - Column('trade_type', VARCHAR(255)), - Column('order_type', VARCHAR(255)), - Column('price', FLOAT), - Column('amount', FLOAT), - Column('leverage', INT), - Column('trade_fee', VARCHAR(255)), - Column('trade_fee_in_quote', FLOAT), - Column('exchange_trade_id', VARCHAR(255)), - Column('position', VARCHAR(255)), - ) - - @property - def orders_table(self): - return Table( - 'orders', MetaData(), - Column('client_order_id', VARCHAR(255)), - Column('config_file_path', VARCHAR(255)), - Column('strategy', VARCHAR(255)), - Column('market', VARCHAR(255)), - Column('symbol', VARCHAR(255)), - Column('base_asset', VARCHAR(255)), - Column('quote_asset', VARCHAR(255)), - Column('creation_timestamp', INT), - Column('order_type', VARCHAR(255)), - Column('amount', FLOAT), - Column('leverage', INT), - Column('price', FLOAT), - Column('last_status', VARCHAR(255)), - Column('last_update_timestamp', INT), - Column('exchange_order_id', VARCHAR(255)), - Column('position', VARCHAR(255)), - ) - - @property - def controllers_table(self): - return Table( - 'controllers', MetaData(), - Column('id', VARCHAR(255)), - Column('controller_id', INT), - Column('timestamp', FLOAT), - Column('type', VARCHAR(255)), - Column('config', String), - ) - - @property - def tables(self): - return [self.executors_table, self.trade_fill_table, self.orders_table, self.controllers_table] - - def create_tables(self): - with self.engine.connect(): - for table in self.tables: - table.create(self.engine) - - def insert_data(self, data): - if "executors" in data: - self.insert_executors(data["executors"]) - if "trade_fill" in data: - self.insert_trade_fill(data["trade_fill"]) - if "orders" in data: - self.insert_orders(data["orders"]) - if "controllers" in data: - self.insert_controllers(data["controllers"]) - - def insert_executors(self, executors): - with self.engine.connect() as conn: - for _, row in executors.iterrows(): - ins = self.executors_table.insert().values( - id=row["id"], - timestamp=row["timestamp"], - type=row["type"], - close_type=row["close_type"], - close_timestamp=row["close_timestamp"], - status=row["status"], - config=row["config"], - net_pnl_pct=row["net_pnl_pct"], - net_pnl_quote=row["net_pnl_quote"], - cum_fees_quote=row["cum_fees_quote"], - filled_amount_quote=row["filled_amount_quote"], - is_active=row["is_active"], - is_trading=row["is_trading"], - custom_info=row["custom_info"], - controller_id=row["controller_id"]) - conn.execute(ins) - conn.commit() - - def insert_trade_fill(self, trade_fill): - with self.engine.connect() as conn: - for _, row in trade_fill.iterrows(): - ins = 
insert(self.trade_fill_table).values( - config_file_path=row["config_file_path"], - strategy=row["strategy"], - market=row["market"], - symbol=row["symbol"], - base_asset=row["base_asset"], - quote_asset=row["quote_asset"], - timestamp=row["timestamp"], - order_id=row["order_id"], - trade_type=row["trade_type"], - order_type=row["order_type"], - price=row["price"], - amount=row["amount"], - leverage=row["leverage"], - trade_fee=row["trade_fee"], - trade_fee_in_quote=row["trade_fee_in_quote"], - exchange_trade_id=row["exchange_trade_id"], - position=row["position"], - ) - conn.execute(ins) - conn.commit() - - def insert_orders(self, orders): - with self.engine.connect() as conn: - for _, row in orders.iterrows(): - ins = insert(self.orders_table).values( - client_order_id=row["id"], - config_file_path=row["config_file_path"], - strategy=row["strategy"], - market=row["market"], - symbol=row["symbol"], - base_asset=row["base_asset"], - quote_asset=row["quote_asset"], - creation_timestamp=row["creation_timestamp"], - order_type=row["order_type"], - amount=row["amount"], - leverage=row["leverage"], - price=row["price"], - last_status=row["last_status"], - last_update_timestamp=row["last_update_timestamp"], - exchange_order_id=row["exchange_order_id"], - position=row["position"], - ) - conn.execute(ins) - conn.commit() - - def insert_controllers(self, controllers): - with self.engine.connect() as conn: - for _, row in controllers.iterrows(): - ins = insert(self.controllers_table).values( - id=row["id"], - controller_id=row["controller_id"], - timestamp=row["timestamp"], - type=row["type"], - config=row["config"], - ) - conn.execute(ins) - conn.commit() - - def load_executors(self): - with self.session_maker() as session: - query = "SELECT * FROM executors" - executors = pd.read_sql_query(text(query), session.connection()) - return executors - - def load_trade_fill(self): - with self.session_maker() as session: - query = "SELECT * FROM trades" - trade_fill = pd.read_sql_query(text(query), session.connection()) - return trade_fill - - def load_orders(self): - with self.session_maker() as session: - query = "SELECT * FROM orders" - orders = pd.read_sql_query(text(query), session.connection()) - return orders - - def load_controllers(self): - with self.session_maker() as session: - query = "SELECT * FROM controllers" - controllers = pd.read_sql_query(text(query), session.connection()) - return controllers - class PerformanceDataSource: def __init__(self, executors_dict: Dict[str, Any]): From 4bd431fe4e5caacbce0691b3d93b79a67463a4ac Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 23 Jun 2025 19:49:04 +0200 Subject: [PATCH 138/244] (feat) update routers --- main.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/main.py b/main.py index 39293dc1..edd696b0 100644 --- a/main.py +++ b/main.py @@ -19,13 +19,12 @@ from utils.bot_archiver import BotArchiver from routers import ( accounts, + archived_bots, backtesting, bot_orchestration, controllers, - databases, docker, market_data, - performance, scripts, trading ) @@ -174,8 +173,7 @@ def auth_user( app.include_router(scripts.router, dependencies=[Depends(auth_user)]) app.include_router(market_data.router, dependencies=[Depends(auth_user)]) app.include_router(backtesting.router, dependencies=[Depends(auth_user)]) -app.include_router(databases.router, dependencies=[Depends(auth_user)]) -app.include_router(performance.router, dependencies=[Depends(auth_user)]) +app.include_router(archived_bots.router, 
dependencies=[Depends(auth_user)]) @app.get("/") async def root(): From bee74c9cba428dc8b2257191164ae62b47e9d903 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 23 Jun 2025 21:10:29 +0200 Subject: [PATCH 139/244] (feat) add pull func to docker service --- services/docker_service.py | 179 +++++++++++++++++++++++++++++++++++-- 1 file changed, 171 insertions(+), 8 deletions(-) diff --git a/services/docker_service.py b/services/docker_service.py index 563f3246..9fdc82ec 100644 --- a/services/docker_service.py +++ b/services/docker_service.py @@ -1,6 +1,9 @@ import logging import os import shutil +import time +import threading +from typing import Dict import docker from docker.errors import DockerException @@ -12,18 +15,33 @@ class DockerService: + # Class-level configuration for cleanup + PULL_STATUS_MAX_AGE_SECONDS = 3600 # Keep status for 1 hour + PULL_STATUS_MAX_ENTRIES = 100 # Maximum number of entries to keep + CLEANUP_INTERVAL_SECONDS = 300 # Run cleanup every 5 minutes + def __init__(self): self.SOURCE_PATH = os.getcwd() + self._pull_status: Dict[str, Dict] = {} + self._cleanup_thread = None + self._stop_cleanup = threading.Event() + try: self.client = docker.from_env() + # Start background cleanup thread + self._start_cleanup_thread() except DockerException as e: logging.error(f"It was not possible to connect to Docker. Please make sure Docker is running. Error: {e}") - def get_active_containers(self): + def get_active_containers(self, name_filter: str = None): try: - containers_info = [{"id": container.id, "name": container.name, "status": container.status} for - container in self.client.containers.list(filters={"status": "running"}) if - "hummingbot" in container.name and "broker" not in container.name] + all_containers = self.client.containers.list(filters={"status": "running"}) + if name_filter: + containers_info = [{"id": container.id, "name": container.name, "status": container.status} for + container in all_containers if name_filter.lower() in container.name.lower()] + else: + containers_info = [{"id": container.id, "name": container.name, "status": container.status} for + container in all_containers] return {"active_instances": containers_info} except DockerException as e: return str(e) @@ -49,11 +67,15 @@ def pull_image_sync(self, image_name): except DockerException as e: return {"success": False, "error": str(e)} - def get_exited_containers(self): + def get_exited_containers(self, name_filter: str = None): try: - containers_info = [{"id": container.id, "name": container.name, "status": container.status} for - container in self.client.containers.list(filters={"status": "exited"}) if - "hummingbot" in container.name and "broker" not in container.name] + all_containers = self.client.containers.list(filters={"status": "exited"}) + if name_filter: + containers_info = [{"id": container.id, "name": container.name, "status": container.status} for + container in all_containers if name_filter.lower() in container.name.lower()] + else: + containers_info = [{"id": container.id, "name": container.name, "status": container.status} for + container in all_containers] return {"exited_instances": containers_info} except DockerException as e: return str(e) @@ -222,3 +244,144 @@ def create_hummingbot_instance(self, config: V2ScriptDeployment): return {"success": True, "message": f"Instance {instance_name} created successfully."} except docker.errors.DockerException as e: return {"success": False, "message": str(e)} + + def _start_cleanup_thread(self): + """Start the background cleanup 
thread""" + if self._cleanup_thread is None or not self._cleanup_thread.is_alive(): + self._cleanup_thread = threading.Thread(target=self._periodic_cleanup, daemon=True) + self._cleanup_thread.start() + logging.info("Started Docker pull status cleanup thread") + + def _periodic_cleanup(self): + """Periodically clean up old pull status entries""" + while not self._stop_cleanup.is_set(): + try: + self._cleanup_old_pull_status() + except Exception as e: + logging.error(f"Error in cleanup thread: {e}") + + # Wait for the next cleanup interval + self._stop_cleanup.wait(self.CLEANUP_INTERVAL_SECONDS) + + def _cleanup_old_pull_status(self): + """Remove old entries to prevent memory growth""" + current_time = time.time() + to_remove = [] + + # Find entries older than max age + for image_name, status_info in self._pull_status.items(): + # Skip ongoing pulls + if status_info["status"] == "pulling": + continue + + # Check age of completed/failed operations + end_time = status_info.get("completed_at") or status_info.get("failed_at") + if end_time and (current_time - end_time > self.PULL_STATUS_MAX_AGE_SECONDS): + to_remove.append(image_name) + + # Remove old entries + for image_name in to_remove: + del self._pull_status[image_name] + logging.info(f"Cleaned up old pull status for {image_name}") + + # If still over limit, remove oldest completed/failed entries + if len(self._pull_status) > self.PULL_STATUS_MAX_ENTRIES: + completed_entries = [ + (name, info) for name, info in self._pull_status.items() + if info["status"] in ["completed", "failed"] + ] + # Sort by end time (oldest first) + completed_entries.sort( + key=lambda x: x[1].get("completed_at") or x[1].get("failed_at") or 0 + ) + + # Remove oldest entries to get under limit + excess_count = len(self._pull_status) - self.PULL_STATUS_MAX_ENTRIES + for i in range(min(excess_count, len(completed_entries))): + del self._pull_status[completed_entries[i][0]] + logging.info(f"Cleaned up excess pull status for {completed_entries[i][0]}") + + def pull_image_async(self, image_name: str): + """Start pulling a Docker image asynchronously with status tracking""" + # Check if pull is already in progress + if image_name in self._pull_status: + current_status = self._pull_status[image_name] + if current_status["status"] == "pulling": + return { + "message": f"Pull already in progress for {image_name}", + "status": "in_progress", + "started_at": current_status["started_at"], + "image_name": image_name + } + + # Start the pull in a background thread + threading.Thread(target=self._pull_image_with_tracking, args=(image_name,), daemon=True).start() + + return { + "message": f"Pull started for {image_name}", + "status": "started", + "image_name": image_name + } + + def _pull_image_with_tracking(self, image_name: str): + """Background task to pull Docker image with status tracking""" + try: + self._pull_status[image_name] = { + "status": "pulling", + "started_at": time.time(), + "progress": "Starting pull..." 
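+                # Lifecycle sketch: "pulling" -> "completed" | "failed"; the timestamps
+                # recorded here let get_all_pull_status() derive duration_seconds.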
+ } + + # Use the synchronous pull method + result = self.pull_image_sync(image_name) + + if result.get("success"): + self._pull_status[image_name] = { + "status": "completed", + "started_at": self._pull_status[image_name]["started_at"], + "completed_at": time.time(), + "result": result + } + else: + self._pull_status[image_name] = { + "status": "failed", + "started_at": self._pull_status[image_name]["started_at"], + "failed_at": time.time(), + "error": result.get("error", "Unknown error") + } + except Exception as e: + self._pull_status[image_name] = { + "status": "failed", + "started_at": self._pull_status[image_name].get("started_at", time.time()), + "failed_at": time.time(), + "error": str(e) + } + + def get_all_pull_status(self): + """Get status of all pull operations""" + operations = {} + for image_name, status_info in self._pull_status.items(): + status_copy = status_info.copy() + + # Add duration for each operation + start_time = status_copy.get("started_at") + if start_time: + if status_copy["status"] == "pulling": + status_copy["duration_seconds"] = round(time.time() - start_time, 2) + elif "completed_at" in status_copy: + status_copy["duration_seconds"] = round(status_copy["completed_at"] - start_time, 2) + elif "failed_at" in status_copy: + status_copy["duration_seconds"] = round(status_copy["failed_at"] - start_time, 2) + + operations[image_name] = status_copy + + return { + "pull_operations": operations, + "total_operations": len(operations) + } + + def cleanup(self): + """Clean up resources when shutting down""" + self._stop_cleanup.set() + if self._cleanup_thread: + self._cleanup_thread.join(timeout=1) From 5fb8ca69a92f070f0f40c9c41f15dc98751d1c40 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 23 Jun 2025 21:10:41 +0200 Subject: [PATCH 140/244] (feat) update docker routers to use service to pull images --- routers/docker.py | 303 ++++++++-------------------------------------- 1 file changed, 49 insertions(+), 254 deletions(-) diff --git a/routers/docker.py b/routers/docker.py index a003a720..6f9769b0 100644 --- a/routers/docker.py +++ b/routers/docker.py @@ -1,9 +1,6 @@ -import logging import os -import time -from typing import Dict -from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks +from fastapi import APIRouter, HTTPException, Depends from models import DockerImage from utils.bot_archiver import BotArchiver @@ -12,181 +9,113 @@ router = APIRouter(tags=["Docker"], prefix="/docker") -# Global state to track image pulls (in production, consider using Redis or database) -_pull_status: Dict[str, Dict] = {} - -# Configuration for cleanup -PULL_STATUS_MAX_AGE_SECONDS = 3600 # Keep status for 1 hour -PULL_STATUS_MAX_ENTRIES = 100 # Maximum number of entries to keep - - -def _cleanup_old_pull_status(): - """Remove old entries to prevent memory growth""" - current_time = time.time() - to_remove = [] - - # Find entries older than max age - for image_name, status_info in _pull_status.items(): - # Skip ongoing pulls - if status_info["status"] == "pulling": - continue - - # Check age of completed/failed operations - end_time = status_info.get("completed_at") or status_info.get("failed_at") - if end_time and (current_time - end_time > PULL_STATUS_MAX_AGE_SECONDS): - to_remove.append(image_name) - - # Remove old entries - for image_name in to_remove: - del _pull_status[image_name] - logging.info(f"Cleaned up old pull status for {image_name}") - - # If still over limit, remove oldest completed/failed entries - if len(_pull_status) > PULL_STATUS_MAX_ENTRIES: - 
completed_entries = [ - (name, info) for name, info in _pull_status.items() - if info["status"] in ["completed", "failed"] - ] - # Sort by end time (oldest first) - completed_entries.sort( - key=lambda x: x[1].get("completed_at") or x[1].get("failed_at") or 0 - ) - - # Remove oldest entries to get under limit - excess_count = len(_pull_status) - PULL_STATUS_MAX_ENTRIES - for i in range(min(excess_count, len(completed_entries))): - del _pull_status[completed_entries[i][0]] - logging.info(f"Cleaned up excess pull status for {completed_entries[i][0]}") - - -def _background_pull_image(image_name: str, docker_manager: DockerService): - """Background task to pull Docker image""" - try: - _pull_status[image_name] = { - "status": "pulling", - "started_at": time.time(), - "progress": "Starting pull..." - } - - # Use the synchronous pull method in background - result = docker_manager.pull_image_sync(image_name) - - if result.get("success"): - _pull_status[image_name] = { - "status": "completed", - "started_at": _pull_status[image_name]["started_at"], - "completed_at": time.time(), - "result": result - } - else: - _pull_status[image_name] = { - "status": "failed", - "started_at": _pull_status[image_name]["started_at"], - "failed_at": time.time(), - "error": result.get("error", "Unknown error") - } - except Exception as e: - _pull_status[image_name] = { - "status": "failed", - "started_at": _pull_status[image_name].get("started_at", time.time()), - "failed_at": time.time(), - "error": str(e) - } - @router.get("/running") -async def is_docker_running(docker_manager: DockerService = Depends(get_docker_service)): +async def is_docker_running(docker_service: DockerService = Depends(get_docker_service)): """ Check if Docker daemon is running. Args: - docker_manager: Docker service dependency + docker_service: Docker service dependency Returns: Dictionary indicating if Docker is running """ - return {"is_docker_running": docker_manager.is_docker_running()} + return {"is_docker_running": docker_service.is_docker_running()} @router.get("/available-images/{image_name}") -async def available_images(image_name: str, docker_manager: DockerService = Depends(get_docker_service)): +async def available_images(image_name: str, docker_service: DockerService = Depends(get_docker_service)): """ Get available Docker images matching the specified name. Args: image_name: Name pattern to search for in image tags - docker_manager: Docker service dependency + docker_service: Docker service dependency Returns: Dictionary with list of available image tags """ - available_images = docker_manager.get_available_images() + available_images = docker_service.get_available_images() image_tags = [tag for image in available_images["images"] for tag in image.tags if image_name in tag] return {"available_images": image_tags} @router.get("/active-containers") -async def active_containers(docker_manager: DockerService = Depends(get_docker_service)): +async def active_containers(name_filter: str = None, docker_service: DockerService = Depends(get_docker_service)): """ Get all currently active (running) Docker containers. 
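+
+    Example (hypothetical request): GET /docker/active-containers?name_filter=hummingbot
+    The filter, when given, is matched case-insensitively against container names.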
Args: - docker_manager: Docker service dependency + name_filter: Optional filter to match container names (case-insensitive) + docker_service: Docker service dependency Returns: List of active container information """ - return docker_manager.get_active_containers() + return docker_service.get_active_containers(name_filter) @router.get("/exited-containers") -async def exited_containers(docker_manager: DockerService = Depends(get_docker_service)): +async def exited_containers(name_filter: str = None, docker_service: DockerService = Depends(get_docker_service)): """ Get all exited (stopped) Docker containers. Args: - docker_manager: Docker service dependency + name_filter: Optional filter to match container names (case-insensitive) + docker_service: Docker service dependency Returns: List of exited container information """ - return docker_manager.get_exited_containers() + return docker_service.get_exited_containers(name_filter) @router.post("/clean-exited-containers") -async def clean_exited_containers(docker_manager: DockerService = Depends(get_docker_service)): +async def clean_exited_containers(docker_service: DockerService = Depends(get_docker_service)): """ Remove all exited Docker containers to free up space. Args: - docker_manager: Docker service dependency + docker_service: Docker service dependency Returns: Response from cleanup operation """ - return docker_manager.clean_exited_containers() + return docker_service.clean_exited_containers() @router.post("/remove-container/{container_name}") -async def remove_container(container_name: str, archive_locally: bool = True, s3_bucket: str = None, docker_manager: DockerService = Depends(get_docker_service), bot_archiver: BotArchiver = Depends(get_bot_archiver)): +async def remove_container(container_name: str, archive_locally: bool = True, s3_bucket: str = None, docker_service: DockerService = Depends(get_docker_service), bot_archiver: BotArchiver = Depends(get_bot_archiver)): """ - Remove a Docker container and optionally archive its data. + Remove a Hummingbot container and optionally archive its bot data. + + NOTE: This endpoint only works with Hummingbot containers (names starting with 'hummingbot-') + as it archives bot-specific data from the bots/instances directory. Args: - container_name: Name of the container to remove + container_name: Name of the Hummingbot container to remove archive_locally: Whether to archive data locally (default: True) s3_bucket: S3 bucket name for cloud archiving (optional) - docker_manager: Docker service dependency + docker_service: Docker service dependency bot_archiver: Bot archiver service dependency Returns: Response from container removal operation Raises: + HTTPException: 400 if container is not a Hummingbot container HTTPException: 500 if archiving fails """ + # Validate that this is a Hummingbot container + if not container_name.startswith("hummingbot-"): + raise HTTPException( + status_code=400, + detail=f"This endpoint only removes Hummingbot containers. Container '{container_name}' is not a Hummingbot container." 
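+            # Guard rationale: the archiving step below reads bots/instances/<container_name>,
+            # a layout only expected for Hummingbot-managed instances.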
+ ) + # Remove the container - response = docker_manager.remove_container(container_name) + response = docker_service.remove_container(container_name) # Form the instance directory path correctly instance_dir = os.path.join('bots', 'instances', container_name) try: @@ -202,195 +131,61 @@ async def remove_container(container_name: str, archive_locally: bool = True, s3 @router.post("/stop-container/{container_name}") -async def stop_container(container_name: str, docker_manager: DockerService = Depends(get_docker_service)): +async def stop_container(container_name: str, docker_service: DockerService = Depends(get_docker_service)): """ Stop a running Docker container. Args: container_name: Name of the container to stop - docker_manager: Docker service dependency + docker_service: Docker service dependency Returns: Response from container stop operation """ - return docker_manager.stop_container(container_name) + return docker_service.stop_container(container_name) @router.post("/start-container/{container_name}") -async def start_container(container_name: str, docker_manager: DockerService = Depends(get_docker_service)): +async def start_container(container_name: str, docker_service: DockerService = Depends(get_docker_service)): """ Start a stopped Docker container. Args: container_name: Name of the container to start - docker_manager: Docker service dependency + docker_service: Docker service dependency Returns: Response from container start operation """ - return docker_manager.start_container(container_name) + return docker_service.start_container(container_name) @router.post("/pull-image/") -async def pull_image(image: DockerImage, background_tasks: BackgroundTasks, - docker_manager: DockerService = Depends(get_docker_service)): +async def pull_image(image: DockerImage, docker_service: DockerService = Depends(get_docker_service)): """ Initiate Docker image pull as background task. Returns immediately with task status for monitoring. Args: image: DockerImage object containing the image name to pull - background_tasks: FastAPI background tasks - docker_manager: Docker service dependency + docker_service: Docker service dependency Returns: Status of the pull operation initiation """ - image_name = image.image_name - - # Run cleanup before starting new pull - _cleanup_old_pull_status() - - # Check if pull is already in progress - if image_name in _pull_status: - current_status = _pull_status[image_name] - if current_status["status"] == "pulling": - return { - "message": f"Pull already in progress for {image_name}", - "status": "in_progress", - "started_at": current_status["started_at"], - "image_name": image_name - } - - # Start background pull - background_tasks.add_task(_background_pull_image, image_name, docker_manager) - - return { - "message": f"Pull started for {image_name}", - "status": "started", - "image_name": image_name, - "note": "Use GET /docker/pull-status/{image_name} to check progress" - } - - -@router.get("/pull-status/{image_name}") -async def get_pull_status(image_name: str): - """ - Get status of image pull operation. 
- - Args: - image_name: Name of the image to check pull status for - - Returns: - Dictionary with pull status, timing, and result information - - Raises: - HTTPException: 404 if no pull operation found for this image - """ - if image_name not in _pull_status: - raise HTTPException(status_code=404, detail=f"No pull operation found for image '{image_name}'") - - status_info = _pull_status[image_name].copy() - - # Add duration information - start_time = status_info.get("started_at") - if start_time: - if status_info["status"] == "pulling": - status_info["duration_seconds"] = round(time.time() - start_time, 2) - elif "completed_at" in status_info: - status_info["duration_seconds"] = round(status_info["completed_at"] - start_time, 2) - elif "failed_at" in status_info: - status_info["duration_seconds"] = round(status_info["failed_at"] - start_time, 2) - - return { - "image_name": image_name, - **status_info - } + result = docker_service.pull_image_async(image.image_name) + return result @router.get("/pull-status/") -async def list_pull_operations(): - """ - List all current and recent pull operations. - - Returns: - Dictionary with all pull operations and their statuses - """ - operations = {} - for image_name, status_info in _pull_status.items(): - status_copy = status_info.copy() - - # Add duration for each operation - start_time = status_copy.get("started_at") - if start_time: - if status_copy["status"] == "pulling": - status_copy["duration_seconds"] = round(time.time() - start_time, 2) - elif "completed_at" in status_copy: - status_copy["duration_seconds"] = round(status_copy["completed_at"] - start_time, 2) - elif "failed_at" in status_copy: - status_copy["duration_seconds"] = round(status_copy["failed_at"] - start_time, 2) - - operations[image_name] = status_copy - - return { - "pull_operations": operations, - "total_operations": len(operations) - } - - -@router.delete("/pull-status/{image_name}") -async def clear_pull_status(image_name: str): +async def get_pull_status(docker_service: DockerService = Depends(get_docker_service)): """ - Clear pull status for completed or failed operations. + Get status of all pull operations. Args: - image_name: Name of the image to clear status for - - Returns: - Success message when status is cleared + docker_service: Docker service dependency - Raises: - HTTPException: 400 if trying to clear ongoing operation, 404 if operation not found - """ - if image_name not in _pull_status: - raise HTTPException(status_code=404, detail=f"Pull operation for '{image_name}' not found") - - status = _pull_status[image_name]["status"] - if status == "pulling": - raise HTTPException( - status_code=400, - detail=f"Cannot clear status for ongoing pull operation. Current status: {status}" - ) - - del _pull_status[image_name] - return {"message": f"Cleared pull status for '{image_name}'"} - - -@router.delete("/pull-status/") -async def clear_all_completed_pull_status(): - """ - Clear all completed and failed pull operations from status tracking. 
- Returns: - Summary of cleared operations + Dictionary with all pull operations and their statuses """ - cleared_count = 0 - cleared_images = [] - - # Create a list of items to remove to avoid modifying dict during iteration - to_remove = [] - for image_name, status_info in _pull_status.items(): - if status_info["status"] in ["completed", "failed"]: - to_remove.append(image_name) - - # Remove the completed/failed operations - for image_name in to_remove: - del _pull_status[image_name] - cleared_images.append(image_name) - cleared_count += 1 - - return { - "message": f"Cleared {cleared_count} completed/failed pull operations", - "cleared_images": cleared_images, - "remaining_operations": len(_pull_status) - } + return docker_service.get_all_pull_status() From c96b3152fae7b2141b09b02130234f814b785512 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 23 Jun 2025 21:10:51 +0200 Subject: [PATCH 141/244] (feat) add clean up of threading task --- main.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/main.py b/main.py index edd696b0..c1b9faa3 100644 --- a/main.py +++ b/main.py @@ -118,6 +118,9 @@ async def lifespan(app: FastAPI): # Stop market data feed manager (which will stop all feeds) market_data_feed_manager.stop() + # Clean up docker service + docker_service.cleanup() + # Close database connections await accounts_service.db_manager.close() From a985f8e54ab9b5dc44cf295871315edcc468093a Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 23 Jun 2025 23:42:57 +0200 Subject: [PATCH 142/244] (feat) move rewards calculation to trade performance --- utils/hummingbot_database_reader.py | 98 +++++++++++++++++++++++++++++ 1 file changed, 98 insertions(+) diff --git a/utils/hummingbot_database_reader.py b/utils/hummingbot_database_reader.py index 8ece1f61..520e8dc5 100644 --- a/utils/hummingbot_database_reader.py +++ b/utils/hummingbot_database_reader.py @@ -87,6 +87,104 @@ def get_controllers_data(self) -> pd.DataFrame: controllers = pd.read_sql_query(text(query), session.connection()) return controllers + def calculate_trade_based_performance(self) -> pd.DataFrame: + """ + Calculate trade-based performance metrics using vectorized pandas operations. + + Returns: + DataFrame with rolling performance metrics calculated per trading pair. 
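+            Columns include rolling buy/sell average prices and volumes, net_position,
+            realized and unrealized PnL (pct and quote), fees_quote and net_pnl_quote.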
+ """ + # Get trade fills data + trades = self.get_trade_fills() + + if len(trades) == 0: + return pd.DataFrame() + + # Sort by timestamp to ensure proper rolling calculation + trades = trades.sort_values(['symbol', 'market', 'timestamp']).copy() + + # Create buy/sell indicator columns + trades['is_buy'] = (trades['trade_type'].str.upper() == 'BUY').astype(int) + trades['is_sell'] = (trades['trade_type'].str.upper() == 'SELL').astype(int) + + # Calculate buy and sell amounts and values vectorized + trades['buy_amount'] = trades['amount'] * trades['is_buy'] + trades['sell_amount'] = trades['amount'] * trades['is_sell'] + trades['buy_value'] = trades['price'] * trades['amount'] * trades['is_buy'] + trades['sell_value'] = trades['price'] * trades['amount'] * trades['is_sell'] + + # Group by symbol and market for rolling calculations + grouper = ['symbol', 'market'] + + # Calculate cumulative volumes and values + trades['buy_volume'] = trades.groupby(grouper)['buy_amount'].cumsum() + trades['sell_volume'] = trades.groupby(grouper)['sell_amount'].cumsum() + trades['buy_value_cum'] = trades.groupby(grouper)['buy_value'].cumsum() + trades['sell_value_cum'] = trades.groupby(grouper)['sell_value'].cumsum() + + # Calculate average prices (avoid division by zero) + trades['buy_avg_price'] = trades['buy_value_cum'] / trades['buy_volume'].replace(0, pd.NA) + trades['sell_avg_price'] = trades['sell_value_cum'] / trades['sell_volume'].replace(0, pd.NA) + + # Forward fill average prices within each group to handle NaN values + trades['buy_avg_price'] = trades.groupby(grouper)['buy_avg_price'].ffill().fillna(0) + trades['sell_avg_price'] = trades.groupby(grouper)['sell_avg_price'].ffill().fillna(0) + + # Calculate net position + trades['net_position'] = trades['buy_volume'] - trades['sell_volume'] + + # Calculate realized PnL + trades['realized_trade_pnl_pct'] = ( + (trades['sell_avg_price'] - trades['buy_avg_price']) / trades['buy_avg_price'] + ).fillna(0) + + # Matched volume for realized PnL (minimum of buy and sell volumes) + trades['matched_volume'] = pd.concat([trades['buy_volume'], trades['sell_volume']], axis=1).min(axis=1) + trades['realized_trade_pnl_quote'] = trades['realized_trade_pnl_pct'] * trades['matched_volume'] + + # Calculate unrealized PnL based on position direction + # For long positions (net_position > 0): use current price vs buy_avg_price + # For short positions (net_position < 0): use sell_avg_price vs current price + trades['unrealized_trade_pnl_pct'] = 0.0 + + # Long positions + long_mask = trades['net_position'] > 0 + trades.loc[long_mask, 'unrealized_trade_pnl_pct'] = ( + (trades.loc[long_mask, 'price'] - trades.loc[long_mask, 'buy_avg_price']) / + trades.loc[long_mask, 'buy_avg_price'] + ).fillna(0) + + # Short positions + short_mask = trades['net_position'] < 0 + trades.loc[short_mask, 'unrealized_trade_pnl_pct'] = ( + (trades.loc[short_mask, 'sell_avg_price'] - trades.loc[short_mask, 'price']) / + trades.loc[short_mask, 'sell_avg_price'] + ).fillna(0) + + # Calculate unrealized PnL in quote currency + trades['unrealized_trade_pnl_quote'] = trades['unrealized_trade_pnl_pct'] * trades['net_position'].abs() + + # Fees are already in trade_fee_in_quote column + trades['fees_quote'] = trades['trade_fee_in_quote'] + + # Calculate net PnL + trades['net_pnl_quote'] = ( + trades['realized_trade_pnl_quote'] + + trades['unrealized_trade_pnl_quote'] - + trades['fees_quote'] + ) + + # Select and return relevant columns + result_columns = [ + 'timestamp', 'price', 'amount', 'trade_type', 
'symbol', 'market', + 'buy_avg_price', 'buy_volume', 'sell_avg_price', 'sell_volume', + 'net_position', 'realized_trade_pnl_pct', 'realized_trade_pnl_quote', + 'unrealized_trade_pnl_pct', 'unrealized_trade_pnl_quote', + 'fees_quote', 'net_pnl_quote' + ] + + return trades[result_columns].sort_values('timestamp') + class PerformanceDataSource: From c87134b17b9ebbb786b7fe65c716d031b29bf672 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 23 Jun 2025 23:43:12 +0200 Subject: [PATCH 143/244] (feat) adapt archived bots to new performance calculation --- routers/archived_bots.py | 42 ++++++++++++++++++++++------------------ 1 file changed, 23 insertions(+), 19 deletions(-) diff --git a/routers/archived_bots.py b/routers/archived_bots.py index 123aa0ca..3641ed0f 100644 --- a/routers/archived_bots.py +++ b/routers/archived_bots.py @@ -75,43 +75,47 @@ async def get_database_summary(db_path: str): @router.get("/{db_path:path}/performance") async def get_database_performance(db_path: str): """ - Get detailed performance analysis for a bot database. + Get trade-based performance analysis for a bot database. Args: db_path: Full path to the database file Returns: - Detailed performance metrics including PnL, sharpe ratio, etc. + Trade-based performance metrics with rolling calculations """ try: db = HummingbotDatabase(db_path) - # Get executors data - executors = db.get_executors_data() + # Use new trade-based performance calculation + performance_data = db.calculate_trade_based_performance() - if len(executors) == 0: + if len(performance_data) == 0: return { "db_path": db_path, - "error": "No executors found in database", - "results": {} + "error": "No trades found in database", + "performance_data": [] } - # Convert to performance data source - executors_dict = executors.to_dict('records') - data_source = PerformanceDataSource(executors_dict) - - # Calculate performance - backtesting_engine = BacktestingEngineBase() - executor_info_list = data_source.executor_info_list - results = backtesting_engine.summarize_results(executor_info_list) + # Convert to records for JSON response + performance_records = performance_data.to_dict('records') - # Clean up results - results["sharpe_ratio"] = results["sharpe_ratio"] if results["sharpe_ratio"] is not None else 0 + # Calculate summary statistics + final_row = performance_data.iloc[-1] if len(performance_data) > 0 else {} + summary = { + "total_trades": len(performance_data), + "final_net_pnl_quote": float(final_row.get('net_pnl_quote', 0)), + "final_realized_pnl_quote": float(final_row.get('realized_trade_pnl_quote', 0)), + "final_unrealized_pnl_quote": float(final_row.get('unrealized_trade_pnl_quote', 0)), + "total_fees_quote": float(performance_data['fees_quote'].sum()), + "final_net_position": float(final_row.get('net_position', 0)), + "trading_pairs": performance_data['symbol'].unique().tolist(), + "markets": performance_data['market'].unique().tolist() + } return { "db_path": db_path, - "results": results, - "executor_count": len(executor_info_list) + "summary": summary, + "performance_data": performance_records } except Exception as e: From bf738acabf8a4dd679094659a2abb48e436f6e2c Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 24 Jun 2025 17:11:14 +0200 Subject: [PATCH 144/244] (feat) update positions during initialization --- utils/connector_manager.py | 1 + 1 file changed, 1 insertion(+) diff --git a/utils/connector_manager.py b/utils/connector_manager.py index ba8082d4..8b7f0996 100644 --- a/utils/connector_manager.py +++ 
b/utils/connector_manager.py @@ -246,6 +246,7 @@ async def _create_and_initialize_connector(self, account_name: str, connector_na # Set default position mode to HEDGE for perpetual connectors if "_perpetual" in connector_name: + await connector._update_positions() if PositionMode.HEDGE in connector.supported_position_modes(): connector.set_position_mode(PositionMode.HEDGE) From d5f2cd2bf0b189bc0066fa5fb418ee52901e9f22 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 26 Jun 2025 19:16:50 +0200 Subject: [PATCH 145/244] (feat) add other conflictive token --- setup.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.sh b/setup.sh index 53d869e2..4ccbf2be 100755 --- a/setup.sh +++ b/setup.sh @@ -46,7 +46,7 @@ AWS_API_KEY="" AWS_SECRET_KEY="" S3_BUCKET="" LOGFIRE_ENV="dev" -BANNED_TOKENS='["NAV","ARS","ETHW","ETHF"]' +BANNED_TOKENS='["NAV","ARS","ETHW","ETHF","NEWT"]' echo "" echo -e "${GREEN}✅ Using sensible defaults for MQTT, Database, and other settings${NC}" From f29a0da01b989f622324fb5f74d03e0b9fe8857b Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 27 Jun 2025 02:58:40 +0200 Subject: [PATCH 146/244] (feat) adapt conf client to new hummingbot version --- bots/credentials/master_account/conf_client.yml | 16 ---------------- 1 file changed, 16 deletions(-) diff --git a/bots/credentials/master_account/conf_client.yml b/bots/credentials/master_account/conf_client.yml index 2ad547af..7093ae65 100644 --- a/bots/credentials/master_account/conf_client.yml +++ b/bots/credentials/master_account/conf_client.yml @@ -42,9 +42,6 @@ mqtt_bridge: # Error log sharing send_error_logs: true -# Can store the previous strategy ran for quick retrieval. -previous_strategy: some-strategy.yml - # Advanced database options, currently supports SQLAlchemy's included dialects # Reference: https://docs.sqlalchemy.org/en/13/dialects/ # To use an instance of SQLite DB the required configuration is @@ -114,19 +111,6 @@ certs_path: /Users/dardonacci/Documents/work/hummingbot/certs anonymized_metrics_mode: anonymized_metrics_interval_min: 15.0 -# Command Shortcuts -# Define abbreviations for often used commands -# or batch grouped commands together -command_shortcuts: -- command: spreads - help: Set bid and ask spread - arguments: - - Bid Spread - - Ask Spread - output: - - config bid_spread $1 - - config ask_spread $2 - # A source for rate oracle, currently ascend_ex, binance, coin_gecko, coin_cap, kucoin, gate_io rate_oracle_source: name: binance From 20dcd5ded9109d3e70645d611721c44a1171cfd1 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 27 Jun 2025 20:06:48 +0200 Subject: [PATCH 147/244] (feat) release version 1 --- main.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/main.py b/main.py index c1b9faa3..75da8763 100644 --- a/main.py +++ b/main.py @@ -127,9 +127,9 @@ async def lifespan(app: FastAPI): # Initialize FastAPI with metadata and lifespan app = FastAPI( - title="Hummingbot Backend API", + title="Hummingbot API", description="API for managing Hummingbot trading instances", - version="0.1.0", + version="1.0.0", lifespan=lifespan, ) From 45c2d613e4c16c610e90c8073868b335b9e82886 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 27 Jun 2025 20:18:29 +0200 Subject: [PATCH 148/244] (feat) update readme --- README.md | 230 +++++++++++++++++++++++++++++++++++++++--------------- 1 file changed, 167 insertions(+), 63 deletions(-) diff --git a/README.md b/README.md index 04651c1c..ac53f6d7 100644 --- a/README.md +++ b/README.md @@ -1,82 +1,186 @@ -# Backend 
API +# Hummingbot API -## Overview -Backend-api is a dedicated solution for managing Hummingbot instances. It offers a robust backend API to streamline the deployment, management, and interaction with Hummingbot containers. This tool is essential for administrators and developers looking to efficiently handle various aspects of Hummingbot operations. +A comprehensive RESTful API framework for managing trading operations across multiple exchanges. The Hummingbot API provides a centralized platform to aggregate all your trading functionalities, from basic account management to sophisticated automated trading strategies. -## Features -- **Deployment File Management**: Manage files necessary for deploying new Hummingbot instances. -- **Container Control**: Effortlessly start and stop Hummingbot containers. -- **Archiving Options**: Securely archive containers either locally or on Amazon S3 post-removal. -- **Direct Messaging**: Communicate with Hummingbots through the broker for effective control and coordination. +## What is Hummingbot API? -## Getting Started +The Hummingbot API is designed to be your central hub for trading operations, offering: + +- **Multi-Exchange Account Management**: Create and manage multiple trading accounts across different exchanges +- **Portfolio Monitoring**: Real-time balance tracking and portfolio distribution analysis +- **Trade Execution**: Execute trades, manage orders, and monitor positions across all your accounts +- **Automated Trading**: Deploy and control Hummingbot instances with automated strategies +- **Strategy Management**: Add, configure, and manage trading strategies in real-time +- **Complete Flexibility**: Build any trading product on top of this robust API framework + +Whether you're building a trading dashboard, implementing algorithmic strategies, or creating a comprehensive trading platform, the Hummingbot API provides all the tools you need. + +## System Dependencies + +The Hummingbot API requires two essential services to function properly: + +### 1. PostgreSQL Database +Stores all trading data including: +- Orders and trade history +- Account states and balances +- Positions and funding payments +- Performance metrics + +### 2. EMQX Message Broker +Enables real-time communication with trading bots: +- Receives live updates from running bots +- Sends commands to control bot execution +- Handles real-time data streaming + +## Installation & Setup + +### Prerequisites +- Docker and Docker Compose installed +- Git for cloning the repository + +### Quick Start -### Development Setup - -1. **Initial Setup**: - - Run the setup script to configure environment variables and start required containers (EMQX and PostgreSQL): - ```bash - ./setup.sh - ``` - - This script will set up the `.env` file and start the necessary Docker containers for the message broker and database. - -2. **Development Mode**: - - Use the run script with the `--dev` flag to run the API from source: - ```bash - ./run.sh --dev - ``` - - This will activate the conda environment and run the API with uvicorn for development with hot reload. - -3. **Production Mode**: - - Use the run script without flags to run with Docker Compose: - ```bash - ./run.sh - ``` - - This will start all services using Docker Compose in detached mode. - -### Manual Setup (Alternative) - -#### Conda Installation -1. Install the environment using Conda: +1. **Clone the repository** ```bash - conda env create -f environment.yml + git clone + cd backend-api ``` -2. Activate the Conda environment: + +2. 
**Make setup script executable and run it** ```bash - conda activate backend-api + chmod +x setup.sh + ./setup.sh ``` -#### Running the API with Conda -Run the API using uvicorn with the following command: +3. **Configure your environment** + During setup, you'll configure several important variables: + + - **Config Password**: Used to encrypt and hash API keys and credentials for security + - **Username & Password**: Basic authentication credentials for API access (used by dashboards and other systems) + - **Additional configurations**: Available in the `.env` file including: + - Broker configuration (EMQX settings) + - Database URL + - Market data cleanup settings + - AWS S3 configuration (experimental) + - Banned tokens list (for delisted tokens) + +4. **Set up monitoring (Production recommended)** + For production deployments, add observability through Logfire: ```bash - uvicorn main:app --reload + export LOGFIRE_TOKEN=your_token_here ``` + Learn more: [Logfire Documentation](https://logfire.pydantic.dev/docs/) + +After running `setup.sh`, the required Docker images (EMQX, PostgreSQL, and Hummingbot) will be running and ready. + +## Running the API -#### Docker Installation and Running the API -For running the project using Docker, follow these steps: +You have two deployment options depending on your use case: -1. **Set up Environment Variables**: - - Execute the `setup.sh` script to configure the necessary environment variables in the `.env` file: - ```bash - ./setup.sh - ``` +### For Users (Production/Simple Deployment) +```bash +./run.sh +``` +This runs the API in a Docker container - simple and isolated. -2. **Build and Run with Docker Compose**: - - After setting up the environment variables, use Docker Compose to build and run the project: - ```bash - docker compose up --build - ``` +### For Developers (Development Environment) +1. **Install Conda** (if not already installed) +2. **Set up the development environment** + ```bash + make install + ``` + This creates a Conda environment with all dependencies. - - This command will build the Docker image and start the containers as defined in your `docker-compose.yml` file. +3. **Run in development mode** + ```bash + ./run.sh --dev + ``` + This starts the API from source with hot-reloading enabled. -### Usage -This API is designed for: -- **Deploying Hummingbot instances** -- **Starting/Stopping Containers** -- **Archiving Hummingbots** -- **Messaging with Hummingbot instances** +## Getting Started -To test these endpoints, you can use the [Swagger UI](http://localhost:8000/docs) or [Redoc](http://localhost:8000/redoc). +Once the API is running, you can access it at `http://localhost:8000` + +### First Steps +1. **Visit the API Documentation**: Go to `http://localhost:8000/docs` to explore the interactive Swagger documentation +2. **Authenticate**: Use the username and password you configured during setup +3. 
**Test endpoints**: Use the Swagger interface to test API functionality
+
+## API Overview
+
+The Hummingbot API is organized into several functional routers:
+
+### Docker Management (`/docker`)
+- Check running containers and images
+- Pull new Docker images
+- Start, stop, and remove containers
+- Monitor container status and health
+
+### Account Management (`/accounts`)
+- Create and delete trading accounts
+- Add/remove exchange credentials
+- Monitor account states and balances
+- View portfolio distribution
+- Track positions and funding payments
+
+### Trading Operations (`/trading`)
+- Place and cancel orders across exchanges
+- Monitor order status and execution
+- Set leverage and position modes
+- View trade history and performance
+- Real-time portfolio monitoring
+
+### Bot Orchestration (`/bot-orchestration`)
+- Discover and manage active bots
+- Deploy new Hummingbot instances
+- Start/stop automated strategies
+- Monitor bot performance in real-time
+
+### Strategy Management
+- **Controllers** (`/controllers`): Manage advanced strategy controllers
+- **Scripts** (`/scripts`): Handle traditional Hummingbot scripts
+- Create, edit, and remove strategy files
+- Configure strategy parameters
+
+### Market Data (`/market-data`)
+- Access real-time and historical candles
+- Get trading rules and exchange information
+- Monitor funding rates
+- Stream live market data
+
+### Backtesting (`/backtesting`)
+- Test strategies against historical data
+- Analyze strategy performance
+- Optimize parameters
+
+### Analytics (`/archived-bots`)
+- Analyze performance of stopped bots
+- Generate comprehensive reports
+- Review historical trades and orders
+- Extract insights from past strategies
+
+## Authentication
+
+All API endpoints require HTTP Basic Authentication. Include your configured credentials in all requests:
+
+```bash
+curl -u username:password http://localhost:8000/endpoint
+```
+
+## Support & Documentation
+
+- **API Documentation**: Available at `http://localhost:8000/docs` when running
+- **Detailed Examples**: Check the `CLAUDE.md` file for comprehensive API usage examples
+- **Issues**: Report bugs and feature requests through the project's issue tracker
 ## Contributing
-Contributions are welcome! For support or queries, please contact us on Discord.
+
+We welcome contributions! Please ensure you:
+1. Set up the development environment using `make install`
+2. Run pre-commit hooks with `make install-pre-commit`
+3. Follow the existing code style (Black formatter with 130 character line length)
+4. Test your changes thoroughly
+
+---
+
+Ready to start trading? Deploy your first account and start exploring the powerful capabilities of the Hummingbot API!
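+
+## Example: Calling the API from Python
+
+As a quick illustration of the authentication flow above, here is a minimal Python
+client sketch. It is only a sketch: the account name is hypothetical, the endpoint
+paths are shown as they appear in the routers (without any additional prefix), and
+the credentials are the ones you configured during setup:
+
+```python
+import requests
+
+BASE_URL = "http://localhost:8000"
+AUTH = ("admin", "admin")  # replace with your configured USERNAME/PASSWORD
+
+# Fetch the aggregated portfolio state, optionally filtered by account name.
+# "master_account" is a hypothetical account created via the accounts endpoints.
+response = requests.get(
+    f"{BASE_URL}/portfolio/state",
+    params={"account_names": ["master_account"]},
+    auth=AUTH,
+    timeout=10,
+)
+response.raise_for_status()
+print(response.json())
+```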
\ No newline at end of file

From 26f75020b0e4e2bd6c63569502738eb020364c5a Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Fri, 27 Jun 2025 20:19:03 +0200
Subject: [PATCH 149/244] (feat) remove contributing

---
 README.md | 9 ---------
 1 file changed, 9 deletions(-)

diff --git a/README.md b/README.md
index ac53f6d7..62f72e42 100644
--- a/README.md
+++ b/README.md
@@ -172,15 +172,6 @@ curl -u username:password http://localhost:8000/endpoint
 - **API Documentation**: Available at `http://localhost:8000/docs` when running
 - **Detailed Examples**: Check the `CLAUDE.md` file for comprehensive API usage examples
 - **Issues**: Report bugs and feature requests through the project's issue tracker
-
-## Contributing
-
-We welcome contributions! Please ensure you:
-1. Set up the development environment using `make install`
-2. Run pre-commit hooks with `make install-pre-commit`
-3. Follow the existing code style (Black formatter with 130 character line length)
-4. Test your changes thoroughly
-
 ---
 
 Ready to start trading? Deploy your first account and start exploring the powerful capabilities of the Hummingbot API!
\ No newline at end of file

From cd613bc4b3ed2de5b31b52ca6472b9f32a490ae3 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Fri, 27 Jun 2025 20:21:33 +0200
Subject: [PATCH 150/244] (feat) add url

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 62f72e42..20e2c013 100644
--- a/README.md
+++ b/README.md
@@ -42,7 +42,7 @@ Enables real-time communication with trading bots:
 1.
**Clone the repository** ```bash - git clone + git clone https://github.com/hummingbot/backend-api.git cd backend-api ``` From 1e9d1798b58ae75aff38e8cfb0a477ef35c85a9e Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 28 Jun 2025 01:52:38 +0200 Subject: [PATCH 151/244] (feat) improve init of positions and symbol map --- utils/connector_manager.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/utils/connector_manager.py b/utils/connector_manager.py index 8b7f0996..271268d6 100644 --- a/utils/connector_manager.py +++ b/utils/connector_manager.py @@ -240,15 +240,18 @@ async def _create_and_initialize_connector(self, account_name: str, connector_na # Start the connector's network without order book tracker self._start_network_without_order_book(connector) + + # Initialize symbol map + await connector._initialize_trading_pair_symbol_map() # Update initial balances await connector._update_balances() # Set default position mode to HEDGE for perpetual connectors if "_perpetual" in connector_name: - await connector._update_positions() if PositionMode.HEDGE in connector.supported_position_modes(): connector.set_position_mode(PositionMode.HEDGE) + await connector._update_positions() logging.info(f"Initialized connector {connector_name} for account {account_name}") return connector From ebb30ad6b23326bc1b0afce34644522b498055ad Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 28 Jun 2025 01:52:47 +0200 Subject: [PATCH 152/244] (feat) simplify account routes --- routers/accounts.py | 740 +++++++++++++++++++++++--------------------- 1 file changed, 395 insertions(+), 345 deletions(-) diff --git a/routers/accounts.py b/routers/accounts.py index 894bd827..31e7f45a 100644 --- a/routers/accounts.py +++ b/routers/accounts.py @@ -14,18 +14,37 @@ # Portfolio & Account State Monitoring @router.get("/portfolio/state", response_model=Dict[str, Dict[str, List[Dict]]]) -async def get_portfolio_state(accounts_service: AccountsService = Depends(get_accounts_service)): +async def get_portfolio_state( + account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), + accounts_service: AccountsService = Depends(get_accounts_service) +): """ - Get the current state of all accounts portfolio. + Get the current state of all or filtered accounts portfolio. 
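+
+    For example (a hypothetical request, assuming an account named "master_account"):
+    GET /portfolio/state?account_names=master_account
+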
+ Args: + account_names: Optional list of account names to filter by + Returns: - Dict containing all account states with connector balances and token information + Dict containing account states with connector balances and token information """ - return accounts_service.get_accounts_state() + all_states = accounts_service.get_accounts_state() + + # If no filter, return all accounts + if not account_names: + return all_states + + # Filter by requested accounts + filtered_states = {} + for account_name in account_names: + if account_name in all_states: + filtered_states[account_name] = all_states[account_name] + + return filtered_states @router.get("/portfolio/history", response_model=PaginatedResponse) async def get_portfolio_history( + account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), start_time: datetime = Query(default=None, description="Start time for filtering"), @@ -33,15 +52,47 @@ async def get_portfolio_history( accounts_service: AccountsService = Depends(get_accounts_service) ): """ - Get the historical state of all accounts portfolio with pagination. + Get the historical state of all or filtered accounts portfolio with pagination. + + Args: + account_names: Optional list of account names to filter by + limit: Number of items per page (1-1000) + cursor: Cursor for pagination (ISO timestamp) + start_time: Start time for filtering results + end_time: End time for filtering results + + Returns: + Paginated response with historical portfolio data """ try: - data, next_cursor, has_more = await accounts_service.load_account_state_history( - limit=limit, - cursor=cursor, - start_time=start_time, - end_time=end_time - ) + if not account_names: + # Get history for all accounts + data, next_cursor, has_more = await accounts_service.load_account_state_history( + limit=limit, + cursor=cursor, + start_time=start_time, + end_time=end_time + ) + else: + # Get history for specific accounts - need to aggregate + all_data = [] + for account_name in account_names: + acc_data, _, _ = await accounts_service.get_account_state_history( + account_name=account_name, + limit=limit, + cursor=cursor, + start_time=start_time, + end_time=end_time + ) + all_data.extend(acc_data) + + # Sort by timestamp and apply pagination + all_data.sort(key=lambda x: x.get("timestamp", ""), reverse=True) + + # Apply limit + data = all_data[:limit] + has_more = len(all_data) > limit + next_cursor = data[-1]["timestamp"] if data and has_more else None return PaginatedResponse( data=data, @@ -49,122 +100,378 @@ async def get_portfolio_history( "limit": limit, "has_more": has_more, "next_cursor": next_cursor, - "current_cursor": cursor + "current_cursor": cursor, + "filters": { + "account_names": account_names, + "start_time": start_time.isoformat() if start_time else None, + "end_time": end_time.isoformat() if end_time else None + } } ) except Exception as e: raise HTTPException(status_code=500, detail=str(e)) -@router.get("/portfolio/state/{account_name}", response_model=Dict[str, List[Dict]]) -async def get_account_portfolio_state(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + + +@router.get("/portfolio/distribution") +async def get_portfolio_distribution( + account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), + 
accounts_service: AccountsService = Depends(get_accounts_service) +): """ - Get current portfolio state of a specific account. + Get portfolio distribution by tokens with percentages across all or filtered accounts. Args: - account_name: Name of the account to get portfolio state for + account_names: Optional list of account names to filter by Returns: - Dictionary mapping connector names to lists of token information - - Raises: - HTTPException: 404 if account not found + Dictionary with token distribution including percentages, values, and breakdown by accounts/connectors """ - state = await accounts_service.get_account_current_state(account_name) - if not state: - raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - return state + if not account_names: + # Get distribution for all accounts + return accounts_service.get_portfolio_distribution() + elif len(account_names) == 1: + # Single account - use existing method + return accounts_service.get_portfolio_distribution(account_names[0]) + else: + # Multiple accounts - need to aggregate + aggregated_distribution = { + "tokens": {}, + "total_value": 0, + "token_count": 0, + "accounts": {} + } + + for account_name in account_names: + account_dist = accounts_service.get_portfolio_distribution(account_name) + + # Skip if account doesn't exist or has error + if account_dist.get("error") or account_dist.get("token_count", 0) == 0: + continue + + # Aggregate token data + for token, token_data in account_dist.get("tokens", {}).items(): + if token not in aggregated_distribution["tokens"]: + aggregated_distribution["tokens"][token] = { + "token": token, + "value": 0, + "percentage": 0, + "accounts": {} + } + + aggregated_distribution["tokens"][token]["value"] += token_data.get("value", 0) + + # Copy account-specific data + for acc_name, acc_data in token_data.get("accounts", {}).items(): + aggregated_distribution["tokens"][token]["accounts"][acc_name] = acc_data + + aggregated_distribution["total_value"] += account_dist.get("total_value", 0) + aggregated_distribution["accounts"][account_name] = account_dist.get("accounts", {}).get(account_name, {}) + + # Recalculate percentages + total_value = aggregated_distribution["total_value"] + if total_value > 0: + for token_data in aggregated_distribution["tokens"].values(): + token_data["percentage"] = (token_data["value"] / total_value) * 100 + + aggregated_distribution["token_count"] = len(aggregated_distribution["tokens"]) + + return aggregated_distribution -@router.get("/portfolio/history/{account_name}", response_model=PaginatedResponse) -async def get_account_portfolio_history( - account_name: str, - limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), - cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), - start_time: datetime = Query(default=None, description="Start time for filtering"), - end_time: datetime = Query(default=None, description="End time for filtering"), + + +@router.get("/portfolio/accounts-distribution") +async def get_accounts_distribution( + account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), accounts_service: AccountsService = Depends(get_accounts_service) ): """ - Get historical portfolio state of a specific account with pagination. + Get portfolio distribution by accounts with percentages. 
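+
+    For example (a hypothetical request):
+    GET /portfolio/accounts-distribution?account_names=master_account
+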
Args: - account_name: Name of the account to get history for - limit: Number of items per page (1-1000) - cursor: Cursor for pagination (ISO timestamp) - start_time: Start time for filtering results - end_time: End time for filtering results + account_names: Optional list of account names to filter by Returns: - Paginated response with historical account portfolio data - """ - data, next_cursor, has_more = await accounts_service.get_account_state_history( - account_name=account_name, - limit=limit, - cursor=cursor, - start_time=start_time, - end_time=end_time - ) + Dictionary with account distribution including percentages, values, and breakdown by connectors + """ + all_distribution = accounts_service.get_account_distribution() - return PaginatedResponse( - data=data, - pagination={ - "limit": limit, - "has_more": has_more, - "next_cursor": next_cursor, - "current_cursor": cursor, - "filters": { - "account_name": account_name, - "start_time": start_time.isoformat() if start_time else None, - "end_time": end_time.isoformat() if end_time else None - } - } - ) + # If no filter, return all accounts + if not account_names: + return all_distribution + + # Filter the distribution by requested accounts + filtered_distribution = { + "accounts": {}, + "total_value": 0, + "account_count": 0 + } + + for account_name in account_names: + if account_name in all_distribution.get("accounts", {}): + filtered_distribution["accounts"][account_name] = all_distribution["accounts"][account_name] + filtered_distribution["total_value"] += all_distribution["accounts"][account_name].get("total_value", 0) + + # Recalculate percentages + total_value = filtered_distribution["total_value"] + if total_value > 0: + for account_data in filtered_distribution["accounts"].values(): + account_data["percentage"] = (account_data.get("total_value", 0) / total_value) * 100 + + filtered_distribution["account_count"] = len(filtered_distribution["accounts"]) + + return filtered_distribution -@router.get("/portfolio/distribution") -async def get_portfolio_distribution(accounts_service: AccountsService = Depends(get_accounts_service)): +@router.get("/positions", response_model=List[Dict]) +async def get_positions( + account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), + connector_names: Optional[List[str]] = Query(default=None, description="Filter by connector names"), + accounts_service: AccountsService = Depends(get_accounts_service) +): """ - Get portfolio distribution by tokens with percentages across all accounts. - + Get current positions across all or filtered perpetual connectors. + + This endpoint fetches real-time position data directly from the connectors, + including unrealized PnL, leverage, funding fees, and margin information. 
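+
+    For example (a hypothetical request; connector name is illustrative):
+    GET /positions?account_names=master_account&connector_names=binance_perpetual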
+ + Args: + account_names: Optional list of account names to filter by + connector_names: Optional list of connector names to filter by + Returns: - Dictionary with token distribution including percentages, values, and breakdown by accounts/connectors + List of current position dictionaries with real-time data from filtered accounts/connectors + + Raises: + HTTPException: 500 if there's an error fetching positions """ - return accounts_service.get_portfolio_distribution() + try: + all_positions = [] + all_connectors = accounts_service.connector_manager.get_all_connectors() + + # Filter accounts + accounts_to_check = account_names if account_names else list(all_connectors.keys()) + + for account_name in accounts_to_check: + if account_name in all_connectors: + # Filter connectors + connectors_to_check = connector_names if connector_names else list(all_connectors[account_name].keys()) + + for connector_name in connectors_to_check: + # Only fetch positions from perpetual connectors + if connector_name in all_connectors[account_name] and "_perpetual" in connector_name: + try: + positions = await accounts_service.get_account_positions(account_name, connector_name) + all_positions.extend(positions) + except Exception as e: + # Log error but continue with other connectors + import logging + logging.warning(f"Failed to get positions for {account_name}/{connector_name}: {e}") + + return all_positions + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching positions: {str(e)}") -@router.get("/portfolio/distribution/{account_name}") -async def get_account_portfolio_distribution(account_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + +@router.get("/positions/snapshots", response_model=List[Dict]) +async def get_position_snapshots( + account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), + connector_names: Optional[List[str]] = Query(default=None, description="Filter by connector names"), + accounts_service: AccountsService = Depends(get_accounts_service) +): """ - Get portfolio distribution by tokens with percentages for a specific account. - + Get latest position snapshots from database for historical analysis. + + Returns the most recent position snapshots for all or filtered accounts, + optionally filtered by connectors. Useful for tracking position history + and performance over time. 
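+
+    For example (a hypothetical request):
+    GET /positions/snapshots?account_names=master_account&connector_names=binance_perpetual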
+ Args: - account_name: Name of the account to get distribution for - + account_names: Optional list of account names to filter by + connector_names: Optional list of connector names to filter by + Returns: - Dictionary with token distribution for the specified account - + List of latest position snapshot dictionaries from database + Raises: - HTTPException: 404 if account not found + HTTPException: 500 if there's an error fetching snapshots """ - result = accounts_service.get_portfolio_distribution(account_name) - - # Check if account exists by looking at the distribution - if result.get("token_count", 0) == 0 and not result.get("error") and account_name not in accounts_service.get_accounts_state(): - raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - - return result + try: + all_snapshots = [] + + # Get all accounts if not specified + if not account_names: + account_names = accounts_service.list_accounts() + + for account_name in account_names: + try: + # If specific connectors are requested, fetch each separately + if connector_names: + for connector_name in connector_names: + snapshots = await accounts_service.get_position_snapshots(account_name, connector_name) + all_snapshots.extend(snapshots) + else: + # Get all snapshots for the account + snapshots = await accounts_service.get_position_snapshots(account_name, None) + all_snapshots.extend(snapshots) + except Exception as e: + # Log error but continue with other accounts + import logging + logging.warning(f"Failed to get position snapshots for {account_name}: {e}") + + return all_snapshots + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching position snapshots: {str(e)}") -@router.get("/portfolio/accounts-distribution") -async def get_accounts_distribution(accounts_service: AccountsService = Depends(get_accounts_service)): + +@router.get("/funding-payments", response_model=List[Dict]) +async def get_funding_payments( + account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), + connector_names: Optional[List[str]] = Query(default=None, description="Filter by connector names"), + trading_pair: Optional[str] = Query(default=None, description="Filter by trading pair"), + limit: int = Query(default=100, ge=1, le=1000, description="Maximum number of records"), + accounts_service: AccountsService = Depends(get_accounts_service) +): """ - Get portfolio distribution by accounts with percentages. - + Get funding payment history across all or filtered perpetual connectors. + + This endpoint retrieves historical funding payment records including + funding rates, payment amounts, and position data at time of payment. 
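+
+    For example (a hypothetical request; pair and connector are illustrative):
+    GET /funding-payments?connector_names=binance_perpetual&trading_pair=ETH-USDT&limit=50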
+ + Args: + account_names: Optional list of account names to filter by + connector_names: Optional list of connector names to filter by + trading_pair: Optional trading pair filter + limit: Maximum number of records to return + Returns: - Dictionary with account distribution including percentages, values, and breakdown by connectors + List of funding payment records with rates, amounts, and position data + + Raises: + HTTPException: 500 if there's an error fetching funding payments + """ + try: + all_funding_payments = [] + all_connectors = accounts_service.connector_manager.get_all_connectors() + + # Filter accounts + accounts_to_check = account_names if account_names else list(all_connectors.keys()) + + for account_name in accounts_to_check: + if account_name in all_connectors: + # Filter connectors + connectors_to_check = connector_names if connector_names else list(all_connectors[account_name].keys()) + + for connector_name in connectors_to_check: + # Only fetch funding payments from perpetual connectors + if connector_name in all_connectors[account_name] and "_perpetual" in connector_name: + try: + payments = await accounts_service.get_funding_payments( + account_name=account_name, + connector_name=connector_name, + trading_pair=trading_pair, + limit=limit + ) + all_funding_payments.extend(payments) + except Exception as e: + # Log error but continue with other connectors + import logging + logging.warning(f"Failed to get funding payments for {account_name}/{connector_name}: {e}") + + # Sort by timestamp (most recent first) + all_funding_payments.sort(key=lambda x: x.get("timestamp", ""), reverse=True) + + # Apply limit to the combined results + return all_funding_payments[:limit] + + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching funding payments: {str(e)}") + + +@router.get("/funding-fees/summary", response_model=List[Dict]) +async def get_funding_fees_summary( + account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), + connector_names: Optional[List[str]] = Query(default=None, description="Filter by connector names"), + trading_pairs: Optional[List[str]] = Query(default=None, description="Filter by trading pairs"), + accounts_service: AccountsService = Depends(get_accounts_service) +): """ - return accounts_service.get_account_distribution() + Get total funding fees summary across all or filtered perpetual connectors. + + This endpoint provides aggregated funding fee information including + total fees paid/received, payment count, and fee currency for each + trading pair across the filtered accounts and connectors. 
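+
+    For example (a hypothetical request):
+    GET /funding-fees/summary?account_names=master_account&trading_pairs=ETH-USDT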
+ + Args: + account_names: Optional list of account names to filter by + connector_names: Optional list of connector names to filter by + trading_pairs: Optional list of trading pairs to filter by + + Returns: + List of funding fee summaries by trading pair with totals + + Raises: + HTTPException: 500 if there's an error calculating fees + """ + try: + all_fee_summaries = [] + all_connectors = accounts_service.connector_manager.get_all_connectors() + + # Filter accounts + accounts_to_check = account_names if account_names else list(all_connectors.keys()) + + for account_name in accounts_to_check: + if account_name in all_connectors: + # Filter connectors + connectors_to_check = connector_names if connector_names else list(all_connectors[account_name].keys()) + + for connector_name in connectors_to_check: + # Only get fees from perpetual connectors + if connector_name in all_connectors[account_name] and "_perpetual" in connector_name: + # Get all trading pairs for this connector if not specified + pairs_to_check = trading_pairs if trading_pairs else [] + + # If no specific pairs requested, get all available pairs from funding payments + if not pairs_to_check: + try: + # Get a sample of funding payments to find available pairs + payments = await accounts_service.get_funding_payments( + account_name=account_name, + connector_name=connector_name, + limit=1000 + ) + # Extract unique trading pairs + pairs_to_check = list( + set(p.get("trading_pair") for p in payments if p.get("trading_pair"))) + except Exception: + continue + + # Get fee summary for each pair + for trading_pair in pairs_to_check: + try: + fee_summary = await accounts_service.get_total_funding_fees( + account_name=account_name, + connector_name=connector_name, + trading_pair=trading_pair + ) + # Add account and connector info to the summary + fee_summary["account_name"] = account_name + fee_summary["connector_name"] = connector_name + all_fee_summaries.append(fee_summary) + except Exception as e: + # Log error but continue with other pairs + import logging + logging.warning( + f"Failed to get funding fees for {account_name}/{connector_name}/{trading_pair}: {e}") + + return all_fee_summaries + + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error calculating funding fees: {str(e)}") @router.get("/connectors", response_model=List[str]) @@ -316,260 +623,3 @@ async def add_credential(account_name: str, connector_name: str, credentials: Di except Exception as e: await accounts_service.delete_credentials(account_name, connector_name) raise HTTPException(status_code=400, detail=str(e)) - - -# Position Management Endpoints - -@router.get("/{account_name}/{connector_name}/positions", response_model=List[Dict]) -async def get_account_positions( - account_name: str, - connector_name: str, - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get current positions for a specific perpetual connector. - - This endpoint fetches real-time position data directly from the connector, - including unrealized PnL, leverage, funding fees, and margin information. 
- - Args: - account_name: Name of the account - connector_name: Name of the perpetual connector - - Returns: - List of current position dictionaries with real-time data - - Raises: - HTTPException: 400 if connector is not perpetual or doesn't support positions - HTTPException: 404 if account or connector not found - HTTPException: 500 if there's an error fetching positions - """ - try: - return await accounts_service.get_account_positions(account_name, connector_name) - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error fetching positions: {str(e)}") - -@router.get("/{account_name}/positions/snapshots", response_model=List[Dict]) -async def get_position_snapshots( - account_name: str, - connector_name: Optional[str] = Query(default=None, description="Filter by specific connector"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get latest position snapshots from database for historical analysis. - - Returns the most recent position snapshots for the specified account, - optionally filtered by connector. Useful for tracking position history - and performance over time. - - Args: - account_name: Name of the account - connector_name: Optional connector name to filter results - - Returns: - List of latest position snapshot dictionaries from database - - Raises: - HTTPException: 404 if account not found - HTTPException: 500 if there's an error fetching snapshots - """ - try: - return await accounts_service.get_position_snapshots(account_name, connector_name) - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error fetching position snapshots: {str(e)}") - - -@router.get("/{account_name}/positions", response_model=List[Dict]) -async def get_all_account_positions( - account_name: str, - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get current positions across all perpetual connectors for an account. - - This endpoint aggregates real-time position data from all perpetual connectors - associated with the specified account, providing a complete portfolio view. 
- - Args: - account_name: Name of the account - - Returns: - List of position dictionaries from all perpetual connectors - - Raises: - HTTPException: 404 if account not found - HTTPException: 500 if there's an error fetching positions - """ - try: - all_positions = [] - - # Get all connectors for the account - all_connectors = accounts_service.connector_manager.get_all_connectors() - - if account_name in all_connectors: - for connector_name in all_connectors[account_name].keys(): - # Only fetch positions from perpetual connectors - if "_perpetual" in connector_name: - try: - positions = await accounts_service.get_account_positions(account_name, connector_name) - all_positions.extend(positions) - except Exception as e: - # Log error but continue with other connectors - import logging - logging.warning(f"Failed to get positions for {connector_name}: {e}") - - return all_positions - - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error fetching account positions: {str(e)}") - - -# Funding Fee Management Endpoints - -@router.get("/{account_name}/{connector_name}/funding-payments", response_model=List[Dict]) -async def get_funding_payments( - account_name: str, - connector_name: str, - trading_pair: Optional[str] = Query(default=None, description="Filter by trading pair"), - limit: int = Query(default=100, ge=1, le=1000, description="Maximum number of records"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get funding payment history for a specific perpetual connector. - - This endpoint retrieves historical funding payment records including - funding rates, payment amounts, and position data at time of payment. - - Args: - account_name: Name of the account - connector_name: Name of the perpetual connector - trading_pair: Optional trading pair filter - limit: Maximum number of records to return - - Returns: - List of funding payment records with rates, amounts, and position data - - Raises: - HTTPException: 400 if connector is not perpetual - HTTPException: 404 if account or connector not found - HTTPException: 500 if there's an error fetching funding payments - """ - try: - # Validate this is a perpetual connector - if "_perpetual" not in connector_name: - raise HTTPException(status_code=400, detail=f"Connector '{connector_name}' is not a perpetual connector") - - return await accounts_service.get_funding_payments( - account_name=account_name, - connector_name=connector_name, - trading_pair=trading_pair, - limit=limit - ) - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error fetching funding payments: {str(e)}") - - -@router.get("/{account_name}/{connector_name}/funding-fees/{trading_pair}", response_model=Dict) -async def get_total_funding_fees( - account_name: str, - connector_name: str, - trading_pair: str, - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get total funding fees summary for a specific trading pair. - - This endpoint provides aggregated funding fee information including - total fees paid/received, payment count, and fee currency. 
- - Args: - account_name: Name of the account - connector_name: Name of the perpetual connector - trading_pair: Trading pair to get fees for - - Returns: - Dictionary with total funding fees summary - - Raises: - HTTPException: 400 if connector is not perpetual - HTTPException: 404 if account or connector not found - HTTPException: 500 if there's an error calculating fees - """ - try: - # Validate this is a perpetual connector - if "_perpetual" not in connector_name: - raise HTTPException(status_code=400, detail=f"Connector '{connector_name}' is not a perpetual connector") - - return await accounts_service.get_total_funding_fees( - account_name=account_name, - connector_name=connector_name, - trading_pair=trading_pair - ) - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error calculating funding fees: {str(e)}") - - -@router.get("/{account_name}/funding-payments", response_model=List[Dict]) -async def get_all_account_funding_payments( - account_name: str, - limit: int = Query(default=100, ge=1, le=1000, description="Maximum number of records"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get funding payment history across all perpetual connectors for an account. - - This endpoint aggregates funding payment data from all perpetual connectors - associated with the specified account, providing a complete funding fee view. - - Args: - account_name: Name of the account - limit: Maximum number of records to return - - Returns: - List of funding payment records from all perpetual connectors - - Raises: - HTTPException: 404 if account not found - HTTPException: 500 if there's an error fetching funding payments - """ - try: - all_funding_payments = [] - - # Get all connectors for the account - all_connectors = accounts_service.connector_manager.get_all_connectors() - - if account_name in all_connectors: - for connector_name in all_connectors[account_name].keys(): - # Only fetch funding payments from perpetual connectors - if "_perpetual" in connector_name: - try: - payments = await accounts_service.get_funding_payments( - account_name=account_name, - connector_name=connector_name, - limit=limit - ) - all_funding_payments.extend(payments) - except Exception as e: - # Log error but continue with other connectors - import logging - logging.warning(f"Failed to get funding payments for {connector_name}: {e}") - - # Sort by timestamp (most recent first) - all_funding_payments.sort(key=lambda x: x.get("timestamp", ""), reverse=True) - - # Apply limit to the combined results - return all_funding_payments[:limit] - - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error fetching account funding payments: {str(e)}") - - - From 1d85b704f41b9e5f8cbb81a4adc4a263e5a33e7a Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 30 Jun 2025 16:07:50 +0200 Subject: [PATCH 153/244] (feat) update backend api docker compose --- docker-compose.yml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index c176d84f..6ce0d47a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -7,12 +7,13 @@ services: volumes: - ./bots:/backend-api/bots - /var/run/docker.sock:/var/run/docker.sock + env_file: + - .env environment: + # Override specific values for Docker networking - BROKER_HOST=emqx - - BROKER_PORT=1883 - - USERNAME=admin - - PASSWORD=admin - DATABASE_URL=postgresql+asyncpg://hbot:backend-api@postgres:5432/backend_api + - 
BOTS_PATH=/backend-api/bots networks: - emqx-bridge depends_on: From a7a94892922196f343fad396155110ca6cb0794a Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 30 Jun 2025 17:17:09 +0200 Subject: [PATCH 154/244] (feat) remove imports --- services/market_data_feed_manager.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/services/market_data_feed_manager.py b/services/market_data_feed_manager.py index c15e26c0..fb04bc0e 100644 --- a/services/market_data_feed_manager.py +++ b/services/market_data_feed_manager.py @@ -1,9 +1,8 @@ import asyncio import time -from typing import Dict, Optional, Any, Callable, List, Set +from typing import Dict, Optional, Callable, List import logging from enum import Enum -from decimal import Decimal from hummingbot.data_feed.candles_feed.data_types import CandlesConfig from hummingbot.data_feed.market_data_provider import MarketDataProvider From c89af0774bb9d2c4cd5ce898dff166bb97f3b609 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 30 Jun 2025 17:36:31 +0200 Subject: [PATCH 155/244] (feat) simplify positions info --- database/repositories/position_repository.py | 133 ------------------- services/accounts_service.py | 100 +------------- 2 files changed, 1 insertion(+), 232 deletions(-) delete mode 100644 database/repositories/position_repository.py diff --git a/database/repositories/position_repository.py b/database/repositories/position_repository.py deleted file mode 100644 index 8ffaaee8..00000000 --- a/database/repositories/position_repository.py +++ /dev/null @@ -1,133 +0,0 @@ -from datetime import datetime -from typing import Dict, List, Optional -from decimal import Decimal - -from sqlalchemy import desc, select, func -from sqlalchemy.ext.asyncio import AsyncSession - -from database.models import PositionSnapshot - - -class PositionRepository: - def __init__(self, session: AsyncSession): - self.session = session - - async def create_position_snapshot(self, position_data: Dict) -> PositionSnapshot: - """Create a new position snapshot record.""" - position = PositionSnapshot(**position_data) - self.session.add(position) - await self.session.flush() # Get the ID - return position - - async def get_latest_positions(self, account_name: str, connector_name: str) -> List[PositionSnapshot]: - """Get the latest position snapshots for an account-connector pair.""" - # Get the latest snapshot for each trading pair - subquery = ( - select(PositionSnapshot.trading_pair, - func.max(PositionSnapshot.timestamp).label('max_timestamp')) - .where( - PositionSnapshot.account_name == account_name, - PositionSnapshot.connector_name == connector_name, - PositionSnapshot.exchange_size != 0 # Only active positions - ) - .group_by(PositionSnapshot.trading_pair) - .subquery() - ) - - query = ( - select(PositionSnapshot) - .join(subquery, - (PositionSnapshot.trading_pair == subquery.c.trading_pair) & - (PositionSnapshot.timestamp == subquery.c.max_timestamp)) - .where( - PositionSnapshot.account_name == account_name, - PositionSnapshot.connector_name == connector_name - ) - ) - - result = await self.session.execute(query) - return result.scalars().all() - - async def get_position_history(self, account_name: str, connector_name: str, - trading_pair: str, limit: int = 100) -> List[PositionSnapshot]: - """Get position history for a specific trading pair.""" - query = ( - select(PositionSnapshot) - .where( - PositionSnapshot.account_name == account_name, - PositionSnapshot.connector_name == connector_name, - PositionSnapshot.trading_pair == trading_pair - 
) - .order_by(PositionSnapshot.timestamp.desc()) - .limit(limit) - ) - - result = await self.session.execute(query) - return result.scalars().all() - - async def update_position_reconciliation(self, position_id: int, - calculated_size: Decimal, - calculated_entry_price: Decimal = None) -> Optional[PositionSnapshot]: - """Update position with calculated values for reconciliation.""" - result = await self.session.execute( - select(PositionSnapshot).where(PositionSnapshot.id == position_id) - ) - position = result.scalar_one_or_none() - - if position: - position.calculated_size = float(calculated_size) - if calculated_entry_price: - position.calculated_entry_price = float(calculated_entry_price) - - # Calculate difference and reconciliation status - size_diff = abs(calculated_size - Decimal(str(position.exchange_size))) - position.size_difference = float(size_diff) - - # Set reconciliation status (within 0.1% tolerance) - tolerance = Decimal(str(position.exchange_size)) * Decimal('0.001') - if size_diff <= tolerance: - position.is_reconciled = "RECONCILED" - else: - position.is_reconciled = "MISMATCH" - - await self.session.flush() - - return position - - async def get_reconciliation_mismatches(self, account_name: str = None) -> List[PositionSnapshot]: - """Get positions with reconciliation mismatches.""" - query = select(PositionSnapshot).where(PositionSnapshot.is_reconciled == "MISMATCH") - - if account_name: - query = query.where(PositionSnapshot.account_name == account_name) - - query = query.order_by(PositionSnapshot.timestamp.desc()) - - result = await self.session.execute(query) - return result.scalars().all() - - def to_dict(self, position: PositionSnapshot) -> Dict: - """Convert PositionSnapshot model to dictionary format.""" - return { - "id": position.id, - "account_name": position.account_name, - "connector_name": position.connector_name, - "trading_pair": position.trading_pair, - "timestamp": position.timestamp.isoformat(), - "side": position.side, - "exchange_size": float(position.exchange_size), - "entry_price": float(position.entry_price) if position.entry_price else None, - "mark_price": float(position.mark_price) if position.mark_price else None, - "unrealized_pnl": float(position.unrealized_pnl) if position.unrealized_pnl else None, - "percentage_pnl": float(position.percentage_pnl) if position.percentage_pnl else None, - "leverage": float(position.leverage) if position.leverage else None, - "initial_margin": float(position.initial_margin) if position.initial_margin else None, - "maintenance_margin": float(position.maintenance_margin) if position.maintenance_margin else None, - "cumulative_funding_fees": float(position.cumulative_funding_fees), - "fee_currency": position.fee_currency, - "calculated_size": float(position.calculated_size) if position.calculated_size else None, - "calculated_entry_price": float(position.calculated_entry_price) if position.calculated_entry_price else None, - "size_difference": float(position.size_difference) if position.size_difference else None, - "exchange_position_id": position.exchange_position_id, - "is_reconciled": position.is_reconciled, - } \ No newline at end of file diff --git a/services/accounts_service.py b/services/accounts_service.py index 5b53420b..f9bde5c6 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -1083,17 +1083,12 @@ async def get_account_positions(self, account_name: str, connector_name: str) -> position_dict = { "account_name": account_name, "connector_name": connector_name, - 
"trading_pair": trading_pair, + "trading_pair": position_info.trading_pair, "side": position_info.position_side.name if hasattr(position_info, 'position_side') else "UNKNOWN", "amount": float(position_info.amount) if hasattr(position_info, 'amount') else 0.0, "entry_price": float(position_info.entry_price) if hasattr(position_info, 'entry_price') else None, - "mark_price": float(position_info.mark_price) if hasattr(position_info, 'mark_price') else None, "unrealized_pnl": float(position_info.unrealized_pnl) if hasattr(position_info, 'unrealized_pnl') else None, - "percentage_pnl": float(position_info.unrealized_pnl_percentage) if hasattr(position_info, 'unrealized_pnl_percentage') else None, "leverage": float(position_info.leverage) if hasattr(position_info, 'leverage') else None, - "margin": float(position_info.initial_margin) if hasattr(position_info, 'initial_margin') else None, - "maintenance_margin": float(position_info.maintenance_margin) if hasattr(position_info, 'maintenance_margin') else None, - "funding_fees": float(position_info.cumulative_funding_fee) if hasattr(position_info, 'cumulative_funding_fee') else 0.0, } # Only include positions with non-zero amounts @@ -1106,99 +1101,6 @@ async def get_account_positions(self, account_name: str, connector_name: str) -> logging.error(f"Failed to get positions for {connector_name}: {e}") raise HTTPException(status_code=500, detail=f"Failed to get positions: {str(e)}") - async def save_position_snapshot(self, account_name: str, connector_name: str) -> Dict[str, int]: - """ - Save current positions as snapshots in the database for historical tracking. - - Args: - account_name: Name of the account - connector_name: Name of the connector - - Returns: - Dictionary with count of snapshots saved - """ - await self.ensure_db_initialized() - - try: - # Get current positions from connector - positions = await self.get_account_positions(account_name, connector_name) - - if not positions: - return {"snapshots_saved": 0, "message": "No active positions to save"} - - async with self.db_manager.get_session_context() as session: - position_repo = PositionRepository(session) - snapshots_saved = 0 - - for position in positions: - # Create snapshot data - snapshot_data = { - "account_name": account_name, - "connector_name": connector_name, - "trading_pair": position["trading_pair"], - "side": position["side"], - "exchange_size": position["amount"], - "entry_price": position["entry_price"], - "mark_price": position["mark_price"], - "unrealized_pnl": position["unrealized_pnl"], - "percentage_pnl": position["percentage_pnl"], - "leverage": position["leverage"], - "initial_margin": position["margin"], - "maintenance_margin": position["maintenance_margin"], - "cumulative_funding_fees": position["funding_fees"], - "fee_currency": "USDT", # Most perpetuals use USDT - "is_reconciled": "PENDING" - } - - await position_repo.create_position_snapshot(snapshot_data) - snapshots_saved += 1 - - return { - "snapshots_saved": snapshots_saved, - "message": f"Saved {snapshots_saved} position snapshots for {account_name}/{connector_name}" - } - - except Exception as e: - logging.error(f"Error saving position snapshots: {e}") - raise HTTPException(status_code=500, detail=f"Failed to save position snapshots: {str(e)}") - - async def get_position_snapshots(self, account_name: str, connector_name: str = None) -> List[Dict]: - """ - Get latest position snapshots from database. 
- - Args: - account_name: Name of the account - connector_name: Optional connector name filter - - Returns: - List of latest position snapshots - """ - await self.ensure_db_initialized() - - try: - async with self.db_manager.get_session_context() as session: - position_repo = PositionRepository(session) - - if connector_name: - positions = await position_repo.get_latest_positions(account_name, connector_name) - return [position_repo.to_dict(pos) for pos in positions] - else: - # Get for all perpetual connectors - all_positions = [] - all_connectors = self.connector_manager.get_all_connectors() - - if account_name in all_connectors: - for conn_name in all_connectors[account_name].keys(): - if "_perpetual" in conn_name: - positions = await position_repo.get_latest_positions(account_name, conn_name) - all_positions.extend([position_repo.to_dict(pos) for pos in positions]) - - return all_positions - - except Exception as e: - logging.error(f"Error getting position snapshots: {e}") - return [] - async def get_funding_payments(self, account_name: str, connector_name: str = None, trading_pair: str = None, limit: int = 100) -> List[Dict]: """ From 62ec930099ac8eb5fe0af773846bce950b1e7147 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 30 Jun 2025 18:38:18 +0200 Subject: [PATCH 156/244] (feat) add logfire --- environment.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/environment.yml b/environment.yml index cce70501..e61f71e2 100644 --- a/environment.yml +++ b/environment.yml @@ -25,3 +25,4 @@ dependencies: - psycopg2-binary - greenlet - pydantic-settings + - logfire From 22ab68cd19bb090702ded31558bfada79a906aca Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 30 Jun 2025 20:50:50 +0200 Subject: [PATCH 157/244] (feat) remove positions repository --- database/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/database/__init__.py b/database/__init__.py index e7f49783..f28a347e 100644 --- a/database/__init__.py +++ b/database/__init__.py @@ -3,7 +3,6 @@ from .repositories import AccountRepository from .repositories.order_repository import OrderRepository from .repositories.trade_repository import TradeRepository -from .repositories.position_repository import PositionRepository from .repositories.funding_repository import FundingRepository -__all__ = ["AccountState", "TokenState", "Order", "Trade", "PositionSnapshot", "FundingPayment", "Base", "AsyncDatabaseManager", "AccountRepository", "OrderRepository", "TradeRepository", "PositionRepository", "FundingRepository"] \ No newline at end of file +__all__ = ["AccountState", "TokenState", "Order", "Trade", "PositionSnapshot", "FundingPayment", "Base", "AsyncDatabaseManager", "AccountRepository", "OrderRepository", "TradeRepository", "FundingRepository"] \ No newline at end of file From d56ecfa92aa5335eb76470a107f98fef8fd3b2c3 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 30 Jun 2025 20:51:04 +0200 Subject: [PATCH 158/244] (feat) reorganize routers in connectors --- routers/accounts.py | 26 --------- routers/connectors.py | 115 ++++++++++++++++++++++++++++++++++++++++ routers/market_data.py | 116 ----------------------------------------- 3 files changed, 115 insertions(+), 142 deletions(-) create mode 100644 routers/connectors.py diff --git a/routers/accounts.py b/routers/accounts.py index 31e7f45a..56dd9e94 100644 --- a/routers/accounts.py +++ b/routers/accounts.py @@ -2,7 +2,6 @@ from datetime import datetime from fastapi import APIRouter, HTTPException, Depends, Query -from 
hummingbot.client.settings import AllConnectorSettings from starlette import status from services.accounts_service import AccountsService @@ -474,31 +473,6 @@ async def get_funding_fees_summary( raise HTTPException(status_code=500, detail=f"Error calculating funding fees: {str(e)}") -@router.get("/connectors", response_model=List[str]) -async def available_connectors(): - """ - Get a list of all available connectors. - - Returns: - List of connector names supported by the system - """ - return list(AllConnectorSettings.get_connector_settings().keys()) - - -@router.get("/connector-config-map/{connector_name}", response_model=List[str]) -async def get_connector_config_map(connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Get configuration fields required for a specific connector. - - Args: - connector_name: Name of the connector to get config map for - - Returns: - List of configuration field names required for the connector - """ - return accounts_service.get_connector_config_map(connector_name) - - @router.get("/", response_model=List[str]) async def list_accounts(accounts_service: AccountsService = Depends(get_accounts_service)): """ diff --git a/routers/connectors.py b/routers/connectors.py new file mode 100644 index 00000000..fbc0d40a --- /dev/null +++ b/routers/connectors.py @@ -0,0 +1,115 @@ +from typing import List, Optional + +from fastapi import APIRouter, Depends, Request, HTTPException, Query +from hummingbot.client.settings import AllConnectorSettings + +from services.accounts_service import AccountsService +from services.market_data_feed_manager import MarketDataFeedManager +from deps import get_accounts_service + +router = APIRouter(tags=["Connectors"], prefix="/connectors") + + +@router.get("/", response_model=List[str]) +async def available_connectors(): + """ + Get a list of all available connectors. + + Returns: + List of connector names supported by the system + """ + return list(AllConnectorSettings.get_connector_settings().keys()) + + +@router.get("/{connector_name}/config-map", response_model=List[str]) +async def get_connector_config_map(connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Get configuration fields required for a specific connector. + + Args: + connector_name: Name of the connector to get config map for + + Returns: + List of configuration field names required for the connector + """ + return accounts_service.get_connector_config_map(connector_name) + + +@router.get("/{connector_name}/trading-rules") +async def get_trading_rules( + request: Request, + connector_name: str, + trading_pairs: Optional[List[str]] = Query(default=None, description="Filter by specific trading pairs") +): + """ + Get trading rules for a connector, optionally filtered by trading pairs. + + This endpoint uses the MarketDataFeedManager to access non-trading connector instances, + which means no authentication or account setup is required. 
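+
+    Illustrative client call (a sketch, not part of the codebase; assumes the API
+    is served on localhost:8000 with the default admin/admin basic-auth credentials):
+
+        import requests
+
+        resp = requests.get(
+            "http://localhost:8000/connectors/binance/trading-rules",
+            params={"trading_pairs": ["BTC-USDT", "ETH-USDT"]},
+            auth=("admin", "admin"),
+        )
+        print(resp.json())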
+ + Args: + request: FastAPI request object + connector_name: Name of the connector (e.g., 'binance', 'binance_perpetual') + trading_pairs: Optional list of trading pairs to filter by (e.g., ['BTC-USDT', 'ETH-USDT']) + + Returns: + Dictionary mapping trading pairs to their trading rules + + Raises: + HTTPException: 404 if connector not found, 500 for other errors + """ + try: + market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager + + # Get trading rules (filtered by trading pairs if provided) + rules = await market_data_feed_manager.get_trading_rules(connector_name, trading_pairs) + + if "error" in rules: + raise HTTPException(status_code=404, detail=f"Connector '{connector_name}' not found or error: {rules['error']}") + + return rules + + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error retrieving trading rules: {str(e)}") + + +@router.get("/{connector_name}/order-types") +async def get_supported_order_types(request: Request, connector_name: str): + """ + Get order types supported by a specific connector. + + This endpoint uses the MarketDataFeedManager to access non-trading connector instances, + which means no authentication or account setup is required. + + Args: + request: FastAPI request object + connector_name: Name of the connector (e.g., 'binance', 'binance_perpetual') + + Returns: + List of supported order types (LIMIT, MARKET, LIMIT_MAKER) + + Raises: + HTTPException: 404 if connector not found, 500 for other errors + """ + try: + market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager + + # Access connector through MarketDataProvider's _rate_sources + connector_instance = market_data_feed_manager.market_data_provider._rate_sources.get(connector_name) + + if not connector_instance: + raise HTTPException(status_code=404, detail=f"Connector '{connector_name}' not found") + + # Get supported order types + if hasattr(connector_instance, 'supported_order_types'): + order_types = [order_type.name for order_type in connector_instance.supported_order_types()] + return {"connector": connector_name, "supported_order_types": order_types} + else: + raise HTTPException(status_code=404, detail=f"Connector '{connector_name}' does not support order types query") + + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error retrieving order types: {str(e)}") \ No newline at end of file diff --git a/routers/market_data.py b/routers/market_data.py index 95a7cebf..eca9a8e5 100644 --- a/routers/market_data.py +++ b/routers/market_data.py @@ -121,119 +121,3 @@ async def get_market_data_settings(): } -# Trading Rules Endpoints -@router.get("/trading-rules/{connector}") -async def get_all_trading_rules(request: Request, connector: str): - """ - Get trading rules for all available trading pairs on a connector. - - This endpoint uses the MarketDataFeedManager to access non-trading connector instances, - which means no authentication or account setup is required. 
- - Args: - request: FastAPI request object - connector: Name of the connector (e.g., 'binance', 'binance_perpetual') - - Returns: - Dictionary mapping trading pairs to their trading rules - - Raises: - HTTPException: 404 if connector not found, 500 for other errors - """ - try: - market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager - - # Get trading rules for all pairs - rules = await market_data_feed_manager.get_trading_rules(connector) - - if "error" in rules: - raise HTTPException(status_code=404, detail=f"Connector '{connector}' not found or error: {rules['error']}") - - return rules - - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving trading rules: {str(e)}") - - -@router.get("/trading-rules/{connector}/{trading_pair}") -async def get_trading_rules_for_pair(request: Request, connector: str, trading_pair: str): - """ - Get trading rules for a specific trading pair on a connector. - - This endpoint uses the MarketDataFeedManager to access non-trading connector instances, - which means no authentication or account setup is required. - - Args: - request: FastAPI request object - connector: Name of the connector (e.g., 'binance', 'binance_perpetual') - trading_pair: Trading pair to get rules for (e.g., 'BTC-USDT') - - Returns: - Trading rules including minimum order size, price increment, etc. - - Raises: - HTTPException: 404 if connector or trading pair not found, 500 for other errors - """ - try: - market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager - - # Get trading rules for specific pair - rules = await market_data_feed_manager.get_trading_rules(connector, [trading_pair]) - - if "error" in rules: - raise HTTPException(status_code=404, detail=f"Connector '{connector}' not found or error: {rules['error']}") - - if trading_pair not in rules: - raise HTTPException(status_code=404, detail=f"Trading pair '{trading_pair}' not found on {connector}") - - if "error" in rules[trading_pair]: - raise HTTPException(status_code=404, detail=rules[trading_pair]["error"]) - - return rules[trading_pair] - - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving trading rules: {str(e)}") - - -@router.get("/supported-order-types/{connector}") -async def get_supported_order_types(request: Request, connector: str): - """ - Get order types supported by a specific connector. - - This endpoint uses the MarketDataFeedManager to access non-trading connector instances, - which means no authentication or account setup is required. 
- - Args: - request: FastAPI request object - connector: Name of the connector (e.g., 'binance', 'binance_perpetual') - - Returns: - List of supported order types (LIMIT, MARKET, LIMIT_MAKER) - - Raises: - HTTPException: 404 if connector not found, 500 for other errors - """ - try: - market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager - - # Access connector through MarketDataProvider's _rate_sources - connector_instance = market_data_feed_manager.market_data_provider._rate_sources.get(connector) - - if not connector_instance: - raise HTTPException(status_code=404, detail=f"Connector '{connector}' not found") - - # Get supported order types - if hasattr(connector_instance, 'supported_order_types'): - order_types = [order_type.name for order_type in connector_instance.supported_order_types()] - return {"connector": connector, "supported_order_types": order_types} - else: - raise HTTPException(status_code=404, detail=f"Connector '{connector}' does not support order types query") - - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving order types: {str(e)}") From 0d86e78a8475c98b39f1d12f43a6486b59bee588 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 30 Jun 2025 20:51:17 +0200 Subject: [PATCH 159/244] (feat) remove positions repository --- main.py | 2 ++ services/accounts_service.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/main.py b/main.py index 75da8763..8f6dd928 100644 --- a/main.py +++ b/main.py @@ -22,6 +22,7 @@ archived_bots, backtesting, bot_orchestration, + connectors, controllers, docker, market_data, @@ -170,6 +171,7 @@ def auth_user( # Include all routers with authentication app.include_router(docker.router, dependencies=[Depends(auth_user)]) app.include_router(accounts.router, dependencies=[Depends(auth_user)]) +app.include_router(connectors.router, dependencies=[Depends(auth_user)]) app.include_router(trading.router, dependencies=[Depends(auth_user)]) app.include_router(bot_orchestration.router, dependencies=[Depends(auth_user)]) app.include_router(controllers.router, dependencies=[Depends(auth_user)]) diff --git a/services/accounts_service.py b/services/accounts_service.py index f9bde5c6..3b12c979 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -9,7 +9,7 @@ from hummingbot.core.data_type.common import OrderType, TradeType, PositionAction, PositionMode from config import settings -from database import AsyncDatabaseManager, AccountRepository, OrderRepository, TradeRepository, PositionRepository, FundingRepository +from database import AsyncDatabaseManager, AccountRepository, OrderRepository, TradeRepository, FundingRepository from services.market_data_feed_manager import MarketDataFeedManager from utils.connector_manager import ConnectorManager From 394e1e5904fc98726aa21e47aae2e9ce4a400ff3 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 30 Jun 2025 22:53:07 +0200 Subject: [PATCH 160/244] (feat) move funding payments and positions to trading --- routers/accounts.py | 250 -------------------------------------------- routers/trading.py | 121 ++++++++++++++++++++- 2 files changed, 117 insertions(+), 254 deletions(-) diff --git a/routers/accounts.py b/routers/accounts.py index 56dd9e94..60c44537 100644 --- a/routers/accounts.py +++ b/routers/accounts.py @@ -223,256 +223,6 @@ async def get_accounts_distribution( return filtered_distribution - -@router.get("/positions", response_model=List[Dict]) -async def 
get_positions( - account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), - connector_names: Optional[List[str]] = Query(default=None, description="Filter by connector names"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get current positions across all or filtered perpetual connectors. - - This endpoint fetches real-time position data directly from the connectors, - including unrealized PnL, leverage, funding fees, and margin information. - - Args: - account_names: Optional list of account names to filter by - connector_names: Optional list of connector names to filter by - - Returns: - List of current position dictionaries with real-time data from filtered accounts/connectors - - Raises: - HTTPException: 500 if there's an error fetching positions - """ - try: - all_positions = [] - all_connectors = accounts_service.connector_manager.get_all_connectors() - - # Filter accounts - accounts_to_check = account_names if account_names else list(all_connectors.keys()) - - for account_name in accounts_to_check: - if account_name in all_connectors: - # Filter connectors - connectors_to_check = connector_names if connector_names else list(all_connectors[account_name].keys()) - - for connector_name in connectors_to_check: - # Only fetch positions from perpetual connectors - if connector_name in all_connectors[account_name] and "_perpetual" in connector_name: - try: - positions = await accounts_service.get_account_positions(account_name, connector_name) - all_positions.extend(positions) - except Exception as e: - # Log error but continue with other connectors - import logging - logging.warning(f"Failed to get positions for {account_name}/{connector_name}: {e}") - - return all_positions - - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error fetching positions: {str(e)}") - - -@router.get("/positions/snapshots", response_model=List[Dict]) -async def get_position_snapshots( - account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), - connector_names: Optional[List[str]] = Query(default=None, description="Filter by connector names"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get latest position snapshots from database for historical analysis. - - Returns the most recent position snapshots for all or filtered accounts, - optionally filtered by connectors. Useful for tracking position history - and performance over time. 
- - Args: - account_names: Optional list of account names to filter by - connector_names: Optional list of connector names to filter by - - Returns: - List of latest position snapshot dictionaries from database - - Raises: - HTTPException: 500 if there's an error fetching snapshots - """ - try: - all_snapshots = [] - - # Get all accounts if not specified - if not account_names: - account_names = accounts_service.list_accounts() - - for account_name in account_names: - try: - # If specific connectors are requested, fetch each separately - if connector_names: - for connector_name in connector_names: - snapshots = await accounts_service.get_position_snapshots(account_name, connector_name) - all_snapshots.extend(snapshots) - else: - # Get all snapshots for the account - snapshots = await accounts_service.get_position_snapshots(account_name, None) - all_snapshots.extend(snapshots) - except Exception as e: - # Log error but continue with other accounts - import logging - logging.warning(f"Failed to get position snapshots for {account_name}: {e}") - - return all_snapshots - - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error fetching position snapshots: {str(e)}") - - -@router.get("/funding-payments", response_model=List[Dict]) -async def get_funding_payments( - account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), - connector_names: Optional[List[str]] = Query(default=None, description="Filter by connector names"), - trading_pair: Optional[str] = Query(default=None, description="Filter by trading pair"), - limit: int = Query(default=100, ge=1, le=1000, description="Maximum number of records"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get funding payment history across all or filtered perpetual connectors. - - This endpoint retrieves historical funding payment records including - funding rates, payment amounts, and position data at time of payment. 
- - Args: - account_names: Optional list of account names to filter by - connector_names: Optional list of connector names to filter by - trading_pair: Optional trading pair filter - limit: Maximum number of records to return - - Returns: - List of funding payment records with rates, amounts, and position data - - Raises: - HTTPException: 500 if there's an error fetching funding payments - """ - try: - all_funding_payments = [] - all_connectors = accounts_service.connector_manager.get_all_connectors() - - # Filter accounts - accounts_to_check = account_names if account_names else list(all_connectors.keys()) - - for account_name in accounts_to_check: - if account_name in all_connectors: - # Filter connectors - connectors_to_check = connector_names if connector_names else list(all_connectors[account_name].keys()) - - for connector_name in connectors_to_check: - # Only fetch funding payments from perpetual connectors - if connector_name in all_connectors[account_name] and "_perpetual" in connector_name: - try: - payments = await accounts_service.get_funding_payments( - account_name=account_name, - connector_name=connector_name, - trading_pair=trading_pair, - limit=limit - ) - all_funding_payments.extend(payments) - except Exception as e: - # Log error but continue with other connectors - import logging - logging.warning(f"Failed to get funding payments for {account_name}/{connector_name}: {e}") - - # Sort by timestamp (most recent first) - all_funding_payments.sort(key=lambda x: x.get("timestamp", ""), reverse=True) - - # Apply limit to the combined results - return all_funding_payments[:limit] - - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error fetching funding payments: {str(e)}") - - -@router.get("/funding-fees/summary", response_model=List[Dict]) -async def get_funding_fees_summary( - account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), - connector_names: Optional[List[str]] = Query(default=None, description="Filter by connector names"), - trading_pairs: Optional[List[str]] = Query(default=None, description="Filter by trading pairs"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get total funding fees summary across all or filtered perpetual connectors. - - This endpoint provides aggregated funding fee information including - total fees paid/received, payment count, and fee currency for each - trading pair across the filtered accounts and connectors. 
- - Args: - account_names: Optional list of account names to filter by - connector_names: Optional list of connector names to filter by - trading_pairs: Optional list of trading pairs to filter by - - Returns: - List of funding fee summaries by trading pair with totals - - Raises: - HTTPException: 500 if there's an error calculating fees - """ - try: - all_fee_summaries = [] - all_connectors = accounts_service.connector_manager.get_all_connectors() - - # Filter accounts - accounts_to_check = account_names if account_names else list(all_connectors.keys()) - - for account_name in accounts_to_check: - if account_name in all_connectors: - # Filter connectors - connectors_to_check = connector_names if connector_names else list(all_connectors[account_name].keys()) - - for connector_name in connectors_to_check: - # Only get fees from perpetual connectors - if connector_name in all_connectors[account_name] and "_perpetual" in connector_name: - # Get all trading pairs for this connector if not specified - pairs_to_check = trading_pairs if trading_pairs else [] - - # If no specific pairs requested, get all available pairs from funding payments - if not pairs_to_check: - try: - # Get a sample of funding payments to find available pairs - payments = await accounts_service.get_funding_payments( - account_name=account_name, - connector_name=connector_name, - limit=1000 - ) - # Extract unique trading pairs - pairs_to_check = list( - set(p.get("trading_pair") for p in payments if p.get("trading_pair"))) - except Exception: - continue - - # Get fee summary for each pair - for trading_pair in pairs_to_check: - try: - fee_summary = await accounts_service.get_total_funding_fees( - account_name=account_name, - connector_name=connector_name, - trading_pair=trading_pair - ) - # Add account and connector info to the summary - fee_summary["account_name"] = account_name - fee_summary["connector_name"] = connector_name - all_fee_summaries.append(fee_summary) - except Exception as e: - # Log error but continue with other pairs - import logging - logging.warning( - f"Failed to get funding fees for {account_name}/{connector_name}/{trading_pair}: {e}") - - return all_fee_summaries - - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error calculating funding fees: {str(e)}") - - @router.get("/", response_model=List[str]) async def list_accounts(accounts_service: AccountsService = Depends(get_accounts_service)): """ diff --git a/routers/trading.py b/routers/trading.py index b3acab01..d5858737 100644 --- a/routers/trading.py +++ b/routers/trading.py @@ -1,5 +1,4 @@ from typing import Dict, List, Optional -from datetime import datetime from fastapi import APIRouter, HTTPException, Depends, Query from hummingbot.core.data_type.common import PositionMode, TradeType, OrderType, PositionAction @@ -7,13 +6,12 @@ from services.accounts_service import AccountsService from deps import get_accounts_service, get_market_data_feed_manager -from models import PaginatedResponse, TradeRequest, TradeResponse +from models import TradeRequest, TradeResponse from models.accounts import PositionModeRequest, LeverageRequest router = APIRouter(tags=["Trading"], prefix="/trading") - # Trade Execution @router.post("/orders", response_model=TradeResponse, status_code=status.HTTP_201_CREATED) async def place_trade(trade_request: TradeRequest, @@ -70,6 +68,58 @@ async def place_trade(trade_request: TradeRequest, +@router.get("/positions", response_model=List[Dict]) +async def get_positions( + account_names: Optional[List[str]] 
= Query(default=None, description="Filter by account names"), + connector_names: Optional[List[str]] = Query(default=None, description="Filter by connector names"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get current positions across all or filtered perpetual connectors. + + This endpoint fetches real-time position data directly from the connectors, + including unrealized PnL, leverage, funding fees, and margin information. + + Args: + account_names: Optional list of account names to filter by + connector_names: Optional list of connector names to filter by + + Returns: + List of current position dictionaries with real-time data from filtered accounts/connectors + + Raises: + HTTPException: 500 if there's an error fetching positions + """ + try: + all_positions = [] + all_connectors = accounts_service.connector_manager.get_all_connectors() + + # Filter accounts + accounts_to_check = account_names if account_names else list(all_connectors.keys()) + + for account_name in accounts_to_check: + if account_name in all_connectors: + # Filter connectors + connectors_to_check = connector_names if connector_names else list(all_connectors[account_name].keys()) + + for connector_name in connectors_to_check: + # Only fetch positions from perpetual connectors + if connector_name in all_connectors[account_name] and "_perpetual" in connector_name: + try: + positions = await accounts_service.get_account_positions(account_name, connector_name) + all_positions.extend(positions) + except Exception as e: + # Log error but continue with other connectors + import logging + logging.warning(f"Failed to get positions for {account_name}/{connector_name}: {e}") + + return all_positions + + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching positions: {str(e)}") + + + # Order Management @router.get("/{account_name}/{connector_name}/orders/active", response_model=Dict[str, Dict]) async def get_connector_active_orders(account_name: str, connector_name: str, @@ -558,4 +608,67 @@ async def get_supported_order_types(account_name: str, connector_name: str, except HTTPException: raise except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving order types: {str(e)}") \ No newline at end of file + raise HTTPException(status_code=500, detail=f"Error retrieving order types: {str(e)}") + +@router.get("/funding-payments", response_model=List[Dict]) +async def get_funding_payments( + account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), + connector_names: Optional[List[str]] = Query(default=None, description="Filter by connector names"), + trading_pair: Optional[str] = Query(default=None, description="Filter by trading pair"), + limit: int = Query(default=100, ge=1, le=1000, description="Maximum number of records"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get funding payment history across all or filtered perpetual connectors. + + This endpoint retrieves historical funding payment records including + funding rates, payment amounts, and position data at time of payment. 
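+
+    Illustrative client call (a sketch only; assumes a local deployment on port 8000
+    with the default admin/admin credentials):
+
+        import requests
+
+        resp = requests.get(
+            "http://localhost:8000/trading/funding-payments",
+            params={"connector_names": ["binance_perpetual"], "limit": 10},
+            auth=("admin", "admin"),
+        )
+        for payment in resp.json():
+            print(payment.get("trading_pair"), payment.get("timestamp"))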
+
+    Args:
+        account_names: Optional list of account names to filter by
+        connector_names: Optional list of connector names to filter by
+        trading_pair: Optional trading pair filter
+        limit: Maximum number of records to return
+
+    Returns:
+        List of funding payment records with rates, amounts, and position data
+
+    Raises:
+        HTTPException: 500 if there's an error fetching funding payments
+    """
+    try:
+        all_funding_payments = []
+        all_connectors = accounts_service.connector_manager.get_all_connectors()
+
+        # Filter accounts
+        accounts_to_check = account_names if account_names else list(all_connectors.keys())
+
+        for account_name in accounts_to_check:
+            if account_name in all_connectors:
+                # Filter connectors
+                connectors_to_check = connector_names if connector_names else list(all_connectors[account_name].keys())
+
+                for connector_name in connectors_to_check:
+                    # Only fetch funding payments from perpetual connectors
+                    if connector_name in all_connectors[account_name] and "_perpetual" in connector_name:
+                        try:
+                            payments = await accounts_service.get_funding_payments(
+                                account_name=account_name,
+                                connector_name=connector_name,
+                                trading_pair=trading_pair,
+                                limit=limit
+                            )
+                            all_funding_payments.extend(payments)
+                        except Exception as e:
+                            # Log error but continue with other connectors
+                            import logging
+                            logging.warning(f"Failed to get funding payments for {account_name}/{connector_name}: {e}")
+
+        # Sort by timestamp (most recent first)
+        all_funding_payments.sort(key=lambda x: x.get("timestamp", ""), reverse=True)
+
+        # Apply limit to the combined results
+        return all_funding_payments[:limit]
+
+    except Exception as e:
+        raise HTTPException(status_code=500, detail=f"Error fetching funding payments: {str(e)}")
\ No newline at end of file

From ee61ae734b4cede17042ca9056d11b756cd8087f Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Mon, 30 Jun 2025 23:10:05 +0200
Subject: [PATCH 161/244] (feat) add portfolio router

---
 main.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/main.py b/main.py
index 8f6dd928..34272c15 100644
--- a/main.py
+++ b/main.py
@@ -26,6 +26,7 @@
     controllers,
     docker,
     market_data,
+    portfolio,
     scripts,
     trading
 )
@@ -172,6 +173,7 @@ def auth_user(
 app.include_router(docker.router, dependencies=[Depends(auth_user)])
 app.include_router(accounts.router, dependencies=[Depends(auth_user)])
 app.include_router(connectors.router, dependencies=[Depends(auth_user)])
+app.include_router(portfolio.router, dependencies=[Depends(auth_user)])
 app.include_router(trading.router, dependencies=[Depends(auth_user)])
 app.include_router(bot_orchestration.router, dependencies=[Depends(auth_user)])
 app.include_router(controllers.router, dependencies=[Depends(auth_user)])

From 9d72ed4d63c55e9c23e82b5264871d945217a1df Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Mon, 30 Jun 2025 23:10:16 +0200
Subject: [PATCH 162/244] (feat) separate portfolio from accounts

---
 routers/accounts.py  | 212 ------------------------------------------
 routers/portfolio.py | 218 +++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 218 insertions(+), 212 deletions(-)
 create mode 100644 routers/portfolio.py

diff --git a/routers/accounts.py b/routers/accounts.py
index 60c44537..200a2930 100644
--- a/routers/accounts.py
+++ b/routers/accounts.py
@@ -11,218 +11,6 @@
 router = APIRouter(tags=["Accounts"], prefix="/accounts")


-# Portfolio & Account State Monitoring
-@router.get("/portfolio/state", response_model=Dict[str, Dict[str, List[Dict]]])
-async def get_portfolio_state(
-    account_names: Optional[List[str]] =
Query(default=None, description="Filter by account names"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get the current state of all or filtered accounts portfolio. - - Args: - account_names: Optional list of account names to filter by - - Returns: - Dict containing account states with connector balances and token information - """ - all_states = accounts_service.get_accounts_state() - - # If no filter, return all accounts - if not account_names: - return all_states - - # Filter by requested accounts - filtered_states = {} - for account_name in account_names: - if account_name in all_states: - filtered_states[account_name] = all_states[account_name] - - return filtered_states - - -@router.get("/portfolio/history", response_model=PaginatedResponse) -async def get_portfolio_history( - account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), - limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), - cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), - start_time: datetime = Query(default=None, description="Start time for filtering"), - end_time: datetime = Query(default=None, description="End time for filtering"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get the historical state of all or filtered accounts portfolio with pagination. - - Args: - account_names: Optional list of account names to filter by - limit: Number of items per page (1-1000) - cursor: Cursor for pagination (ISO timestamp) - start_time: Start time for filtering results - end_time: End time for filtering results - - Returns: - Paginated response with historical portfolio data - """ - try: - if not account_names: - # Get history for all accounts - data, next_cursor, has_more = await accounts_service.load_account_state_history( - limit=limit, - cursor=cursor, - start_time=start_time, - end_time=end_time - ) - else: - # Get history for specific accounts - need to aggregate - all_data = [] - for account_name in account_names: - acc_data, _, _ = await accounts_service.get_account_state_history( - account_name=account_name, - limit=limit, - cursor=cursor, - start_time=start_time, - end_time=end_time - ) - all_data.extend(acc_data) - - # Sort by timestamp and apply pagination - all_data.sort(key=lambda x: x.get("timestamp", ""), reverse=True) - - # Apply limit - data = all_data[:limit] - has_more = len(all_data) > limit - next_cursor = data[-1]["timestamp"] if data and has_more else None - - return PaginatedResponse( - data=data, - pagination={ - "limit": limit, - "has_more": has_more, - "next_cursor": next_cursor, - "current_cursor": cursor, - "filters": { - "account_names": account_names, - "start_time": start_time.isoformat() if start_time else None, - "end_time": end_time.isoformat() if end_time else None - } - } - ) - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) - - - - -@router.get("/portfolio/distribution") -async def get_portfolio_distribution( - account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get portfolio distribution by tokens with percentages across all or filtered accounts. 
- - Args: - account_names: Optional list of account names to filter by - - Returns: - Dictionary with token distribution including percentages, values, and breakdown by accounts/connectors - """ - if not account_names: - # Get distribution for all accounts - return accounts_service.get_portfolio_distribution() - elif len(account_names) == 1: - # Single account - use existing method - return accounts_service.get_portfolio_distribution(account_names[0]) - else: - # Multiple accounts - need to aggregate - aggregated_distribution = { - "tokens": {}, - "total_value": 0, - "token_count": 0, - "accounts": {} - } - - for account_name in account_names: - account_dist = accounts_service.get_portfolio_distribution(account_name) - - # Skip if account doesn't exist or has error - if account_dist.get("error") or account_dist.get("token_count", 0) == 0: - continue - - # Aggregate token data - for token, token_data in account_dist.get("tokens", {}).items(): - if token not in aggregated_distribution["tokens"]: - aggregated_distribution["tokens"][token] = { - "token": token, - "value": 0, - "percentage": 0, - "accounts": {} - } - - aggregated_distribution["tokens"][token]["value"] += token_data.get("value", 0) - - # Copy account-specific data - for acc_name, acc_data in token_data.get("accounts", {}).items(): - aggregated_distribution["tokens"][token]["accounts"][acc_name] = acc_data - - aggregated_distribution["total_value"] += account_dist.get("total_value", 0) - aggregated_distribution["accounts"][account_name] = account_dist.get("accounts", {}).get(account_name, {}) - - # Recalculate percentages - total_value = aggregated_distribution["total_value"] - if total_value > 0: - for token_data in aggregated_distribution["tokens"].values(): - token_data["percentage"] = (token_data["value"] / total_value) * 100 - - aggregated_distribution["token_count"] = len(aggregated_distribution["tokens"]) - - return aggregated_distribution - - - - -@router.get("/portfolio/accounts-distribution") -async def get_accounts_distribution( - account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get portfolio distribution by accounts with percentages. 
- - Args: - account_names: Optional list of account names to filter by - - Returns: - Dictionary with account distribution including percentages, values, and breakdown by connectors - """ - all_distribution = accounts_service.get_account_distribution() - - # If no filter, return all accounts - if not account_names: - return all_distribution - - # Filter the distribution by requested accounts - filtered_distribution = { - "accounts": {}, - "total_value": 0, - "account_count": 0 - } - - for account_name in account_names: - if account_name in all_distribution.get("accounts", {}): - filtered_distribution["accounts"][account_name] = all_distribution["accounts"][account_name] - filtered_distribution["total_value"] += all_distribution["accounts"][account_name].get("total_value", 0) - - # Recalculate percentages - total_value = filtered_distribution["total_value"] - if total_value > 0: - for account_data in filtered_distribution["accounts"].values(): - account_data["percentage"] = (account_data.get("total_value", 0) / total_value) * 100 - - filtered_distribution["account_count"] = len(filtered_distribution["accounts"]) - - return filtered_distribution - @router.get("/", response_model=List[str]) async def list_accounts(accounts_service: AccountsService = Depends(get_accounts_service)): """ diff --git a/routers/portfolio.py b/routers/portfolio.py new file mode 100644 index 00000000..937022b4 --- /dev/null +++ b/routers/portfolio.py @@ -0,0 +1,218 @@ +from typing import Dict, List, Optional +from datetime import datetime + +from fastapi import APIRouter, HTTPException, Depends, Query + +from services.accounts_service import AccountsService +from deps import get_accounts_service +from models import PaginatedResponse + +router = APIRouter(tags=["Portfolio"], prefix="/portfolio") + + +@router.get("/state", response_model=Dict[str, Dict[str, List[Dict]]]) +async def get_portfolio_state( + account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get the current state of all or filtered accounts portfolio. + + Args: + account_names: Optional list of account names to filter by + + Returns: + Dict containing account states with connector balances and token information + """ + all_states = accounts_service.get_accounts_state() + + # If no filter, return all accounts + if not account_names: + return all_states + + # Filter by requested accounts + filtered_states = {} + for account_name in account_names: + if account_name in all_states: + filtered_states[account_name] = all_states[account_name] + + return filtered_states + + +@router.get("/history", response_model=PaginatedResponse) +async def get_portfolio_history( + account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), + limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), + cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), + start_time: datetime = Query(default=None, description="Start time for filtering"), + end_time: datetime = Query(default=None, description="End time for filtering"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get the historical state of all or filtered accounts portfolio with pagination. 
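+
+    Example of walking the cursor (illustrative sketch; assumes localhost:8000,
+    default admin/admin credentials, and a user-supplied process() handler):
+
+        import requests
+
+        cursor = None
+        while True:
+            params = {"limit": 100}
+            if cursor:
+                params["cursor"] = cursor
+            page = requests.get(
+                "http://localhost:8000/portfolio/history",
+                params=params,
+                auth=("admin", "admin"),
+            ).json()
+            process(page["data"])  # process() is a placeholder, not defined here
+            if not page["pagination"]["has_more"]:
+                break
+            cursor = page["pagination"]["next_cursor"]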
+ + Args: + account_names: Optional list of account names to filter by + limit: Number of items per page (1-1000) + cursor: Cursor for pagination (ISO timestamp) + start_time: Start time for filtering results + end_time: End time for filtering results + + Returns: + Paginated response with historical portfolio data + """ + try: + if not account_names: + # Get history for all accounts + data, next_cursor, has_more = await accounts_service.load_account_state_history( + limit=limit, + cursor=cursor, + start_time=start_time, + end_time=end_time + ) + else: + # Get history for specific accounts - need to aggregate + all_data = [] + for account_name in account_names: + acc_data, _, _ = await accounts_service.get_account_state_history( + account_name=account_name, + limit=limit, + cursor=cursor, + start_time=start_time, + end_time=end_time + ) + all_data.extend(acc_data) + + # Sort by timestamp and apply pagination + all_data.sort(key=lambda x: x.get("timestamp", ""), reverse=True) + + # Apply limit + data = all_data[:limit] + has_more = len(all_data) > limit + next_cursor = data[-1]["timestamp"] if data and has_more else None + + return PaginatedResponse( + data=data, + pagination={ + "limit": limit, + "has_more": has_more, + "next_cursor": next_cursor, + "current_cursor": cursor, + "filters": { + "account_names": account_names, + "start_time": start_time.isoformat() if start_time else None, + "end_time": end_time.isoformat() if end_time else None + } + } + ) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/distribution") +async def get_portfolio_distribution( + account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get portfolio distribution by tokens with percentages across all or filtered accounts. 
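+
+    Illustrative shape of the aggregated multi-account response (all values
+    hypothetical):
+
+        {
+            "tokens": {
+                "USDT": {"token": "USDT", "value": 1500.0, "percentage": 75.0, "accounts": {...}},
+                "BTC": {"token": "BTC", "value": 500.0, "percentage": 25.0, "accounts": {...}}
+            },
+            "total_value": 2000.0,
+            "token_count": 2,
+            "accounts": {...}
+        }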
+ + Args: + account_names: Optional list of account names to filter by + + Returns: + Dictionary with token distribution including percentages, values, and breakdown by accounts/connectors + """ + if not account_names: + # Get distribution for all accounts + return accounts_service.get_portfolio_distribution() + elif len(account_names) == 1: + # Single account - use existing method + return accounts_service.get_portfolio_distribution(account_names[0]) + else: + # Multiple accounts - need to aggregate + aggregated_distribution = { + "tokens": {}, + "total_value": 0, + "token_count": 0, + "accounts": {} + } + + for account_name in account_names: + account_dist = accounts_service.get_portfolio_distribution(account_name) + + # Skip if account doesn't exist or has error + if account_dist.get("error") or account_dist.get("token_count", 0) == 0: + continue + + # Aggregate token data + for token, token_data in account_dist.get("tokens", {}).items(): + if token not in aggregated_distribution["tokens"]: + aggregated_distribution["tokens"][token] = { + "token": token, + "value": 0, + "percentage": 0, + "accounts": {} + } + + aggregated_distribution["tokens"][token]["value"] += token_data.get("value", 0) + + # Copy account-specific data + for acc_name, acc_data in token_data.get("accounts", {}).items(): + aggregated_distribution["tokens"][token]["accounts"][acc_name] = acc_data + + aggregated_distribution["total_value"] += account_dist.get("total_value", 0) + aggregated_distribution["accounts"][account_name] = account_dist.get("accounts", {}).get(account_name, {}) + + # Recalculate percentages + total_value = aggregated_distribution["total_value"] + if total_value > 0: + for token_data in aggregated_distribution["tokens"].values(): + token_data["percentage"] = (token_data["value"] / total_value) * 100 + + aggregated_distribution["token_count"] = len(aggregated_distribution["tokens"]) + + return aggregated_distribution + + +@router.get("/accounts-distribution") +async def get_accounts_distribution( + account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get portfolio distribution by accounts with percentages. 
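+
+    Illustrative response shape (account names and values are hypothetical):
+
+        {
+            "accounts": {
+                "master_account": {"total_value": 1200.0, "percentage": 60.0},
+                "sub_account": {"total_value": 800.0, "percentage": 40.0}
+            },
+            "total_value": 2000.0,
+            "account_count": 2
+        }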
+
+    Args:
+        account_names: Optional list of account names to filter by
+
+    Returns:
+        Dictionary with account distribution including percentages, values, and breakdown by connectors
+    """
+    all_distribution = accounts_service.get_account_distribution()
+
+    # If no filter, return all accounts
+    if not account_names:
+        return all_distribution
+
+    # Filter the distribution by requested accounts
+    filtered_distribution = {
+        "accounts": {},
+        "total_value": 0,
+        "account_count": 0
+    }
+
+    for account_name in account_names:
+        if account_name in all_distribution.get("accounts", {}):
+            filtered_distribution["accounts"][account_name] = all_distribution["accounts"][account_name]
+            filtered_distribution["total_value"] += all_distribution["accounts"][account_name].get("total_value", 0)
+
+    # Recalculate percentages
+    total_value = filtered_distribution["total_value"]
+    if total_value > 0:
+        for account_data in filtered_distribution["accounts"].values():
+            account_data["percentage"] = (account_data.get("total_value", 0) / total_value) * 100
+
+    filtered_distribution["account_count"] = len(filtered_distribution["accounts"])
+
+    return filtered_distribution
\ No newline at end of file

From 6bcb813f2343cc1771679202a45f2c7aab275d10 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Mon, 30 Jun 2025 23:10:25 +0200
Subject: [PATCH 163/244] (feat) rename endpoint for deploying scripts

---
 routers/bot_orchestration.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py
index cfebc7b3..c51c59b3 100644
--- a/routers/bot_orchestration.py
+++ b/routers/bot_orchestration.py
@@ -325,13 +325,13 @@ async def stop_and_archive_bot(
         raise HTTPException(status_code=500, detail=str(e))


-@router.post("/create-hummingbot-instance")
-async def create_hummingbot_instance(
+@router.post("/deploy-v2-script")
+async def deploy_v2_script(
     config: V2ScriptDeployment,
     docker_manager: DockerService = Depends(get_docker_service)
 ):
     """
-    Create a new Hummingbot instance with the specified configuration.
+    Creates and autostarts a V2 script, applying the given configuration if one is provided.

     Args:
         config: Configuration for the new Hummingbot instance

From 685c1d2b03b7491844e575f30ba5e07264cd5181 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Mon, 30 Jun 2025 23:12:23 +0200
Subject: [PATCH 164/244] (feat) add deploy of db and broker in dev mode

---
 run.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/run.sh b/run.sh
index d0b049a0..9ff5c468 100755
--- a/run.sh
+++ b/run.sh
@@ -8,6 +8,7 @@ if [[ "$1" == "--dev" ]]; then
     echo "Running API from source..."
# Activate conda environment and run with uvicorn + docker compose up emqx postgres -d conda activate backend-api uvicorn main:app --reload else From 1be4139dd26efc59d99fa95906f0d640322f5356 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 30 Jun 2025 23:28:40 +0200 Subject: [PATCH 165/244] (feat) separate models for new routers --- models/__init__.py | 71 +++++++++++++++++++++ models/archived_bots.py | 134 ++++++++++++++++++++++++++++++++++++++++ models/connectors.py | 56 +++++++++++++++++ models/portfolio.py | 97 +++++++++++++++++++++++++++++ 4 files changed, 358 insertions(+) create mode 100644 models/archived_bots.py create mode 100644 models/connectors.py create mode 100644 models/portfolio.py diff --git a/models/__init__.py b/models/__init__.py index 9f91005c..a2d278fd 100644 --- a/models/__init__.py +++ b/models/__init__.py @@ -86,6 +86,46 @@ # Pagination models from .pagination import PaginatedResponse, PaginationParams, TimeRangePaginationParams +# Connector models +from .connectors import ( + ConnectorInfo, + ConnectorConfigMapResponse, + TradingRule, + ConnectorTradingRulesResponse, + ConnectorOrderTypesResponse, + ConnectorListResponse, +) + +# Portfolio models +from .portfolio import ( + TokenBalance, + ConnectorBalances, + AccountPortfolioState, + PortfolioStateResponse, + TokenDistribution, + PortfolioDistributionResponse, + AccountDistribution, + AccountsDistributionResponse, + HistoricalPortfolioState, + PortfolioHistoryFilters, +) + +# Archived bots models +from .archived_bots import ( + OrderStatus, + DatabaseStatus, + BotSummary, + PerformanceMetrics, + TradeDetail, + OrderDetail, + ExecutorInfo, + ArchivedBotListResponse, + BotPerformanceResponse, + TradeHistoryResponse, + OrderHistoryResponse, + ExecutorsResponse, +) + __all__ = [ # Bot orchestration models "BotAction", @@ -147,4 +187,35 @@ "PaginatedResponse", "PaginationParams", "TimeRangePaginationParams", + # Connector models + "ConnectorInfo", + "ConnectorConfigMapResponse", + "TradingRule", + "ConnectorTradingRulesResponse", + "ConnectorOrderTypesResponse", + "ConnectorListResponse", + # Portfolio models + "TokenBalance", + "ConnectorBalances", + "AccountPortfolioState", + "PortfolioStateResponse", + "TokenDistribution", + "PortfolioDistributionResponse", + "AccountDistribution", + "AccountsDistributionResponse", + "HistoricalPortfolioState", + "PortfolioHistoryFilters", + # Archived bots models + "OrderStatus", + "DatabaseStatus", + "BotSummary", + "PerformanceMetrics", + "TradeDetail", + "OrderDetail", + "ExecutorInfo", + "ArchivedBotListResponse", + "BotPerformanceResponse", + "TradeHistoryResponse", + "OrderHistoryResponse", + "ExecutorsResponse", ] \ No newline at end of file diff --git a/models/archived_bots.py b/models/archived_bots.py new file mode 100644 index 00000000..c03d32fe --- /dev/null +++ b/models/archived_bots.py @@ -0,0 +1,134 @@ +""" +Pydantic models for the archived bots router. + +These models define the request/response schemas for archived bot analysis endpoints. 
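+
+Minimal usage sketch (hypothetical values, shown only to illustrate how the
+schemas compose; not part of any endpoint):
+
+    from models.archived_bots import BotSummary, PerformanceMetrics
+
+    summary = BotSummary(bot_name="hummingbot-pmm_1", markets=["binance"])
+    metrics = PerformanceMetrics(
+        total_pnl=12.5, total_volume=10000.0, avg_return=0.1,
+        win_rate=55.0, total_trades=42,
+    )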
+""" + +from typing import Dict, List, Optional, Any +from datetime import datetime +from pydantic import BaseModel, Field +from enum import Enum + + +class OrderStatus(str, Enum): + """Order status enumeration""" + OPEN = "OPEN" + FILLED = "FILLED" + CANCELLED = "CANCELLED" + FAILED = "FAILED" + + +class DatabaseStatus(BaseModel): + """Database status information""" + db_path: str = Field(description="Path to the database file") + status: Dict[str, Any] = Field(description="Database health status") + healthy: bool = Field(description="Whether the database is healthy") + + +class BotSummary(BaseModel): + """Summary information for an archived bot""" + bot_name: str = Field(description="Name of the bot") + start_time: Optional[datetime] = Field(default=None, description="Bot start time") + end_time: Optional[datetime] = Field(default=None, description="Bot end time") + total_trades: int = Field(default=0, description="Total number of trades") + total_orders: int = Field(default=0, description="Total number of orders") + markets: List[str] = Field(default_factory=list, description="List of traded markets") + strategies: List[str] = Field(default_factory=list, description="List of strategies used") + + +class PerformanceMetrics(BaseModel): + """Performance metrics for an archived bot""" + total_pnl: float = Field(description="Total profit and loss") + total_volume: float = Field(description="Total trading volume") + avg_return: float = Field(description="Average return per trade") + win_rate: float = Field(description="Percentage of winning trades") + sharpe_ratio: Optional[float] = Field(default=None, description="Sharpe ratio") + max_drawdown: Optional[float] = Field(default=None, description="Maximum drawdown") + total_trades: int = Field(description="Total number of trades") + + +class TradeDetail(BaseModel): + """Detailed trade information""" + id: Optional[int] = Field(default=None, description="Trade ID") + config_file_path: str = Field(description="Configuration file path") + strategy: str = Field(description="Strategy name") + market: str = Field(description="Market/exchange name") + symbol: str = Field(description="Trading symbol") + base_asset: str = Field(description="Base asset") + quote_asset: str = Field(description="Quote asset") + timestamp: datetime = Field(description="Trade timestamp") + order_id: str = Field(description="Order ID") + trade_type: str = Field(description="Trade type (BUY/SELL)") + price: float = Field(description="Trade price") + amount: float = Field(description="Trade amount") + trade_fee: Dict[str, float] = Field(description="Trade fees") + exchange_trade_id: str = Field(description="Exchange trade ID") + leverage: Optional[int] = Field(default=None, description="Leverage used") + position: Optional[str] = Field(default=None, description="Position type") + + +class OrderDetail(BaseModel): + """Detailed order information""" + id: Optional[int] = Field(default=None, description="Order ID") + client_order_id: str = Field(description="Client order ID") + exchange_order_id: Optional[str] = Field(default=None, description="Exchange order ID") + trading_pair: str = Field(description="Trading pair") + status: OrderStatus = Field(description="Order status") + order_type: str = Field(description="Order type") + amount: float = Field(description="Order amount") + price: Optional[float] = Field(default=None, description="Order price") + creation_timestamp: datetime = Field(description="Order creation time") + last_update_timestamp: Optional[datetime] = 
Field(default=None, description="Last update time") + filled_amount: Optional[float] = Field(default=None, description="Filled amount") + leverage: Optional[int] = Field(default=None, description="Leverage used") + position: Optional[str] = Field(default=None, description="Position type") + + +class ExecutorInfo(BaseModel): + """Executor information""" + controller_id: str = Field(description="Controller ID") + timestamp: datetime = Field(description="Timestamp") + type: str = Field(description="Executor type") + controller_config: Dict[str, Any] = Field(description="Controller configuration") + net_pnl_flat: float = Field(description="Net PnL in flat terms") + net_pnl_pct: float = Field(description="Net PnL percentage") + total_executors: int = Field(description="Total number of executors") + total_amount: float = Field(description="Total amount") + total_spent: float = Field(description="Total spent") + + +class ArchivedBotListResponse(BaseModel): + """Response for listing archived bots""" + bots: List[str] = Field(description="List of archived bot database paths") + count: int = Field(description="Total number of archived bots") + + +class BotPerformanceResponse(BaseModel): + """Response for bot performance analysis""" + bot_name: str = Field(description="Bot name") + metrics: PerformanceMetrics = Field(description="Performance metrics") + period_start: Optional[datetime] = Field(default=None, description="Analysis period start") + period_end: Optional[datetime] = Field(default=None, description="Analysis period end") + + +class TradeHistoryResponse(BaseModel): + """Response for trade history""" + trades: List[TradeDetail] = Field(description="List of trades") + total: int = Field(description="Total number of trades") + page: int = Field(description="Current page") + page_size: int = Field(description="Page size") + + +class OrderHistoryResponse(BaseModel): + """Response for order history""" + orders: List[OrderDetail] = Field(description="List of orders") + total: int = Field(description="Total number of orders") + page: int = Field(description="Current page") + page_size: int = Field(description="Page size") + filtered_by_status: Optional[OrderStatus] = Field(default=None, description="Status filter applied") + + +class ExecutorsResponse(BaseModel): + """Response for executors information""" + executors: List[ExecutorInfo] = Field(description="List of executors") + total: int = Field(description="Total number of executors") \ No newline at end of file diff --git a/models/connectors.py b/models/connectors.py new file mode 100644 index 00000000..eea431fe --- /dev/null +++ b/models/connectors.py @@ -0,0 +1,56 @@ +""" +Pydantic models for the connectors router. + +These models define the request/response schemas for connector-related endpoints. 
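+
+A minimal construction sketch (illustrative; connector names are placeholders):
+
+    response = ConnectorListResponse(connectors=["binance", "kucoin"], count=2)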
+""" + +from typing import Dict, List, Any, Optional +from pydantic import BaseModel, Field + + +class ConnectorInfo(BaseModel): + """Information about a connector""" + name: str = Field(description="Connector name") + is_perpetual: bool = Field(default=False, description="Whether the connector supports perpetual trading") + supported_order_types: Optional[List[str]] = Field(default=None, description="Supported order types") + + +class ConnectorConfigMapResponse(BaseModel): + """Response for connector configuration requirements""" + connector_name: str = Field(description="Name of the connector") + config_fields: List[str] = Field(description="List of required configuration fields") + + +class TradingRule(BaseModel): + """Trading rules for a specific trading pair""" + min_order_size: float = Field(description="Minimum order size") + max_order_size: float = Field(description="Maximum order size") + min_price_increment: float = Field(description="Minimum price increment") + min_base_amount_increment: float = Field(description="Minimum base amount increment") + min_quote_amount_increment: float = Field(description="Minimum quote amount increment") + min_notional_size: float = Field(description="Minimum notional size") + min_order_value: float = Field(description="Minimum order value") + max_price_significant_digits: float = Field(description="Maximum price significant digits") + supports_limit_orders: bool = Field(description="Whether limit orders are supported") + supports_market_orders: bool = Field(description="Whether market orders are supported") + buy_order_collateral_token: str = Field(description="Collateral token for buy orders") + sell_order_collateral_token: str = Field(description="Collateral token for sell orders") + + +class ConnectorTradingRulesResponse(BaseModel): + """Response for connector trading rules""" + connector: str = Field(description="Connector name") + trading_pairs: Optional[List[str]] = Field(default=None, description="Filtered trading pairs if provided") + rules: Dict[str, TradingRule] = Field(description="Trading rules by trading pair") + + +class ConnectorOrderTypesResponse(BaseModel): + """Response for supported order types""" + connector: str = Field(description="Connector name") + supported_order_types: List[str] = Field(description="List of supported order types") + + +class ConnectorListResponse(BaseModel): + """Response for list of available connectors""" + connectors: List[str] = Field(description="List of available connector names") + count: int = Field(description="Total number of connectors") \ No newline at end of file diff --git a/models/portfolio.py b/models/portfolio.py new file mode 100644 index 00000000..d2d179fc --- /dev/null +++ b/models/portfolio.py @@ -0,0 +1,97 @@ +""" +Pydantic models for the portfolio router. + +These models define the request/response schemas for portfolio-related endpoints. 
+""" + +from typing import Dict, List, Optional, Any +from datetime import datetime +from pydantic import BaseModel, Field + + +class TokenBalance(BaseModel): + """Token balance information""" + token: str = Field(description="Token symbol") + units: float = Field(description="Number of units held") + price: float = Field(description="Current price per unit") + value: float = Field(description="Total value (units * price)") + available_units: float = Field(description="Available units (not locked in orders)") + + +class ConnectorBalances(BaseModel): + """Balances for a specific connector""" + connector_name: str = Field(description="Name of the connector") + balances: List[TokenBalance] = Field(description="List of token balances") + total_value: float = Field(description="Total value across all tokens") + + +class AccountPortfolioState(BaseModel): + """Portfolio state for a single account""" + account_name: str = Field(description="Name of the account") + connectors: Dict[str, List[TokenBalance]] = Field(description="Balances by connector") + total_value: float = Field(description="Total account value across all connectors") + last_updated: Optional[datetime] = Field(default=None, description="Last update timestamp") + + +class PortfolioStateResponse(BaseModel): + """Response for portfolio state endpoint""" + accounts: Dict[str, Dict[str, List[Dict[str, Any]]]] = Field( + description="Portfolio state by account and connector" + ) + total_portfolio_value: Optional[float] = Field(default=None, description="Total value across all accounts") + timestamp: datetime = Field(default_factory=datetime.utcnow, description="Response timestamp") + + +class TokenDistribution(BaseModel): + """Token distribution information""" + token: str = Field(description="Token symbol") + total_value: float = Field(description="Total value of this token") + total_units: float = Field(description="Total units of this token") + percentage: float = Field(description="Percentage of total portfolio") + accounts: Dict[str, Dict[str, Any]] = Field( + description="Breakdown by account and connector" + ) + + +class PortfolioDistributionResponse(BaseModel): + """Response for portfolio distribution endpoint""" + total_portfolio_value: float = Field(description="Total portfolio value") + token_count: int = Field(description="Number of unique tokens") + distribution: List[TokenDistribution] = Field(description="Token distribution list") + account_filter: str = Field( + default="all_accounts", + description="Applied account filter (all_accounts or specific accounts)" + ) + + +class AccountDistribution(BaseModel): + """Account distribution information""" + account: str = Field(description="Account name") + total_value: float = Field(description="Total value in this account") + percentage: float = Field(description="Percentage of total portfolio") + connectors: Dict[str, Dict[str, float]] = Field( + description="Value breakdown by connector" + ) + + +class AccountsDistributionResponse(BaseModel): + """Response for accounts distribution endpoint""" + total_portfolio_value: float = Field(description="Total portfolio value") + account_count: int = Field(description="Number of accounts") + distribution: List[AccountDistribution] = Field(description="Account distribution list") + + +class HistoricalPortfolioState(BaseModel): + """Historical portfolio state entry""" + timestamp: datetime = Field(description="State timestamp") + state: Dict[str, Dict[str, List[Dict[str, Any]]]] = Field( + description="Portfolio state snapshot" + ) + 
total_value: Optional[float] = Field(default=None, description="Total value at this point") + + +class PortfolioHistoryFilters(BaseModel): + """Filters applied to portfolio history query""" + account_names: Optional[List[str]] = Field(default=None, description="Filtered account names") + start_time: Optional[datetime] = Field(default=None, description="Start time filter") + end_time: Optional[datetime] = Field(default=None, description="End time filter") \ No newline at end of file From 6bd3f8d513aaa63025b4ed444092864a5fdc101d Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 30 Jun 2025 23:28:48 +0200 Subject: [PATCH 166/244] (feat) reorder trading router --- routers/trading.py | 133 ++++++++++++--------------------------------- 1 file changed, 34 insertions(+), 99 deletions(-) diff --git a/routers/trading.py b/routers/trading.py index d5858737..ee2912ce 100644 --- a/routers/trading.py +++ b/routers/trading.py @@ -66,6 +66,40 @@ async def place_trade(trade_request: TradeRequest, raise HTTPException(status_code=500, detail=f"Unexpected error placing trade: {str(e)}") +@router.post("/{account_name}/{connector_name}/orders/{client_order_id}/cancel") +async def cancel_order(account_name: str, connector_name: str, client_order_id: str, + trading_pair: str = Query(..., description="Trading pair for the order to cancel"), + accounts_service: AccountsService = Depends(get_accounts_service)): + """ + Cancel a specific order by its client order ID. + + Args: + account_name: Name of the account + connector_name: Name of the connector + client_order_id: Client order ID to cancel + trading_pair: Trading pair for the order + accounts_service: Injected accounts service + + Returns: + Success message with cancelled order ID + + Raises: + HTTPException: 404 if account/connector not found, 500 for cancellation errors + """ + try: + cancelled_order_id = await accounts_service.cancel_order( + account_name=account_name, + connector_name=connector_name, + trading_pair=trading_pair, + client_order_id=client_order_id + ) + return {"message": f"Order {cancelled_order_id} cancelled successfully"} + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error cancelling order: {str(e)}") + + @router.get("/positions", response_model=List[Dict]) @@ -146,40 +180,6 @@ async def get_connector_active_orders(account_name: str, connector_name: str, raise HTTPException(status_code=500, detail=f"Error retrieving orders: {str(e)}") -@router.post("/{account_name}/{connector_name}/orders/{client_order_id}/cancel") -async def cancel_order(account_name: str, connector_name: str, client_order_id: str, - trading_pair: str = Query(..., description="Trading pair for the order to cancel"), - accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Cancel a specific order by its client order ID. 
- - Args: - account_name: Name of the account - connector_name: Name of the connector - client_order_id: Client order ID to cancel - trading_pair: Trading pair for the order - accounts_service: Injected accounts service - - Returns: - Success message with cancelled order ID - - Raises: - HTTPException: 404 if account/connector not found, 500 for cancellation errors - """ - try: - cancelled_order_id = await accounts_service.cancel_order( - account_name=account_name, - connector_name=connector_name, - trading_pair=trading_pair, - client_order_id=client_order_id - ) - return {"message": f"Order {cancelled_order_id} cancelled successfully"} - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error cancelling order: {str(e)}") - - # Global Order History @router.get("/orders", response_model=List[Dict]) async def get_all_orders( @@ -354,43 +354,6 @@ async def get_account_active_orders( return orders - -@router.get("/{account_name}/orders/summary", response_model=Dict) -async def get_account_orders_summary( - account_name: str, - start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), - end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get order summary statistics for a specific account. - - Args: - account_name: Name of the account - start_time: Optional start timestamp - end_time: Optional end timestamp - accounts_service: Injected accounts service - - Returns: - Order summary statistics including fill rate, volumes, etc. - - Raises: - HTTPException: 404 if account not found - """ - # Verify account exists - state = await accounts_service.get_account_current_state(account_name) - if not state: - raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - - summary = await accounts_service.get_orders_summary( - account_name=account_name, - start_time=start_time, - end_time=end_time, - ) - - return summary - - # Trade History @router.get("/trades", response_model=List[Dict]) async def get_all_trades( @@ -482,8 +445,6 @@ async def get_account_trades( return trades -# Trading Rules & Configuration - @router.post("/{account_name}/{connector_name}/position-mode") async def set_position_mode( account_name: str, @@ -584,32 +545,6 @@ async def set_leverage( except Exception as e: raise HTTPException(status_code=500, detail=f"Unexpected error setting leverage: {str(e)}") - -@router.get("/{account_name}/{connector_name}/order-types") -async def get_supported_order_types(account_name: str, connector_name: str, - accounts_service: AccountsService = Depends(get_accounts_service)): - """ - Get order types supported by a specific connector. 
- - Args: - account_name: Name of the account - connector_name: Name of the connector - accounts_service: Injected accounts service - - Returns: - List of supported order types (LIMIT, MARKET, LIMIT_MAKER) - - Raises: - HTTPException: 404 if account or connector not found - """ - try: - connector = await accounts_service.get_connector_instance(account_name, connector_name) - return [order_type.name for order_type in connector.supported_order_types()] - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving order types: {str(e)}") - @router.get("/funding-payments", response_model=List[Dict]) async def get_funding_payments( account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), From f1472d972548319f2b3dfae2037c367d992f95ec Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 1 Jul 2025 00:23:12 +0200 Subject: [PATCH 167/244] (feat) fix archived bots nans --- routers/archived_bots.py | 40 ++++------------------------------------ 1 file changed, 4 insertions(+), 36 deletions(-) diff --git a/routers/archived_bots.py b/routers/archived_bots.py index 3641ed0f..2ca19d2a 100644 --- a/routers/archived_bots.py +++ b/routers/archived_bots.py @@ -97,7 +97,7 @@ async def get_database_performance(db_path: str): } # Convert to records for JSON response - performance_records = performance_data.to_dict('records') + performance_records = performance_data.fillna(0).to_dict('records') # Calculate summary statistics final_row = performance_data.iloc[-1] if len(performance_data) > 0 else {} @@ -149,7 +149,7 @@ async def get_database_trades( return { "db_path": db_path, - "trades": trades_page.to_dict('records'), + "trades": trades_page.fillna(0).to_dict('records'), "pagination": { "total": total_trades, "limit": limit, @@ -194,7 +194,7 @@ async def get_database_orders( return { "db_path": db_path, - "orders": orders_page.to_dict('records'), + "orders": orders_page.fillna(0).to_dict('records'), "pagination": { "total": total_orders, "limit": limit, @@ -223,40 +223,8 @@ async def get_database_executors(db_path: str): return { "db_path": db_path, - "executors": executors.to_dict('records'), + "executors": executors.fillna(0).to_dict('records'), "total": len(executors) } except Exception as e: raise HTTPException(status_code=500, detail=f"Error fetching executors: {str(e)}") - - -@router.post("/read", response_model=List[Dict[str, Any]]) -async def read_databases(db_paths: List[str]): - """ - Read and extract basic information from multiple database files. 
-
-    Args:
-        db_paths: List of database file paths to read
-
-    Returns:
-        List of database status information
-    """
-    results = []
-    for db_path in db_paths:
-        try:
-            db = HummingbotDatabase(db_path)
-            db_info = {
-                "db_name": db.db_name,
-                "db_path": db.db_path,
-                "healthy": db.status["general_status"],
-                "status": db.status,
-            }
-        except Exception as e:
-            db_info = {
-                "db_name": "",
-                "db_path": db_path,
-                "healthy": False,
-                "error": str(e)
-            }
-        results.append(db_info)
-    return results
\ No newline at end of file

From 898b4f7a4f997350cd850417f46512b2d66d40c2 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Tue, 1 Jul 2025 01:22:06 +0200
Subject: [PATCH 168/244] (feat) improve filtering and pagination

---
 models/__init__.py   |  12 +-
 models/pagination.py |  12 +-
 models/trading.py    |  39 ++-
 routers/trading.py   | 634 +++++++++++++++++++++++--------------------
 4 files changed, 399 insertions(+), 298 deletions(-)

diff --git a/models/__init__.py b/models/__init__.py
index a2d278fd..bf5c0dbd 100644
--- a/models/__init__.py
+++ b/models/__init__.py
@@ -38,6 +38,11 @@
     TradeInfo,
     TradingRulesInfo,
     OrderTypesResponse,
+    OrderFilterRequest,
+    ActiveOrderFilterRequest,
+    PositionFilterRequest,
+    FundingPaymentFilterRequest,
+    TradeFilterRequest,
 )

 # Controller models
@@ -156,6 +161,11 @@
     "TradeInfo",
     "TradingRulesInfo",
     "OrderTypesResponse",
+    "OrderFilterRequest",
+    "ActiveOrderFilterRequest",
+    "PositionFilterRequest",
+    "FundingPaymentFilterRequest",
+    "TradeFilterRequest",
     # Controller models
     "ControllerType",
     "Controller",
@@ -185,7 +195,7 @@
     "BacktestingConfig",
     # Pagination models
     "PaginatedResponse",
-    "PaginationParams", 
+    "PaginationParams",
     "TimeRangePaginationParams",
diff --git a/models/pagination.py b/models/pagination.py
index 67cfe6f4..32309218 100644
--- a/models/pagination.py
+++ b/models/pagination.py
@@ -7,14 +7,14 @@ class PaginationParams(BaseModel):
     """Common pagination parameters."""
     limit: int = Field(default=100, ge=1, le=1000, description="Number of items per page")
     cursor: Optional[str] = Field(None, description="Cursor for next page")
-    
-    
+
+
 class TimeRangePaginationParams(BaseModel):
-    """Time-based pagination parameters."""
+    """Time-based pagination parameters for trading endpoints using integer timestamps."""
     limit: int = Field(default=100, ge=1, le=1000, description="Number of items per page")
-    start_time: Optional[datetime] = Field(None, description="Start time for filtering")
-    end_time: Optional[datetime] = Field(None, description="End time for filtering")
-    cursor: Optional[str] = Field(None, description="Cursor for next page (ISO timestamp)")
+    start_time: Optional[int] = Field(None, description="Start time as Unix timestamp in milliseconds")
+    end_time: Optional[int] = Field(None, description="End time as Unix timestamp in milliseconds")
+    cursor: Optional[str] = Field(None, description="Cursor for next page")


 class PaginatedResponse(BaseModel):
diff --git a/models/trading.py b/models/trading.py
index da5e8f11..b5057da9 100644
--- a/models/trading.py
+++ b/models/trading.py
@@ -3,6 +3,7 @@
 from decimal import Decimal
 from datetime import datetime
 from hummingbot.core.data_type.common import OrderType, TradeType, PositionAction
+from .pagination import PaginationParams, TimeRangePaginationParams


 class TradeRequest(BaseModel):
@@ -145,4 +146,40 @@ class TradingRulesInfo(BaseModel):
 class OrderTypesResponse(BaseModel):
     """Response for supported order types"""
     connector: str = Field(description="Connector name")
-    
supported_order_types: List[str] = Field(description="List of supported order types") \ No newline at end of file + supported_order_types: List[str] = Field(description="List of supported order types") + + +class OrderFilterRequest(TimeRangePaginationParams): + """Request model for filtering orders with multiple criteria""" + account_names: Optional[List[str]] = Field(default=None, description="List of account names to filter by") + connector_names: Optional[List[str]] = Field(default=None, description="List of connector names to filter by") + trading_pairs: Optional[List[str]] = Field(default=None, description="List of trading pairs to filter by") + status: Optional[str] = Field(default=None, description="Order status filter") + + +class ActiveOrderFilterRequest(PaginationParams): + """Request model for filtering active orders""" + account_names: Optional[List[str]] = Field(default=None, description="List of account names to filter by") + connector_names: Optional[List[str]] = Field(default=None, description="List of connector names to filter by") + trading_pairs: Optional[List[str]] = Field(default=None, description="List of trading pairs to filter by") + + +class PositionFilterRequest(PaginationParams): + """Request model for filtering positions""" + account_names: Optional[List[str]] = Field(default=None, description="List of account names to filter by") + connector_names: Optional[List[str]] = Field(default=None, description="List of connector names to filter by") + + +class FundingPaymentFilterRequest(TimeRangePaginationParams): + """Request model for filtering funding payments""" + account_names: Optional[List[str]] = Field(default=None, description="List of account names to filter by") + connector_names: Optional[List[str]] = Field(default=None, description="List of connector names to filter by") + trading_pair: Optional[str] = Field(default=None, description="Filter by trading pair") + + +class TradeFilterRequest(TimeRangePaginationParams): + """Request model for filtering trades""" + account_names: Optional[List[str]] = Field(default=None, description="List of account names to filter by") + connector_names: Optional[List[str]] = Field(default=None, description="List of connector names to filter by") + trading_pairs: Optional[List[str]] = Field(default=None, description="List of trading pairs to filter by") + trade_types: Optional[List[str]] = Field(default=None, description="List of trade types to filter by (BUY/SELL)") \ No newline at end of file diff --git a/routers/trading.py b/routers/trading.py index ee2912ce..ca1047de 100644 --- a/routers/trading.py +++ b/routers/trading.py @@ -1,12 +1,13 @@ from typing import Dict, List, Optional -from fastapi import APIRouter, HTTPException, Depends, Query +from fastapi import APIRouter, HTTPException, Depends +from pydantic import BaseModel from hummingbot.core.data_type.common import PositionMode, TradeType, OrderType, PositionAction from starlette import status from services.accounts_service import AccountsService from deps import get_accounts_service, get_market_data_feed_manager -from models import TradeRequest, TradeResponse +from models import TradeRequest, TradeResponse, OrderFilterRequest, ActiveOrderFilterRequest, PositionFilterRequest, FundingPaymentFilterRequest, TradeFilterRequest, PaginatedResponse from models.accounts import PositionModeRequest, LeverageRequest router = APIRouter(tags=["Trading"], prefix="/trading") @@ -66,9 +67,14 @@ async def place_trade(trade_request: TradeRequest, raise HTTPException(status_code=500, 
detail=f"Unexpected error placing trade: {str(e)}") +class CancelOrderRequest(BaseModel): + """Request model for cancelling an order""" + trading_pair: str + + @router.post("/{account_name}/{connector_name}/orders/{client_order_id}/cancel") async def cancel_order(account_name: str, connector_name: str, client_order_id: str, - trading_pair: str = Query(..., description="Trading pair for the order to cancel"), + request: CancelOrderRequest, accounts_service: AccountsService = Depends(get_accounts_service)): """ Cancel a specific order by its client order ID. @@ -90,7 +96,7 @@ async def cancel_order(account_name: str, connector_name: str, client_order_id: cancelled_order_id = await accounts_service.cancel_order( account_name=account_name, connector_name=connector_name, - trading_pair=trading_pair, + trading_pair=request.trading_pair, client_order_id=client_order_id ) return {"message": f"Order {cancelled_order_id} cancelled successfully"} @@ -99,14 +105,10 @@ async def cancel_order(account_name: str, connector_name: str, client_order_id: except Exception as e: raise HTTPException(status_code=500, detail=f"Error cancelling order: {str(e)}") - - - -@router.get("/positions", response_model=List[Dict]) +@router.post("/positions", response_model=PaginatedResponse) async def get_positions( - account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), - connector_names: Optional[List[str]] = Query(default=None, description="Filter by connector names"), - accounts_service: AccountsService = Depends(get_accounts_service) + filter_request: PositionFilterRequest, + accounts_service: AccountsService = Depends(get_accounts_service) ): """ Get current positions across all or filtered perpetual connectors. @@ -115,11 +117,10 @@ async def get_positions( including unrealized PnL, leverage, funding fees, and margin information. 
Args: - account_names: Optional list of account names to filter by - connector_names: Optional list of connector names to filter by + filter_request: JSON payload with filtering criteria Returns: - List of current position dictionaries with real-time data from filtered accounts/connectors + Paginated response with position data and pagination metadata Raises: HTTPException: 500 if there's an error fetching positions @@ -129,320 +130,348 @@ async def get_positions( all_connectors = accounts_service.connector_manager.get_all_connectors() # Filter accounts - accounts_to_check = account_names if account_names else list(all_connectors.keys()) + accounts_to_check = filter_request.account_names if filter_request.account_names else list(all_connectors.keys()) for account_name in accounts_to_check: if account_name in all_connectors: # Filter connectors - connectors_to_check = connector_names if connector_names else list(all_connectors[account_name].keys()) + connectors_to_check = filter_request.connector_names if filter_request.connector_names else list(all_connectors[account_name].keys()) for connector_name in connectors_to_check: # Only fetch positions from perpetual connectors if connector_name in all_connectors[account_name] and "_perpetual" in connector_name: try: positions = await accounts_service.get_account_positions(account_name, connector_name) + # Add cursor-friendly identifier to each position + for position in positions: + position["_cursor_id"] = f"{account_name}:{connector_name}:{position.get('trading_pair', '')}" all_positions.extend(positions) except Exception as e: # Log error but continue with other connectors import logging logging.warning(f"Failed to get positions for {account_name}/{connector_name}: {e}") - return all_positions + # Sort by cursor_id for consistent pagination + all_positions.sort(key=lambda x: x.get("_cursor_id", "")) + + # Apply cursor-based pagination + start_index = 0 + if filter_request.cursor: + # Find the position after the cursor + for i, position in enumerate(all_positions): + if position.get("_cursor_id") == filter_request.cursor: + start_index = i + 1 + break + + # Get page of results + end_index = start_index + filter_request.limit + page_positions = all_positions[start_index:end_index] + + # Determine next cursor and has_more + has_more = end_index < len(all_positions) + next_cursor = page_positions[-1].get("_cursor_id") if page_positions and has_more else None + + # Clean up cursor_id from response data + for position in page_positions: + position.pop("_cursor_id", None) + + return PaginatedResponse( + data=page_positions, + pagination={ + "limit": filter_request.limit, + "has_more": has_more, + "next_cursor": next_cursor, + "total_count": len(all_positions) + } + ) except Exception as e: raise HTTPException(status_code=500, detail=f"Error fetching positions: {str(e)}") -# Order Management -@router.get("/{account_name}/{connector_name}/orders/active", response_model=Dict[str, Dict]) -async def get_connector_active_orders(account_name: str, connector_name: str, - accounts_service: AccountsService = Depends(get_accounts_service)): +# Active Orders Management - Real-time from connectors +@router.post("/orders/active", response_model=PaginatedResponse) +async def get_active_orders( + filter_request: ActiveOrderFilterRequest, + accounts_service: AccountsService = Depends(get_accounts_service) +): """ - Get all active orders for a specific account and connector. + Get active (in-flight) orders across all or filtered accounts and connectors. 
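+
+    Example request body (illustrative; connector and pair are placeholders):
+
+        {"connector_names": ["binance"], "trading_pairs": ["BTC-USDT"], "limit": 100}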
+ + This endpoint fetches real-time active orders directly from the connectors' in_flight_orders property, + providing current order status, fill amounts, and other live order data. Args: - account_name: Name of the account - connector_name: Name of the connector - accounts_service: Injected accounts service + filter_request: JSON payload with filtering criteria Returns: - Dictionary mapping order IDs to order details - + Paginated response with active order data and pagination metadata + Raises: - HTTPException: 404 if account or connector not found + HTTPException: 500 if there's an error fetching orders """ try: - return await accounts_service.get_active_orders(account_name, connector_name) - except HTTPException: - raise - except Exception as e: - raise HTTPException(status_code=500, detail=f"Error retrieving orders: {str(e)}") - - -# Global Order History -@router.get("/orders", response_model=List[Dict]) -async def get_all_orders( - market: Optional[str] = Query(None, description="Filter by market/connector"), - symbol: Optional[str] = Query(None, description="Filter by trading pair"), - status: Optional[str] = Query(None, description="Filter by order status"), - start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), - end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), - limit: int = Query(100, ge=1, le=1000, description="Maximum number of orders to return"), - offset: int = Query(0, ge=0, description="Number of orders to skip"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get order history across all accounts. - - Args: - market: Optional filter by market/connector - symbol: Optional filter by trading pair - status: Optional filter by order status - start_time: Optional start timestamp - end_time: Optional end timestamp - limit: Maximum number of orders to return - offset: Number of orders to skip + all_active_orders = [] + all_connectors = accounts_service.connector_manager.get_all_connectors() + + # Use filter request values + accounts_to_check = filter_request.account_names if filter_request.account_names else list(all_connectors.keys()) + + for account_name in accounts_to_check: + if account_name in all_connectors: + # Filter connectors + connectors_to_check = filter_request.connector_names if filter_request.connector_names else list(all_connectors[account_name].keys()) + + for connector_name in connectors_to_check: + if connector_name in all_connectors[account_name]: + try: + connector = all_connectors[account_name][connector_name] + # Get in-flight orders directly from connector + in_flight_orders = connector.in_flight_orders + + for client_order_id, order in in_flight_orders.items(): + # Apply trading pair filter if specified + if filter_request.trading_pairs and order.trading_pair not in filter_request.trading_pairs: + continue + + # Convert to JSON format for API response + order_dict = order.to_json() + order_dict.update({ + "account_name": account_name, + "connector_name": connector_name, + "_cursor_id": client_order_id # Use client_order_id as cursor + }) + all_active_orders.append(order_dict) + + except Exception as e: + # Log error but continue with other connectors + import logging + logging.warning(f"Failed to get active orders for {account_name}/{connector_name}: {e}") + + # Sort by cursor_id for consistent pagination + all_active_orders.sort(key=lambda x: x.get("_cursor_id", "")) - Returns: - List of orders across all accounts - """ - return await 
accounts_service.get_orders( - account_name=None, # Query all accounts - market=market, - symbol=symbol, - status=status, - start_time=start_time, - end_time=end_time, - limit=limit, - offset=offset, - ) - - -@router.get("/orders/active", response_model=List[Dict]) -async def get_all_active_orders( - market: Optional[str] = Query(None, description="Filter by market/connector"), - symbol: Optional[str] = Query(None, description="Filter by trading pair"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get active orders across all accounts. - - Args: - market: Optional filter by market/connector - symbol: Optional filter by trading pair - accounts_service: Injected accounts service + # Apply cursor-based pagination + start_index = 0 + if filter_request.cursor: + # Find the order after the cursor + for i, order in enumerate(all_active_orders): + if order.get("_cursor_id") == filter_request.cursor: + start_index = i + 1 + break - Returns: - List of active orders across all accounts - """ - return await accounts_service.get_active_orders_history( - account_name=None, # Query all accounts - market=market, - symbol=symbol, - ) + # Get page of results + end_index = start_index + filter_request.limit + page_orders = all_active_orders[start_index:end_index] + + # Determine next cursor and has_more + has_more = end_index < len(all_active_orders) + next_cursor = page_orders[-1].get("_cursor_id") if page_orders and has_more else None + + # Clean up cursor_id from response data + for order in page_orders: + order.pop("_cursor_id", None) + + return PaginatedResponse( + data=page_orders, + pagination={ + "limit": filter_request.limit, + "has_more": has_more, + "next_cursor": next_cursor, + "total_count": len(all_active_orders) + } + ) + + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching active orders: {str(e)}") -@router.get("/orders/summary", response_model=Dict) -async def get_all_orders_summary( - start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), - end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), +# Historical Order Management - From registry/database +@router.post("/orders/search", response_model=PaginatedResponse) +async def get_orders( + filter_request: OrderFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service) ): """ - Get order summary statistics across all accounts. + Get historical order data across all or filtered accounts from the database/registry. Args: - start_time: Optional start timestamp - end_time: Optional end timestamp - accounts_service: Injected accounts service + filter_request: JSON payload with filtering criteria Returns: - Order summary statistics including fill rate, volumes, etc. 
- """ - return await accounts_service.get_orders_summary( - account_name=None, # Query all accounts - start_time=start_time, - end_time=end_time, - ) - - -# Account-Specific Order History -@router.get("/{account_name}/orders", response_model=List[Dict]) -async def get_account_orders( - account_name: str, - connector_name: Optional[str] = Query(None, description="Filter by connector"), - trading_pair: Optional[str] = Query(None, description="Filter by trading pair"), - status: Optional[str] = Query(None, description="Filter by order status"), - start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), - end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), - limit: int = Query(100, ge=1, le=1000, description="Maximum number of orders to return"), - offset: int = Query(0, ge=0, description="Number of orders to skip"), - accounts_service: AccountsService = Depends(get_accounts_service) -): + Paginated response with historical order data and pagination metadata """ - Get order history for a specific account. - - Args: - account_name: Name of the account - connector_name: Optional filter by connector - trading_pair: Optional filter by trading pair - status: Optional filter by order status - start_time: Optional start timestamp - end_time: Optional end timestamp - limit: Maximum number of orders to return - offset: Number of orders to skip - accounts_service: Injected accounts service + try: + all_orders = [] - Returns: - List of orders for the account + # Determine which accounts to query + if filter_request.account_names: + accounts_to_check = filter_request.account_names + else: + # Get all accounts + all_connectors = accounts_service.connector_manager.get_all_connectors() + accounts_to_check = list(all_connectors.keys()) - Raises: - HTTPException: 404 if account not found - """ - # Verify account exists - state = await accounts_service.get_account_current_state(account_name) - if not state: - raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - - orders = await accounts_service.get_orders( - account_name=account_name, - market=connector_name, - symbol=trading_pair, - status=status, - start_time=start_time, - end_time=end_time, - limit=limit, - offset=offset, - ) - - return orders - - -@router.get("/{account_name}/orders/active", response_model=List[Dict]) -async def get_account_active_orders( - account_name: str, - connector_name: Optional[str] = Query(None, description="Filter by connector"), - trading_pair: Optional[str] = Query(None, description="Filter by trading pair"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get active orders for a specific account. 
- - Args: - account_name: Name of the account - connector_name: Optional filter by connector - trading_pair: Optional filter by trading pair - accounts_service: Injected accounts service + # Collect orders from all specified accounts + for account_name in accounts_to_check: + try: + orders = await accounts_service.get_orders( + account_name=account_name, + market=filter_request.connector_names[0] if filter_request.connector_names and len(filter_request.connector_names) == 1 else None, + symbol=filter_request.trading_pairs[0] if filter_request.trading_pairs and len(filter_request.trading_pairs) == 1 else None, + status=filter_request.status, + start_time=filter_request.start_time, + end_time=filter_request.end_time, + limit=filter_request.limit * 2, # Get more for filtering + offset=0, + ) + # Add cursor-friendly identifier to each order + for order in orders: + order["_cursor_id"] = f"{order.get('timestamp', 0)}:{order.get('client_order_id', '')}" + all_orders.extend(orders) + except Exception as e: + # Log error but continue with other accounts + import logging + logging.warning(f"Failed to get orders for {account_name}: {e}") - Returns: - List of active orders + # Apply filters for multiple values + if filter_request.connector_names and len(filter_request.connector_names) > 1: + all_orders = [order for order in all_orders if order.get('market') in filter_request.connector_names] + if filter_request.trading_pairs and len(filter_request.trading_pairs) > 1: + all_orders = [order for order in all_orders if order.get('symbol') in filter_request.trading_pairs] - Raises: - HTTPException: 404 if account not found - """ - # Verify account exists - state = await accounts_service.get_account_current_state(account_name) - if not state: - raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - - orders = await accounts_service.get_active_orders_history( - account_name=account_name, - market=connector_name, - symbol=trading_pair, - ) - - return orders + # Sort by timestamp (most recent first) and then by cursor_id for consistency + all_orders.sort(key=lambda x: (x.get('timestamp', 0), x.get('_cursor_id', '')), reverse=True) + + # Apply cursor-based pagination + start_index = 0 + if filter_request.cursor: + # Find the order after the cursor + for i, order in enumerate(all_orders): + if order.get("_cursor_id") == filter_request.cursor: + start_index = i + 1 + break + + # Get page of results + end_index = start_index + filter_request.limit + page_orders = all_orders[start_index:end_index] + + # Determine next cursor and has_more + has_more = end_index < len(all_orders) + next_cursor = page_orders[-1].get("_cursor_id") if page_orders and has_more else None + + # Clean up cursor_id from response data + for order in page_orders: + order.pop("_cursor_id", None) + + return PaginatedResponse( + data=page_orders, + pagination={ + "limit": filter_request.limit, + "has_more": has_more, + "next_cursor": next_cursor, + "total_count": len(all_orders) + } + ) + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching orders: {str(e)}") + + # Trade History -@router.get("/trades", response_model=List[Dict]) -async def get_all_trades( - market: Optional[str] = Query(None, description="Filter by market/connector"), - symbol: Optional[str] = Query(None, description="Filter by trading pair"), - trade_type: Optional[str] = Query(None, description="Filter by trade type (BUY/SELL)"), - start_time: Optional[int] = Query(None, description="Start timestamp in 
milliseconds"), - end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), - limit: int = Query(100, ge=1, le=1000, description="Maximum number of trades to return"), - offset: int = Query(0, ge=0, description="Number of trades to skip"), +@router.post("/trades", response_model=PaginatedResponse) +async def get_trades( + filter_request: TradeFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service) ): """ - Get trade history across all accounts. + Get trade history across all or filtered accounts with complex filtering. Args: - market: Optional filter by market/connector - symbol: Optional filter by trading pair - trade_type: Optional filter by trade type - start_time: Optional start timestamp - end_time: Optional end timestamp - limit: Maximum number of trades to return - offset: Number of trades to skip - accounts_service: Injected accounts service + filter_request: JSON payload with filtering criteria Returns: - List of trades across all accounts + Paginated response with trade data and pagination metadata """ - return await accounts_service.get_trades( - account_name=None, # Query all accounts - market=market, - symbol=symbol, - trade_type=trade_type, - start_time=start_time, - end_time=end_time, - limit=limit, - offset=offset, - ) - - -@router.get("/{account_name}/trades", response_model=List[Dict]) -async def get_account_trades( - account_name: str, - connector_name: Optional[str] = Query(None, description="Filter by connector"), - trading_pair: Optional[str] = Query(None, description="Filter by trading pair"), - trade_type: Optional[str] = Query(None, description="Filter by trade type (BUY/SELL)"), - start_time: Optional[int] = Query(None, description="Start timestamp in milliseconds"), - end_time: Optional[int] = Query(None, description="End timestamp in milliseconds"), - limit: int = Query(100, ge=1, le=1000, description="Maximum number of trades to return"), - offset: int = Query(0, ge=0, description="Number of trades to skip"), - accounts_service: AccountsService = Depends(get_accounts_service) -): - """ - Get trade history for a specific account. 
- - Args: - account_name: Name of the account - connector_name: Optional filter by connector - trading_pair: Optional filter by trading pair - trade_type: Optional filter by trade type - start_time: Optional start timestamp - end_time: Optional end timestamp - limit: Maximum number of trades to return - offset: Number of trades to skip - accounts_service: Injected accounts service + try: + all_trades = [] - Returns: - List of trades for the account + # Determine which accounts to query + if filter_request.account_names: + accounts_to_check = filter_request.account_names + else: + # Get all accounts + all_connectors = accounts_service.connector_manager.get_all_connectors() + accounts_to_check = list(all_connectors.keys()) - Raises: - HTTPException: 404 if account not found - """ - # Verify account exists - state = await accounts_service.get_account_current_state(account_name) - if not state: - raise HTTPException(status_code=404, detail=f"Account '{account_name}' not found") - - trades = await accounts_service.get_trades( - account_name=account_name, - market=connector_name, - symbol=trading_pair, - trade_type=trade_type, - start_time=start_time, - end_time=end_time, - limit=limit, - offset=offset, - ) - - return trades + # Collect trades from all specified accounts + for account_name in accounts_to_check: + try: + trades = await accounts_service.get_trades( + account_name=account_name, + market=filter_request.connector_names[0] if filter_request.connector_names and len(filter_request.connector_names) == 1 else None, + symbol=filter_request.trading_pairs[0] if filter_request.trading_pairs and len(filter_request.trading_pairs) == 1 else None, + trade_type=filter_request.trade_types[0] if filter_request.trade_types and len(filter_request.trade_types) == 1 else None, + start_time=filter_request.start_time, + end_time=filter_request.end_time, + limit=filter_request.limit * 2, # Get more for filtering + offset=0, + ) + # Add cursor-friendly identifier to each trade + for trade in trades: + trade["_cursor_id"] = f"{trade.get('timestamp', 0)}:{trade.get('trade_id', '')}" + all_trades.extend(trades) + except Exception as e: + # Log error but continue with other accounts + import logging + logging.warning(f"Failed to get trades for {account_name}: {e}") + + # Apply filters for multiple values + if filter_request.connector_names and len(filter_request.connector_names) > 1: + all_trades = [trade for trade in all_trades if trade.get('market') in filter_request.connector_names] + if filter_request.trading_pairs and len(filter_request.trading_pairs) > 1: + all_trades = [trade for trade in all_trades if trade.get('symbol') in filter_request.trading_pairs] + if filter_request.trade_types and len(filter_request.trade_types) > 1: + all_trades = [trade for trade in all_trades if trade.get('trade_type') in filter_request.trade_types] + + # Sort by timestamp (most recent first) and then by cursor_id for consistency + all_trades.sort(key=lambda x: (x.get('timestamp', 0), x.get('_cursor_id', '')), reverse=True) + + # Apply cursor-based pagination + start_index = 0 + if filter_request.cursor: + # Find the trade after the cursor + for i, trade in enumerate(all_trades): + if trade.get("_cursor_id") == filter_request.cursor: + start_index = i + 1 + break + + # Get page of results + end_index = start_index + filter_request.limit + page_trades = all_trades[start_index:end_index] + + # Determine next cursor and has_more + has_more = end_index < len(all_trades) + next_cursor = page_trades[-1].get("_cursor_id") if 
page_trades and has_more else None + + # Clean up cursor_id from response data + for trade in page_trades: + trade.pop("_cursor_id", None) + + return PaginatedResponse( + data=page_trades, + pagination={ + "limit": filter_request.limit, + "has_more": has_more, + "next_cursor": next_cursor, + "total_count": len(all_trades) + } + ) + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching trades: {str(e)}") + + @router.post("/{account_name}/{connector_name}/position-mode") @@ -545,13 +574,10 @@ async def set_leverage( except Exception as e: raise HTTPException(status_code=500, detail=f"Unexpected error setting leverage: {str(e)}") -@router.get("/funding-payments", response_model=List[Dict]) +@router.post("/funding-payments", response_model=PaginatedResponse) async def get_funding_payments( - account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), - connector_names: Optional[List[str]] = Query(default=None, description="Filter by connector names"), - trading_pair: Optional[str] = Query(default=None, description="Filter by trading pair"), - limit: int = Query(default=100, ge=1, le=1000, description="Maximum number of records"), - accounts_service: AccountsService = Depends(get_accounts_service) + filter_request: FundingPaymentFilterRequest, + accounts_service: AccountsService = Depends(get_accounts_service) ): """ Get funding payment history across all or filtered perpetual connectors. @@ -560,13 +586,10 @@ async def get_funding_payments( funding rates, payment amounts, and position data at time of payment. Args: - account_names: Optional list of account names to filter by - connector_names: Optional list of connector names to filter by - trading_pair: Optional trading pair filter - limit: Maximum number of records to return + filter_request: JSON payload with filtering criteria Returns: - List of funding payment records with rates, amounts, and position data + Paginated response with funding payment data and pagination metadata Raises: HTTPException: 500 if there's an error fetching funding payments @@ -576,12 +599,12 @@ async def get_funding_payments( all_connectors = accounts_service.connector_manager.get_all_connectors() # Filter accounts - accounts_to_check = account_names if account_names else list(all_connectors.keys()) + accounts_to_check = filter_request.account_names if filter_request.account_names else list(all_connectors.keys()) for account_name in accounts_to_check: if account_name in all_connectors: # Filter connectors - connectors_to_check = connector_names if connector_names else list(all_connectors[account_name].keys()) + connectors_to_check = filter_request.connector_names if filter_request.connector_names else list(all_connectors[account_name].keys()) for connector_name in connectors_to_check: # Only fetch funding payments from perpetual connectors @@ -590,20 +613,51 @@ async def get_funding_payments( payments = await accounts_service.get_funding_payments( account_name=account_name, connector_name=connector_name, - trading_pair=trading_pair, - limit=limit + trading_pair=filter_request.trading_pair, + limit=filter_request.limit * 2 # Get more for pagination ) + # Add cursor-friendly identifier to each payment + for payment in payments: + payment["_cursor_id"] = f"{account_name}:{connector_name}:{payment.get('timestamp', '')}:{payment.get('trading_pair', '')}" all_funding_payments.extend(payments) except Exception as e: # Log error but continue with other connectors import logging logging.warning(f"Failed 
to get funding payments for {account_name}/{connector_name}: {e}") - # Sort by timestamp (most recent first) - all_funding_payments.sort(key=lambda x: x.get("timestamp", ""), reverse=True) - - # Apply limit to the combined results - return all_funding_payments[:limit] + # Sort by timestamp (most recent first) and then by cursor_id for consistency + all_funding_payments.sort(key=lambda x: (x.get("timestamp", ""), x.get("_cursor_id", "")), reverse=True) + + # Apply cursor-based pagination + start_index = 0 + if filter_request.cursor: + # Find the payment after the cursor + for i, payment in enumerate(all_funding_payments): + if payment.get("_cursor_id") == filter_request.cursor: + start_index = i + 1 + break + + # Get page of results + end_index = start_index + filter_request.limit + page_payments = all_funding_payments[start_index:end_index] + + # Determine next cursor and has_more + has_more = end_index < len(all_funding_payments) + next_cursor = page_payments[-1].get("_cursor_id") if page_payments and has_more else None + + # Clean up cursor_id from response data + for payment in page_payments: + payment.pop("_cursor_id", None) + + return PaginatedResponse( + data=page_payments, + pagination={ + "limit": filter_request.limit, + "has_more": has_more, + "next_cursor": next_cursor, + "total_count": len(all_funding_payments) + } + ) except Exception as e: raise HTTPException(status_code=500, detail=f"Error fetching funding payments: {str(e)}") \ No newline at end of file From fe4f2f230fae74e90fc1f3aa9f7d65852e43a20f Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 1 Jul 2025 02:05:21 +0200 Subject: [PATCH 169/244] (feat) add funding info and order book request --- services/market_data_feed_manager.py | 170 +++++++++++++++++++++++---- 1 file changed, 144 insertions(+), 26 deletions(-) diff --git a/services/market_data_feed_manager.py b/services/market_data_feed_manager.py index fb04bc0e..88ed776f 100644 --- a/services/market_data_feed_manager.py +++ b/services/market_data_feed_manager.py @@ -264,7 +264,49 @@ async def get_prices(self, connector_name: str, trading_pairs: List[str]) -> Dic self.logger.error(f"Error getting prices for {connector_name}: {e}") return {"error": str(e)} - def get_order_book_data(self, connector_name: str, trading_pair: str, depth: int = 10) -> Dict: + async def get_funding_info(self, connector_name: str, trading_pair: str) -> Dict: + """ + Get funding information for a perpetual trading pair. 
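+
+        Example (illustrative; ``manager`` stands for an instance of this
+        service, obtained via dependency injection):
+
+            info = await manager.get_funding_info("binance_perpetual", "BTC-USDT")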
+ + Args: + connector_name: Name of the connector + trading_pair: Trading pair to get funding info for + + Returns: + Dictionary containing funding information + """ + try: + # Access connector through MarketDataProvider's _rate_sources LazyDict + connector = self.market_data_provider._rate_sources[connector_name] + + # Check if this is a perpetual connector and has funding info support + if hasattr(connector, '_orderbook_ds') and connector._orderbook_ds: + orderbook_ds = connector._orderbook_ds + + # Get funding info from the order book data source + funding_info = await orderbook_ds.get_funding_info(trading_pair) + + if funding_info: + result = { + "trading_pair": trading_pair, + "funding_rate": float(funding_info.rate) if funding_info.rate else None, + "next_funding_time": float(funding_info.next_funding_utc_timestamp) if funding_info.next_funding_utc_timestamp else None, + "mark_price": float(funding_info.mark_price) if funding_info.mark_price else None, + "index_price": float(funding_info.index_price) if funding_info.index_price else None, + } + + self.logger.debug(f"Retrieved funding info for {connector_name}/{trading_pair}") + return result + else: + return {"error": f"No funding info available for {trading_pair}"} + else: + return {"error": f"Funding info not supported for {connector_name}"} + + except Exception as e: + self.logger.error(f"Error getting funding info for {connector_name}/{trading_pair}: {e}") + return {"error": str(e)} + + async def get_order_book_data(self, connector_name: str, trading_pair: str, depth: int = 10) -> Dict: """ Get order book data using the connector's order book data source. @@ -284,42 +326,118 @@ def get_order_book_data(self, connector_name: str, trading_pair: str, depth: int if hasattr(connector, '_orderbook_ds') and connector._orderbook_ds: orderbook_ds = connector._orderbook_ds - # Check if the trading pair is available in the order book data source - if trading_pair in orderbook_ds: - orderbook = orderbook_ds[trading_pair] - - # Get bid and ask data - bids = [] - asks = [] + # Get new order book using the data source method + order_book = await orderbook_ds.get_new_order_book(trading_pair) + snapshot = order_book.snapshot + + result = { + "trading_pair": trading_pair, + "bids": snapshot[0].loc[:(depth - 1), ["price", "amount"]].values.tolist(), + "asks": snapshot[1].loc[:(depth - 1), ["price", "amount"]].values.tolist(), + "timestamp": time.time() + } + + self.logger.debug(f"Retrieved order book for {connector_name}/{trading_pair}") + return result + else: + return {"error": f"Order book data source not available for {connector_name}"} + + except Exception as e: + self.logger.error(f"Error getting order book for {connector_name}/{trading_pair}: {e}") + return {"error": str(e)} + + async def get_order_book_query_result(self, connector_name: str, trading_pair: str, is_buy: bool, **kwargs) -> Dict: + """ + Generic method for order book queries using fresh OrderBook from data source. 
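+
+        Example (illustrative; pass exactly one query kwarg per call, here
+        ``volume``; ``manager`` stands for an instance of this service):
+
+            result = await manager.get_order_book_query_result(
+                "binance", "BTC-USDT", is_buy=True, volume=1.5)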
+ + Args: + connector_name: Name of the connector + trading_pair: Trading pair + is_buy: True for buy side, False for sell side + **kwargs: Additional parameters for specific query types + + Returns: + Dictionary containing query results + """ + try: + current_time = time.time() + + # Access connector through MarketDataProvider's _rate_sources LazyDict + connector = self.market_data_provider._rate_sources[connector_name] + + # Access the order book data source + if hasattr(connector, '_orderbook_ds') and connector._orderbook_ds: + orderbook_ds = connector._orderbook_ds + + # Get fresh order book using the data source method + order_book = await orderbook_ds.get_new_order_book(trading_pair) + + if 'volume' in kwargs: + # Get price for volume + result = order_book.get_price_for_volume(is_buy, kwargs['volume']) + return { + "trading_pair": trading_pair, + "is_buy": is_buy, + "query_volume": kwargs['volume'], + "result_price": float(result.result_price) if result.result_price else None, + "result_volume": float(result.result_volume) if result.result_volume else None, + "timestamp": current_time + } - # Get top bids (highest prices first) - for i, (price, amount) in enumerate(orderbook.bid_entries()): - if i >= depth: - break - bids.append({"price": float(price), "amount": float(amount)}) + elif 'price' in kwargs: + # Get volume for price + result = order_book.get_volume_for_price(is_buy, kwargs['price']) + return { + "trading_pair": trading_pair, + "is_buy": is_buy, + "query_price": kwargs['price'], + "result_volume": float(result.result_volume) if result.result_volume else None, + "result_price": float(result.result_price) if result.result_price else None, + "timestamp": current_time + } - # Get top asks (lowest prices first) - for i, (price, amount) in enumerate(orderbook.ask_entries()): - if i >= depth: - break - asks.append({"price": float(price), "amount": float(amount)}) + elif 'quote_volume' in kwargs: + # Get price for quote volume + result = order_book.get_price_for_quote_volume(is_buy, kwargs['quote_volume']) + return { + "trading_pair": trading_pair, + "is_buy": is_buy, + "query_quote_volume": kwargs['quote_volume'], + "result_price": float(result.result_price) if result.result_price else None, + "result_volume": float(result.result_volume) if result.result_volume else None, + "timestamp": current_time + } - result = { + elif 'quote_price' in kwargs: + # Get quote volume for price + result = order_book.get_quote_volume_for_price(is_buy, kwargs['quote_price']) + return { "trading_pair": trading_pair, - "bids": bids, - "asks": asks, - "timestamp": time.time() + "is_buy": is_buy, + "query_price": kwargs['quote_price'], + "result_volume": float(result.result_volume) if result.result_volume else None, + "result_quote_volume": float(result.result_price) if result.result_price else None, # For quote volume queries, result_price contains the quote volume + "timestamp": current_time } - self.logger.debug(f"Retrieved order book for {connector_name}/{trading_pair}") - return result + elif 'vwap_volume' in kwargs: + # Get VWAP for volume + result = order_book.get_vwap_for_volume(is_buy, kwargs['vwap_volume']) + return { + "trading_pair": trading_pair, + "is_buy": is_buy, + "query_volume": kwargs['vwap_volume'], + "average_price": float(result.result_price) if result.result_price else None, + "result_volume": float(result.result_volume) if result.result_volume else None, + "timestamp": current_time + } else: - return {"error": f"Trading pair {trading_pair} not found in order book data source"} + 
return {"error": "Invalid query parameters"} else: return {"error": f"Order book data source not available for {connector_name}"} except Exception as e: - self.logger.error(f"Error getting order book for {connector_name}/{trading_pair}: {e}") + self.logger.error(f"Error in order book query for {connector_name}/{trading_pair}: {e}") return {"error": str(e)} async def _cleanup_loop(self): From 981acafffc8b82570115b8131134c058464815a3 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 1 Jul 2025 02:05:36 +0200 Subject: [PATCH 170/244] (feat) add endpoints to request marekt data --- models/__init__.py | 32 +++++ models/market_data.py | 106 ++++++++++++++- routers/market_data.py | 301 ++++++++++++++++++++++++++++++++++++++++- 3 files changed, 435 insertions(+), 4 deletions(-) diff --git a/models/__init__.py b/models/__init__.py index bf5c0dbd..04734461 100644 --- a/models/__init__.py +++ b/models/__init__.py @@ -72,6 +72,22 @@ MarketDataSettings, TradingRulesResponse, SupportedOrderTypesResponse, + # New enhanced market data models + PriceRequest, + PriceData, + PricesResponse, + FundingInfoRequest, + FundingInfoResponse, + OrderBookRequest, + OrderBookLevel, + OrderBookResponse, + OrderBookQueryRequest, + VolumeForPriceRequest, + PriceForVolumeRequest, + QuoteVolumeForPriceRequest, + PriceForQuoteVolumeRequest, + VWAPForVolumeRequest, + OrderBookQueryResult, ) # Account models @@ -185,6 +201,22 @@ "MarketDataSettings", "TradingRulesResponse", "SupportedOrderTypesResponse", + # New enhanced market data models + "PriceRequest", + "PriceData", + "PricesResponse", + "FundingInfoRequest", + "FundingInfoResponse", + "OrderBookRequest", + "OrderBookLevel", + "OrderBookResponse", + "OrderBookQueryRequest", + "VolumeForPriceRequest", + "PriceForVolumeRequest", + "QuoteVolumeForPriceRequest", + "PriceForQuoteVolumeRequest", + "VWAPForVolumeRequest", + "OrderBookQueryResult", # Account models "LeverageRequest", "PositionModeRequest", diff --git a/models/market_data.py b/models/market_data.py index e8f21a38..8fdf4930 100644 --- a/models/market_data.py +++ b/models/market_data.py @@ -1,6 +1,7 @@ from typing import Dict, List, Optional, Any from pydantic import BaseModel, Field from datetime import datetime +from decimal import Decimal class CandleData(BaseModel): @@ -47,4 +48,107 @@ class TradingRulesResponse(BaseModel): class SupportedOrderTypesResponse(BaseModel): """Response for supported order types""" connector: str = Field(description="Connector name") - supported_order_types: List[str] = Field(description="List of supported order types") \ No newline at end of file + supported_order_types: List[str] = Field(description="List of supported order types") + + +# New models for enhanced market data functionality + +class PriceRequest(BaseModel): + """Request model for getting prices""" + connector_name: str = Field(description="Name of the connector") + trading_pairs: List[str] = Field(description="List of trading pairs to get prices for") + + +class PriceData(BaseModel): + """Price data for a trading pair""" + trading_pair: str = Field(description="Trading pair") + price: float = Field(description="Current price") + timestamp: float = Field(description="Price timestamp") + + +class PricesResponse(BaseModel): + """Response for prices data""" + connector: str = Field(description="Connector name") + prices: Dict[str, float] = Field(description="Trading pair to price mapping") + timestamp: float = Field(description="Response timestamp") + + +class FundingInfoRequest(BaseModel): + """Request model for 
+class FundingInfoRequest(BaseModel):
+    """Request model for getting funding info"""
+    connector_name: str = Field(description="Name of the connector")
+    trading_pair: str = Field(description="Trading pair to get funding info for")
+
+
+class FundingInfoResponse(BaseModel):
+    """Response for funding info"""
+    trading_pair: str = Field(description="Trading pair")
+    funding_rate: Optional[float] = Field(description="Current funding rate")
+    next_funding_time: Optional[float] = Field(description="Next funding time timestamp")
+    mark_price: Optional[float] = Field(description="Mark price")
+    index_price: Optional[float] = Field(description="Index price")
+
+
+class OrderBookRequest(BaseModel):
+    """Request model for getting order book data"""
+    connector_name: str = Field(description="Name of the connector")
+    trading_pair: str = Field(description="Trading pair")
+    depth: int = Field(default=10, ge=1, le=100, description="Number of price levels to return")
+
+
+class OrderBookLevel(BaseModel):
+    """Single order book level"""
+    price: float = Field(description="Price level")
+    amount: float = Field(description="Amount at this price level")
+
+
+class OrderBookResponse(BaseModel):
+    """Response for order book data"""
+    trading_pair: str = Field(description="Trading pair")
+    bids: List[OrderBookLevel] = Field(description="Bid levels (highest to lowest)")
+    asks: List[OrderBookLevel] = Field(description="Ask levels (lowest to highest)")
+    timestamp: float = Field(description="Snapshot timestamp")
+
+
+class OrderBookQueryRequest(BaseModel):
+    """Request model for order book queries"""
+    connector_name: str = Field(description="Name of the connector")
+    trading_pair: str = Field(description="Trading pair")
+    is_buy: bool = Field(description="True for buy side, False for sell side")
+
+
+class VolumeForPriceRequest(OrderBookQueryRequest):
+    """Request model for getting volume at a specific price"""
+    price: float = Field(description="Price to query volume for")
+
+
+class PriceForVolumeRequest(OrderBookQueryRequest):
+    """Request model for getting price for a specific volume"""
+    volume: float = Field(description="Volume to query price for")
+
+
+class QuoteVolumeForPriceRequest(OrderBookQueryRequest):
+    """Request model for getting quote volume at a specific price"""
+    price: float = Field(description="Price to query quote volume for")
+
+
+class PriceForQuoteVolumeRequest(OrderBookQueryRequest):
+    """Request model for getting price for a specific quote volume"""
+    quote_volume: float = Field(description="Quote volume to query price for")
+
+
+class VWAPForVolumeRequest(OrderBookQueryRequest):
+    """Request model for getting VWAP for a specific volume"""
+    volume: float = Field(description="Volume to calculate VWAP for")
+
+
+class OrderBookQueryResult(BaseModel):
+    """Response for order book query operations"""
+    trading_pair: str = Field(description="Trading pair")
+    is_buy: bool = Field(description="Query side (buy/sell)")
+    query_volume: Optional[float] = Field(default=None, description="Queried volume")
+    query_price: Optional[float] = Field(default=None, description="Queried price")
+    result_price: Optional[float] = Field(default=None, description="Resulting price")
+    result_volume: Optional[float] = Field(default=None, description="Resulting volume")
+    result_quote_volume: Optional[float] = Field(default=None, description="Resulting quote volume")
+    average_price: Optional[float] = Field(default=None, description="Average/VWAP price")
+    timestamp: float = Field(description="Query timestamp")
\ No newline at end of file
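The query models above map one-to-one onto the order-book query endpoints added below. A hedged sketch, under the same local-instance and default-credential assumptions as before, asking what price a 1.5-unit market buy would sweep to (`binance_perpetual` is illustrative):

```python
# Hypothetical request against the price-for-volume endpoint added below.
import httpx

query = {
    "connector_name": "binance_perpetual",  # illustrative connector name
    "trading_pair": "BTC-USDT",
    "is_buy": True,
    "volume": 1.5,
}
resp = httpx.post(
    "http://localhost:8000/market-data/order-book/price-for-volume",
    json=query,
    auth=("admin", "admin"),
)
result = resp.json()  # OrderBookQueryResult: price needed to fill that volume
print(result["result_price"], result["result_volume"])
```

diff --git 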
a/routers/market_data.py b/routers/market_data.py index eca9a8e5..4e479e34 100644 --- a/routers/market_data.py +++ b/routers/market_data.py @@ -1,11 +1,18 @@ import asyncio -from typing import Dict, List, Optional +import time -from fastapi import APIRouter, Request, HTTPException +from fastapi import APIRouter, Request, HTTPException, Depends from hummingbot.data_feed.candles_feed.data_types import CandlesConfig, HistoricalCandlesConfig from services.market_data_feed_manager import MarketDataFeedManager +from models import ( + PriceRequest, PricesResponse, FundingInfoRequest, FundingInfoResponse, + OrderBookRequest, OrderBookResponse, OrderBookLevel, + VolumeForPriceRequest, PriceForVolumeRequest, QuoteVolumeForPriceRequest, + PriceForQuoteVolumeRequest, VWAPForVolumeRequest, OrderBookQueryResult +) +from deps import get_market_data_feed_manager -router = APIRouter(tags=["Market"], prefix="/market-data") +router = APIRouter(tags=["Market Data"], prefix="/market-data") @router.post("/candles") @@ -121,3 +128,291 @@ async def get_market_data_settings(): } +# Enhanced Market Data Endpoints + +@router.post("/prices", response_model=PricesResponse) +async def get_prices( + request: PriceRequest, + market_data_manager: MarketDataFeedManager = Depends(get_market_data_feed_manager) +): + """ + Get current prices for specified trading pairs from a connector. + + Args: + request: Price request with connector name and trading pairs + market_data_manager: Injected market data feed manager + + Returns: + Current prices for the specified trading pairs + + Raises: + HTTPException: 500 if there's an error fetching prices + """ + try: + prices = await market_data_manager.get_prices( + request.connector_name, + request.trading_pairs + ) + + if "error" in prices: + raise HTTPException(status_code=500, detail=prices["error"]) + + return PricesResponse( + connector=request.connector_name, + prices=prices, + timestamp=time.time() + ) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching prices: {str(e)}") + + +@router.post("/funding-info", response_model=FundingInfoResponse) +async def get_funding_info( + request: FundingInfoRequest, + market_data_manager: MarketDataFeedManager = Depends(get_market_data_feed_manager) +): + """ + Get funding information for a perpetual trading pair. + + Args: + request: Funding info request with connector name and trading pair + market_data_manager: Injected market data feed manager + + Returns: + Funding information including rates, timestamps, and prices + + Raises: + HTTPException: 400 for non-perpetual connectors, 500 for other errors + """ + try: + funding_info = await market_data_manager.get_funding_info( + request.connector_name, + request.trading_pair + ) + + if "error" in funding_info: + if "not supported" in funding_info["error"]: + raise HTTPException(status_code=400, detail=funding_info["error"]) + else: + raise HTTPException(status_code=500, detail=funding_info["error"]) + + return FundingInfoResponse(**funding_info) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching funding info: {str(e)}") + + +@router.post("/order-book", response_model=OrderBookResponse) +async def get_order_book( + request: OrderBookRequest, + market_data_manager: MarketDataFeedManager = Depends(get_market_data_feed_manager) +): + """ + Get order book snapshot with specified depth. 
+ + Args: + request: Order book request with connector, trading pair, and depth + market_data_manager: Injected market data feed manager + + Returns: + Order book snapshot with bids and asks + + Raises: + HTTPException: 500 if there's an error fetching order book + """ + try: + order_book_data = await market_data_manager.get_order_book_data( + request.connector_name, + request.trading_pair, + request.depth + ) + + if "error" in order_book_data: + raise HTTPException(status_code=500, detail=order_book_data["error"]) + + # Convert to response format - data comes as [price, amount] lists + bids = [OrderBookLevel(price=bid[0], amount=bid[1]) for bid in order_book_data["bids"]] + asks = [OrderBookLevel(price=ask[0], amount=ask[1]) for ask in order_book_data["asks"]] + + return OrderBookResponse( + trading_pair=order_book_data["trading_pair"], + bids=bids, + asks=asks, + timestamp=order_book_data["timestamp"] + ) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching order book: {str(e)}") + + +# Order Book Query Endpoints + +@router.post("/order-book/price-for-volume", response_model=OrderBookQueryResult) +async def get_price_for_volume( + request: PriceForVolumeRequest, + market_data_manager: MarketDataFeedManager = Depends(get_market_data_feed_manager) +): + """ + Get the price required to fill a specific volume on the order book. + + Args: + request: Request with connector, trading pair, volume, and side + market_data_manager: Injected market data feed manager + + Returns: + Order book query result with price and volume information + """ + try: + result = await market_data_manager.get_order_book_query_result( + request.connector_name, + request.trading_pair, + request.is_buy, + volume=request.volume + ) + + if "error" in result: + raise HTTPException(status_code=500, detail=result["error"]) + + return OrderBookQueryResult(**result) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error in order book query: {str(e)}") + + +@router.post("/order-book/volume-for-price", response_model=OrderBookQueryResult) +async def get_volume_for_price( + request: VolumeForPriceRequest, + market_data_manager: MarketDataFeedManager = Depends(get_market_data_feed_manager) +): + """ + Get the volume available at a specific price level on the order book. + + Args: + request: Request with connector, trading pair, price, and side + market_data_manager: Injected market data feed manager + + Returns: + Order book query result with volume information + """ + try: + result = await market_data_manager.get_order_book_query_result( + request.connector_name, + request.trading_pair, + request.is_buy, + price=request.price + ) + + if "error" in result: + raise HTTPException(status_code=500, detail=result["error"]) + + return OrderBookQueryResult(**result) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error in order book query: {str(e)}") + + +@router.post("/order-book/price-for-quote-volume", response_model=OrderBookQueryResult) +async def get_price_for_quote_volume( + request: PriceForQuoteVolumeRequest, + market_data_manager: MarketDataFeedManager = Depends(get_market_data_feed_manager) +): + """ + Get the price required to fill a specific quote volume on the order book. 
+ + Args: + request: Request with connector, trading pair, quote volume, and side + market_data_manager: Injected market data feed manager + + Returns: + Order book query result with price and volume information + """ + try: + result = await market_data_manager.get_order_book_query_result( + request.connector_name, + request.trading_pair, + request.is_buy, + quote_volume=request.quote_volume + ) + + if "error" in result: + raise HTTPException(status_code=500, detail=result["error"]) + + return OrderBookQueryResult(**result) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error in order book query: {str(e)}") + + +@router.post("/order-book/quote-volume-for-price", response_model=OrderBookQueryResult) +async def get_quote_volume_for_price( + request: QuoteVolumeForPriceRequest, + market_data_manager: MarketDataFeedManager = Depends(get_market_data_feed_manager) +): + """ + Get the quote volume available at a specific price level on the order book. + + Args: + request: Request with connector, trading pair, price, and side + market_data_manager: Injected market data feed manager + + Returns: + Order book query result with quote volume information + """ + try: + result = await market_data_manager.get_order_book_query_result( + request.connector_name, + request.trading_pair, + request.is_buy, + quote_price=request.price + ) + + if "error" in result: + raise HTTPException(status_code=500, detail=result["error"]) + + return OrderBookQueryResult(**result) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error in order book query: {str(e)}") + + +@router.post("/order-book/vwap-for-volume", response_model=OrderBookQueryResult) +async def get_vwap_for_volume( + request: VWAPForVolumeRequest, + market_data_manager: MarketDataFeedManager = Depends(get_market_data_feed_manager) +): + """ + Get the VWAP (Volume Weighted Average Price) for a specific volume on the order book. 
+ + Args: + request: Request with connector, trading pair, volume, and side + market_data_manager: Injected market data feed manager + + Returns: + Order book query result with VWAP information + """ + try: + result = await market_data_manager.get_order_book_query_result( + request.connector_name, + request.trading_pair, + request.is_buy, + vwap_volume=request.volume + ) + + if "error" in result: + raise HTTPException(status_code=500, detail=result["error"]) + + return OrderBookQueryResult(**result) + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error in order book query: {str(e)}") + + From 6fd8a08ec9b6256a7b204c2e6a20a83b817b6716 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 1 Jul 2025 02:07:30 +0200 Subject: [PATCH 171/244] (feat) add error handling for spot funding info request --- routers/market_data.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/routers/market_data.py b/routers/market_data.py index 4e479e34..9be143b0 100644 --- a/routers/market_data.py +++ b/routers/market_data.py @@ -187,6 +187,8 @@ async def get_funding_info( HTTPException: 400 for non-perpetual connectors, 500 for other errors """ try: + if "_perpetual" not in request.connector_name.lower(): + raise HTTPException(status_code=400, detail="Funding info is only available for perpetual trading pairs.") funding_info = await market_data_manager.get_funding_info( request.connector_name, request.trading_pair From ca19b9552645876f63a865e41e16ca8802480d5b Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 1 Jul 2025 02:20:52 +0200 Subject: [PATCH 172/244] (feat) update readme --- README.md | 109 ++++++++++++++++++++++++++++++++++++++++++++---------- 1 file changed, 90 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index 20e2c013..71e04853 100644 --- a/README.md +++ b/README.md @@ -110,55 +110,126 @@ Once the API is running, you can access it at `http://localhost:8000` The Hummingbot API is organized into several functional routers: -### =3 Docker Management (`/docker`) +### 🐳 Docker Management (`/docker`) - Check running containers and images -- Pull new Docker images +- Pull new Docker images - Start, stop, and remove containers - Monitor container status and health +- Clean up exited containers +- Archive container data locally or to S3 -### =d Account Management (`/accounts`) +### 💳 Account Management (`/accounts`) - Create and delete trading accounts - Add/remove exchange credentials - Monitor account states and balances - View portfolio distribution - Track positions and funding payments -### =� Trading Operations (`/trading`) -- Place and cancel orders across exchanges -- Monitor order status and execution -- Set leverage and position modes -- View trade history and performance -- Real-time portfolio monitoring - -### > Bot Orchestration (`/bot-orchestration`) +### 💹 Trading Operations (`/trading`) +**Enhanced with cursor-based pagination and comprehensive order/trade management** +- **Order Management**: Place, cancel, and monitor orders across all exchanges +- **Position Tracking**: Real-time positions with PnL, margin, and funding data +- **Historical Data**: Paginated order history with advanced filtering +- **Active Orders**: Live order monitoring from connector in-flight orders +- **Trade History**: Complete trade execution records with filtering +- **Funding Payments**: Historical funding payment tracking for perpetual positions +- **Portfolio Monitoring**: Real-time balance and portfolio state tracking +- **Position 
Modes**: Configure HEDGE/ONEWAY modes for perpetual trading
+- **Leverage Management**: Set and adjust leverage per trading pair
+
+### 🤖 Bot Orchestration (`/bot-orchestration`)
 - Discover and manage active bots
 - Deploy new Hummingbot instances
 - Start/stop automated strategies
 - Monitor bot performance in real-time
 
-### <� Strategy Management
+### 📋 Strategy Management
 - **Controllers** (`/controllers`): Manage advanced strategy controllers
 - **Scripts** (`/scripts`): Handle traditional Hummingbot scripts
 - Create, edit, and remove strategy files
 - Configure strategy parameters
 
-### =� Market Data (`/market-data`)
-- Access real-time and historical candles
-- Get trading rules and exchange information
-- Monitor funding rates
-- Stream live market data
+### 📊 Market Data (`/market-data`)
+**Completely enhanced with professional order book analysis and real-time data**
+- **Price Discovery**: Real-time prices for multiple trading pairs, funding rates, mark/index prices
+- **Order Book Analysis**: Live snapshots, price impact analysis, liquidity analysis, VWAP calculations
+- **Historical Data**: Real-time and historical candle data with configurable intervals
+- **Feed Management**: Active feed monitoring, automatic cleanup, lifecycle management
 
-### =, Backtesting (`/backtesting`)
+### 🔄 Backtesting (`/backtesting`)
 - Test strategies against historical data
 - Analyze strategy performance
 - Optimize parameters
 
-### =� Analytics (`/archived-bots`)
+### 📈 Analytics (`/archived-bots`)
 - Analyze performance of stopped bots
 - Generate comprehensive reports
 - Review historical trades and orders
 - Extract insights from past strategies
 
+### 🗄️ Database Management (`/databases`)
+- List and manage bot databases
+- Query trading data across multiple bots
+- Analyze historical performance
+- Database health monitoring
+
+## Configuration
+
+### Environment Variables
+Key configuration options available in `.env`:
+
+- **CONFIG_PASSWORD**: Encrypts API keys and credentials
+- **USERNAME/PASSWORD**: API authentication credentials
+- **BROKER_HOST/PORT**: EMQX message broker settings
+- **DATABASE_URL**: PostgreSQL connection string
+- **ACCOUNT_UPDATE_INTERVAL**: Balance update frequency (minutes)
+- **AWS_API_KEY/AWS_SECRET_KEY**: S3 archiving (optional)
+- **BANNED_TOKENS**: Comma-separated list of tokens to exclude
+- **LOGFIRE_TOKEN**: Observability and monitoring (production)
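A sample `.env` sketch for the variables listed above. Every value is a placeholder rather than a project default, except `USERNAME`/`PASSWORD`, whose `admin` fallbacks appear in `main.py`, and the standard EMQX MQTT port:

```bash
# Illustrative .env; variable names come from the list above, values are placeholders.
CONFIG_PASSWORD=change-me-encryption-password
USERNAME=admin
PASSWORD=admin
BROKER_HOST=localhost
BROKER_PORT=1883                 # EMQX's standard MQTT port
DATABASE_URL=postgresql://user:pass@localhost:5432/hummingbot_api  # placeholder DSN
ACCOUNT_UPDATE_INTERVAL=5        # minutes
BANNED_TOKENS=TOKEN1,TOKEN2      # illustrative token symbols
```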
+
+### Bot Instance Structure
+Each bot maintains its own isolated environment:
+```
+bots/instances/hummingbot-{name}/
+├── conf/       # Configuration files
+├── data/       # Bot databases and state
+└── logs/       # Execution logs
+```
+
+## Development
+
+### Code Quality Tools
+```bash
+# Install pre-commit hooks
+make install-pre-commit
+
+# Format code (runs automatically)
+black --line-length 130 .
+isort --line-length 130 --profile black .
+```
+
+### Testing
+The API includes comprehensive backtesting capabilities. Test using:
+- Backtesting router for strategy validation
+- Swagger UI at `http://localhost:8000/docs`
+- Integration testing with live containers
+
+## Architecture
+
+### Core Components
+1. **FastAPI Application**: HTTP API with Basic Auth
+2. **Docker Service**: Container lifecycle management
+3. **Bot Orchestrator**: Strategy deployment and monitoring
+4. **Accounts Service**: Multi-exchange account management
+5. **Market Data Manager**: Real-time feeds and historical data
+6. **MQTT Broker**: Real-time bot communication
+
+### Data Models
+- Orders and trades with multi-account support
+- Portfolio states and balance tracking
+- Position management for perpetual trading
+- Historical performance analytics
+
 ## Authentication
 
 All API endpoints require HTTP Basic Authentication. Include your configured credentials in all requests:

From 7bb0737ebfdff98460633cedab91574be2362c35 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Tue, 1 Jul 2025 15:45:57 +0200
Subject: [PATCH 173/244] (feat) improve env loading

---
 config.py | 1 +
 main.py   | 9 ++++++---
 run.sh    | 1 +
 3 files changed, 8 insertions(+), 3 deletions(-)

diff --git a/config.py b/config.py
index 17ac11a7..0e1a2597 100644
--- a/config.py
+++ b/config.py
@@ -111,6 +111,7 @@ class Settings(BaseSettings):
     model_config = SettingsConfigDict(
         env_file=".env",
         env_file_encoding="utf-8",
+        env_prefix="",
         extra="ignore"
     )
 
diff --git a/main.py b/main.py
index 34272c15..863c103e 100644
--- a/main.py
+++ b/main.py
@@ -10,7 +10,6 @@
 from hummingbot.data_feed.market_data_provider import MarketDataProvider
 from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger
 
-from config import settings
 from utils.security import BackendAPISecurity
 from services.bots_orchestrator import BotsOrchestrator
 from services.accounts_service import AccountsService
@@ -34,6 +33,12 @@
 # Configure logging
 import logging
 
+# Load environment variables early
+load_dotenv()
+
+from config import settings
+
+
 # Set up logging configuration
 logging.basicConfig(
     level=logging.INFO,
@@ -43,8 +48,6 @@
 # Enable debug logging for MQTT manager
 logging.getLogger('services.mqtt_manager').setLevel(logging.DEBUG)
 
-# Load environment variables early
-load_dotenv()
 
 # Get settings from Pydantic Settings
 username = settings.security.username
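The reordering matters because `config.py` instantiates its Pydantic `Settings` object at import time, so `.env` must already be loaded into `os.environ` before `config` is imported. A schematic illustration (not repository code):

```python
# Schematic only: pydantic settings are read from the environment when the
# Settings object is built, which happens at import time inside config.py.
from dotenv import load_dotenv

load_dotenv()  # 1) populate os.environ from .env first

from config import settings  # noqa: E402  (2: Settings now sees those values)
```

diff --git a/run.sh b/run.sh
index 9ff5c468..b07ef3d8 100755
--- a/run.sh
+++ b/run.sh
@@ -9,6 +9,7 @@ if [[ "$1" == "--dev" ]]; then
     echo "Running API from source..." 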
# Activate conda environment and run with uvicorn docker compose up emqx postgres -d + source "$(conda info --base)/etc/profile.d/conda.sh" conda activate backend-api uvicorn main:app --reload else From 5a7b96d7eaa6b1b4888617bdddd39b76434ab1ad Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 1 Jul 2025 16:11:40 +0200 Subject: [PATCH 174/244] (feat) move load dotenv earlier --- main.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/main.py b/main.py index 863c103e..5abaa86a 100644 --- a/main.py +++ b/main.py @@ -4,6 +4,9 @@ import logfire from dotenv import load_dotenv +# Load environment variables early +load_dotenv() + from fastapi import Depends, FastAPI, HTTPException, status from fastapi.security import HTTPBasic, HTTPBasicCredentials from fastapi.middleware.cors import CORSMiddleware @@ -32,10 +35,6 @@ # Configure logging import logging - -# Load environment variables early -load_dotenv() - from config import settings From 4d2dfc81016db8a1cabe72486cbe8214aa61e4b1 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 1 Jul 2025 19:08:39 +0200 Subject: [PATCH 175/244] (feat) fix connector initializations --- utils/connector_manager.py | 86 +++++++++++++++----------------------- 1 file changed, 33 insertions(+), 53 deletions(-) diff --git a/utils/connector_manager.py b/utils/connector_manager.py index 271268d6..2a8f465a 100644 --- a/utils/connector_manager.py +++ b/utils/connector_manager.py @@ -2,6 +2,9 @@ import logging from typing import Dict, List, Optional +# Create module-specific logger +logger = logging.getLogger(__name__) + from hummingbot.client.config.client_config_map import ClientConfigMap from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger from hummingbot.client.config.config_helpers import ClientConfigAdapter, ReadOnlyClientConfigAdapter, get_connector_class @@ -45,14 +48,9 @@ async def get_connector(self, account_name: str, connector_name: str): if cache_key in self._connector_cache: return self._connector_cache[cache_key] - try: - # Create connector with full initialization - connector = await self._create_and_initialize_connector(account_name, connector_name) - self._connector_cache[cache_key] = connector - return connector - except Exception as e: - logging.error(f"Error creating connector {connector_name} for account {account_name}: {e}") - raise + # Create connector with full initialization + connector = await self._create_and_initialize_connector(account_name, connector_name) + return connector def _create_connector(self, account_name: str, connector_name: str): """ @@ -68,8 +66,8 @@ def _create_connector(self, account_name: str, connector_name: str): keys = BackendAPISecurity.api_keys(connector_name) # Debug logging - logging.info(f"Creating connector {connector_name} for account {account_name}") - logging.info(f"API keys retrieved: {list(keys.keys()) if keys else 'None'}") + logger.info(f"Creating connector {connector_name} for account {account_name}") + logger.debug(f"API keys retrieved: {list(keys.keys()) if keys else 'None'}") read_only_config = ReadOnlyClientConfigAdapter.lock_config(client_config_map) @@ -81,7 +79,7 @@ def _create_connector(self, account_name: str, connector_name: str): ) # Debug logging - logging.info(f"Init params keys: {list(init_params.keys())}") + logger.debug(f"Init params keys: {list(init_params.keys())}") connector_class = get_connector_class(connector_name) connector = connector_class(**init_params) @@ -184,25 +182,6 @@ def is_connector_initialized(self, account_name: str, 
connector_name: str) -> bo cache_key = f"{account_name}:{connector_name}" return cache_key in self._connector_cache - async def get_connector_state(self, account_name: str, connector_name: str) -> Dict[str, any]: - """ - Get the current state of a connector (balances, trading rules, etc). - - :param account_name: The name of the account. - :param connector_name: The name of the connector. - :return: Dictionary containing connector state information. - """ - connector = await self.get_connector(account_name, connector_name) - - return { - "balances": {k: float(v) for k, v in connector.get_all_balances().items()}, - "available_balances": {k: float(connector.get_available_balance(k)) - for k in connector.get_all_balances().keys()}, - "is_ready": connector.ready, - "name": connector.name, - "trading_required": connector.is_trading_required - } - async def _create_and_initialize_connector(self, account_name: str, connector_name: str) -> ConnectorBase: """ Create and fully initialize a connector with all necessary setup. @@ -215,10 +194,23 @@ async def _create_and_initialize_connector(self, account_name: str, connector_na """ # Create the base connector connector = self._create_connector(account_name, connector_name) - + cache_key = f"{account_name}:{connector_name}" + + # Initialize symbol map + await connector._initialize_trading_pair_symbol_map() + + # Update initial balances + await connector._update_balances() + + # Set default position mode to HEDGE for perpetual connectors + if "_perpetual" in connector_name: + if PositionMode.HEDGE in connector.supported_position_modes(): + connector.set_position_mode(PositionMode.HEDGE) + await connector._update_positions() + + self._connector_cache[cache_key] = connector # Start order tracking if db_manager is available if self.db_manager: - cache_key = f"{account_name}:{connector_name}" if cache_key not in self._orders_recorders: # Import OrdersRecorder dynamically to avoid circular imports from services.orders_recorder import OrdersRecorder @@ -241,19 +233,7 @@ async def _create_and_initialize_connector(self, account_name: str, connector_na # Start the connector's network without order book tracker self._start_network_without_order_book(connector) - # Initialize symbol map - await connector._initialize_trading_pair_symbol_map() - - # Update initial balances - await connector._update_balances() - - # Set default position mode to HEDGE for perpetual connectors - if "_perpetual" in connector_name: - if PositionMode.HEDGE in connector.supported_position_modes(): - connector.set_position_mode(PositionMode.HEDGE) - await connector._update_positions() - - logging.info(f"Initialized connector {connector_name} for account {account_name}") + logger.info(f"Initialized connector {connector_name} for account {account_name}") return connector def _start_network_without_order_book(self, connector: ExchangePyBase): @@ -270,10 +250,10 @@ def _start_network_without_order_book(self, connector: ExchangePyBase): connector._user_stream_event_listener_task = safe_ensure_future(connector._user_stream_event_listener()) connector._lost_orders_update_task = safe_ensure_future(connector._lost_orders_update_polling_loop()) - logging.info(f"Started connector network without order book tracker") + logger.debug(f"Started connector network without order book tracker") except Exception as e: - logging.error(f"Error starting connector network without order book: {e}") + logger.error(f"Error starting connector network without order book: {e}") async def stop_connector(self, 
account_name: str, connector_name: str):
         """
@@ -289,27 +269,27 @@ async def stop_connector(self, account_name: str, connector_name: str):
             try:
                 await self._orders_recorders[cache_key].stop()
                 del self._orders_recorders[cache_key]
-                logging.info(f"Stopped order recorder for {account_name}/{connector_name}")
+                logger.info(f"Stopped order recorder for {account_name}/{connector_name}")
             except Exception as e:
-                logging.error(f"Error stopping order recorder for {account_name}/{connector_name}: {e}")
+                logger.error(f"Error stopping order recorder for {account_name}/{connector_name}: {e}")
 
         # Stop funding recorder if exists
         if cache_key in self._funding_recorders:
             try:
                 await self._funding_recorders[cache_key].stop()
                 del self._funding_recorders[cache_key]
-                logging.info(f"Stopped funding recorder for {account_name}/{connector_name}")
+                logger.info(f"Stopped funding recorder for {account_name}/{connector_name}")
             except Exception as e:
-                logging.error(f"Error stopping funding recorder for {account_name}/{connector_name}: {e}")
+                logger.error(f"Error stopping funding recorder for {account_name}/{connector_name}: {e}")
 
         # Stop connector network if exists
         if cache_key in self._connector_cache:
             try:
                 connector = self._connector_cache[cache_key]
                 await connector.stop_network()
-                logging.info(f"Stopped connector network for {account_name}/{connector_name}")
+                logger.info(f"Stopped connector network for {account_name}/{connector_name}")
             except Exception as e:
-                logging.error(f"Error stopping connector network for {account_name}/{connector_name}: {e}")
+                logger.error(f"Error stopping connector network for {account_name}/{connector_name}: {e}")
 
     async def stop_all_connectors(self):
         """

From abf6c40d9b27ac871abf94513d6fe97f17e023f2 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Tue, 1 Jul 2025 19:11:52 +0200
Subject: [PATCH 176/244] (feat) improve logging by module

---
 main.py                      |  3 +-
 routers/bot_orchestration.py | 23 ++++++----
 routers/trading.py           | 14 ++++--
 services/accounts_service.py | 89 +++++++++++++++++++-----------------
 services/docker_service.py   | 21 +++++----
 services/funding_recorder.py |  8 ++--
 services/orders_recorder.py  | 73 +++++++++++++++--------------
 utils/file_system.py         | 13 ++++--
 8 files changed, 133 insertions(+), 111 deletions(-)

diff --git a/main.py b/main.py
index 5abaa86a..479dd552 100644
--- a/main.py
+++ b/main.py
@@ -3,6 +3,7 @@
 from typing import Annotated
 
 import logfire
+import logging
 from dotenv import load_dotenv
 
 # Load environment variables early
 load_dotenv()
@@ -33,8 +34,6 @@
     trading
 )
 
-# Configure logging
-import logging
 
 from config import settings
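The pattern this patch applies across modules: swap calls on the root `logging` module for a named, module-level logger, so records carry the module path and per-module levels can be tuned (as `main.py` already does for `services.mqtt_manager`). A schematic template, not a file from this repo:

```python
# Schematic module template for per-module logging.
import logging

logger = logging.getLogger(__name__)  # name becomes e.g. "routers.trading"

def handler() -> None:
    logger.info("request handled")  # record is attributed to this module

# Elsewhere, one module can be made chattier without touching the rest:
logging.getLogger("routers.trading").setLevel(logging.DEBUG)
```

diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py
index c51c59b3..fbd6036a 100644
--- a/routers/bot_orchestration.py
+++ b/routers/bot_orchestration.py
@@ -4,6 +4,9 @@
 from datetime import datetime
 from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks
 
+# Create module-specific logger
+logger = logging.getLogger(__name__)
+
 from models import StartBotAction, StopBotAction, V2ScriptDeployment, V2ControllerDeployment
 from services.bots_orchestrator import BotsOrchestrator
 from services.docker_service import DockerService
@@ -164,10 +167,10 @@ async def _background_stop_and_archive(
 ):
     """Background task to handle the stop and archive process"""
     try:
-        logging.info(f"Starting background stop-and-archive for {bot_name}")
+        logger.info(f"Starting background stop-and-archive for {bot_name}")
 
         # Step 1: Stop the bot trading process
-        logging.info(f"Stopping bot trading process for 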
{bot_name_for_orchestrator}") + logger.info(f"Stopping bot trading process for {bot_name_for_orchestrator}") stop_response = await bots_manager.stop_bot( bot_name_for_orchestrator, skip_order_cancellation=skip_order_cancellation, @@ -176,11 +179,11 @@ async def _background_stop_and_archive( if not stop_response or not stop_response.get("success", False): error_msg = stop_response.get('error', 'Unknown error') if stop_response else 'No response from bot orchestrator' - logging.error(f"Failed to stop bot process: {error_msg}") + logger.error(f"Failed to stop bot process: {error_msg}") return # Step 2: Wait for graceful shutdown (15 seconds as requested) - logging.info(f"Waiting 15 seconds for bot {bot_name} to gracefully shutdown") + logger.info(f"Waiting 15 seconds for bot {bot_name} to gracefully shutdown") await asyncio.sleep(15) # Step 3: Stop the container with monitoring @@ -189,34 +192,34 @@ async def _background_stop_and_archive( container_stopped = False for i in range(max_retries): - logging.info(f"Attempting to stop container {container_name} (attempt {i+1}/{max_retries})") + logger.info(f"Attempting to stop container {container_name} (attempt {i+1}/{max_retries})") docker_manager.stop_container(container_name) # Check if container is already stopped container_status = docker_manager.get_container_status(container_name) if container_status.get("state", {}).get("status") == "exited": container_stopped = True - logging.info(f"Container {container_name} is already stopped") + logger.info(f"Container {container_name} is already stopped") break await asyncio.sleep(retry_interval) if not container_stopped: - logging.error(f"Failed to stop container {container_name} after {max_retries} attempts") + logger.error(f"Failed to stop container {container_name} after {max_retries} attempts") return # Step 4: Archive the bot data instance_dir = os.path.join('bots', 'instances', container_name) - logging.info(f"Archiving bot data from {instance_dir}") + logger.info(f"Archiving bot data from {instance_dir}") try: if archive_locally: bot_archiver.archive_locally(container_name, instance_dir) else: bot_archiver.archive_and_upload(container_name, instance_dir, bucket_name=s3_bucket) - logging.info(f"Successfully archived bot data for {container_name}") + logger.info(f"Successfully archived bot data for {container_name}") except Exception as e: - logging.error(f"Archive failed: {str(e)}") + logger.error(f"Archive failed: {str(e)}") # Continue with removal even if archive fails # Step 5: Remove the container diff --git a/routers/trading.py b/routers/trading.py index ca1047de..f17a4435 100644 --- a/routers/trading.py +++ b/routers/trading.py @@ -1,6 +1,10 @@ +import logging from typing import Dict, List, Optional from fastapi import APIRouter, HTTPException, Depends + +# Create module-specific logger +logger = logging.getLogger(__name__) from pydantic import BaseModel from hummingbot.core.data_type.common import PositionMode, TradeType, OrderType, PositionAction from starlette import status @@ -149,7 +153,7 @@ async def get_positions( except Exception as e: # Log error but continue with other connectors import logging - logging.warning(f"Failed to get positions for {account_name}/{connector_name}: {e}") + logger.warning(f"Failed to get positions for {account_name}/{connector_name}: {e}") # Sort by cursor_id for consistent pagination all_positions.sort(key=lambda x: x.get("_cursor_id", "")) @@ -247,7 +251,7 @@ async def get_active_orders( except Exception as e: # Log error but continue with other 
connectors import logging - logging.warning(f"Failed to get active orders for {account_name}/{connector_name}: {e}") + logger.warning(f"Failed to get active orders for {account_name}/{connector_name}: {e}") # Sort by cursor_id for consistent pagination all_active_orders.sort(key=lambda x: x.get("_cursor_id", "")) @@ -333,7 +337,7 @@ async def get_orders( except Exception as e: # Log error but continue with other accounts import logging - logging.warning(f"Failed to get orders for {account_name}: {e}") + logger.warning(f"Failed to get orders for {account_name}: {e}") # Apply filters for multiple values if filter_request.connector_names and len(filter_request.connector_names) > 1: @@ -425,7 +429,7 @@ async def get_trades( except Exception as e: # Log error but continue with other accounts import logging - logging.warning(f"Failed to get trades for {account_name}: {e}") + logger.warning(f"Failed to get trades for {account_name}: {e}") # Apply filters for multiple values if filter_request.connector_names and len(filter_request.connector_names) > 1: @@ -623,7 +627,7 @@ async def get_funding_payments( except Exception as e: # Log error but continue with other connectors import logging - logging.warning(f"Failed to get funding payments for {account_name}/{connector_name}: {e}") + logger.warning(f"Failed to get funding payments for {account_name}/{connector_name}: {e}") # Sort by timestamp (most recent first) and then by cursor_id for consistency all_funding_payments.sort(key=lambda x: (x.get("timestamp", ""), x.get("_cursor_id", "")), reverse=True) diff --git a/services/accounts_service.py b/services/accounts_service.py index 3b12c979..79aee690 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -1,6 +1,9 @@ import asyncio import logging from datetime import datetime + +# Create module-specific logger +logger = logging.getLogger(__name__) from decimal import Decimal from typing import Dict, List, Optional @@ -82,18 +85,18 @@ async def stop(self): Stop all accounts service tasks and cleanup resources. This is the main cleanup method that should be called during application shutdown. 
""" - logging.info("Stopping AccountsService...") + logger.info("Stopping AccountsService...") # Stop the account state update loop if self._update_account_state_task: self._update_account_state_task.cancel() self._update_account_state_task = None - logging.info("Stopped account state update loop") + logger.info("Stopped account state update loop") # Stop all connectors through the ConnectorManager await self.connector_manager.stop_all_connectors() - logging.info("AccountsService stopped successfully") + logger.info("AccountsService stopped successfully") async def update_account_state_loop(self): """ @@ -107,7 +110,7 @@ async def update_account_state_loop(self): await self.update_account_state() await self.dump_account_state() except Exception as e: - logging.error(f"Error updating account state: {e}") + logger.error(f"Error updating account state: {e}") finally: await asyncio.sleep(self.update_account_state_interval) @@ -129,7 +132,7 @@ async def dump_account_state(self): await repository.save_account_state(account_name, connector_name, tokens_info) except Exception as e: - logging.error(f"Error saving account state to database: {e}") + logger.error(f"Error saving account state to database: {e}") # Re-raise the exception since we no longer have a fallback raise @@ -154,7 +157,7 @@ async def load_account_state_history(self, end_time=end_time ) except Exception as e: - logging.error(f"Error loading account state history from database: {e}") + logger.error(f"Error loading account state history from database: {e}") # Return empty result since we no longer have a fallback return [], None, False @@ -180,9 +183,8 @@ async def _ensure_account_connectors_initialized(self, account_name: str): if not self.connector_manager.is_connector_initialized(account_name, connector_name): # Get connector will now handle all initialization await self.connector_manager.get_connector(account_name, connector_name) - except Exception as e: - logging.error(f"Error initializing connector {connector_name} for account {account_name}: {e}") + logger.error(f"Error initializing connector {connector_name} for account {account_name}: {e}") @@ -198,7 +200,7 @@ async def update_account_state(self): tokens_info = await self._get_connector_tokens_info(connector, connector_name) self.accounts_state[account_name][connector_name] = tokens_info except Exception as e: - logging.error(f"Error updating balances for connector {connector_name} in account {account_name}: {e}") + logger.error(f"Error updating balances for connector {connector_name} in account {account_name}: {e}") self.accounts_state[account_name][connector_name] = [] async def _get_connector_tokens_info(self, connector, connector_name: str) -> List[Dict]: @@ -232,10 +234,10 @@ async def _safe_get_last_traded_prices(self, connector, trading_pairs, timeout=1 last_traded = await asyncio.wait_for(connector.get_last_traded_prices(trading_pairs=trading_pairs), timeout=timeout) return last_traded except asyncio.TimeoutError: - logging.error(f"Timeout getting last traded prices for trading pairs {trading_pairs}") + logger.error(f"Timeout getting last traded prices for trading pairs {trading_pairs}") return {pair: Decimal("0") for pair in trading_pairs} except Exception as e: - logging.error(f"Error getting last traded prices in connector {connector} for trading pairs {trading_pairs}: {e}") + logger.error(f"Error getting last traded prices in connector {connector} for trading pairs {trading_pairs}: {e}") return {pair: Decimal("0") for pair in trading_pairs} def 
get_connector_config_map(self, connector_name: str):
@@ -248,15 +250,21 @@ def get_connector_config_map(self, connector_name: str):
 
     async def add_credentials(self, account_name: str, connector_name: str, credentials: dict):
         """
-        Add or update connector credentials and initialize the connector.
+        Add or update connector credentials and initialize the connector with validation.
 
         :param account_name: The name of the account.
         :param connector_name: The name of the connector.
         :param credentials: Dictionary containing the connector credentials.
+        :raises Exception: If credentials are invalid or connector cannot be initialized.
        """
-        # Update the connector keys (this saves the credentials to file)
-        await self.connector_manager.update_connector_keys(account_name, connector_name, credentials)
-
+        try:
+            # Update the connector keys (this saves the credentials to file and validates them)
+            await self.connector_manager.update_connector_keys(account_name, connector_name, credentials)
+        except Exception as e:
+            logger.error(f"Error adding connector credentials for account {account_name}: {e}")
+            await self.delete_credentials(account_name, connector_name)
+            raise  # propagate the failure to callers, matching the docstring
+
     @staticmethod
     def list_accounts():
         """
@@ -348,7 +355,7 @@ async def get_account_current_state(self, account_name: str) -> Dict[str, List[D
             repository = AccountRepository(session)
             return await repository.get_account_current_state(account_name)
         except Exception as e:
-            logging.error(f"Error getting account current state: {e}")
+            logger.error(f"Error getting account current state: {e}")
             # Fallback to in-memory state
             return self.accounts_state.get(account_name, {})
 
@@ -374,7 +381,7 @@ async def get_account_state_history(self,
                 end_time=end_time
             )
         except Exception as e:
-            logging.error(f"Error getting account state history: {e}")
+            logger.error(f"Error getting account state history: {e}")
             return [], None, False
 
@@ -388,7 +395,7 @@ async def get_connector_current_state(self, account_name: str, connector_name: s
             repository = AccountRepository(session)
             return await repository.get_connector_current_state(account_name, connector_name)
         except Exception as e:
-            logging.error(f"Error getting connector current state: {e}")
+            logger.error(f"Error getting connector current state: {e}")
             # Fallback to in-memory state
             return self.accounts_state.get(account_name, {}).get(connector_name, [])
 
@@ -416,7 +423,7 @@ async def get_connector_state_history(self,
                 end_time=end_time
             )
         except Exception as e:
-            logging.error(f"Error getting connector state history: {e}")
+            logger.error(f"Error getting connector state history: {e}")
             return [], None, False
 
@@ -430,7 +437,7 @@ async def get_all_unique_tokens(self) -> List[str]:
             repository = AccountRepository(session)
             return await repository.get_all_unique_tokens()
         except Exception as e:
-            logging.error(f"Error getting unique tokens: {e}")
+            logger.error(f"Error getting unique tokens: {e}")
             # Fallback to in-memory state
             tokens = set()
             for account_data in self.accounts_state.values():
@@ -450,7 +457,7 @@ async def get_token_current_state(self, token: str) -> List[Dict]:
             repository = AccountRepository(session)
             return await repository.get_token_current_state(token)
         except Exception as e:
-            logging.error(f"Error getting token current state: {e}")
+            logger.error(f"Error getting token current state: {e}")
             return []
 
     async def get_portfolio_value(self, account_name: Optional[str] = None) -> 
Dict[str, any]: @@ -464,7 +471,7 @@ async def get_portfolio_value(self, account_name: Optional[str] = None) -> Dict[ repository = AccountRepository(session) return await repository.get_portfolio_value(account_name) except Exception as e: - logging.error(f"Error getting portfolio value: {e}") + logger.error(f"Error getting portfolio value: {e}") # Fallback to in-memory calculation portfolio = {"accounts": {}, "total_value": 0} @@ -576,7 +583,7 @@ def get_portfolio_distribution(self, account_name: Optional[str] = None) -> Dict } except Exception as e: - logging.error(f"Error calculating portfolio distribution: {e}") + logger.error(f"Error calculating portfolio distribution: {e}") return { "total_portfolio_value": 0, "token_count": 0, @@ -642,7 +649,7 @@ def get_account_distribution(self) -> Dict[str, any]: } except Exception as e: - logging.error(f"Error calculating account distribution: {e}") + logger.error(f"Error calculating account distribution: {e}") return { "total_portfolio_value": 0, "account_count": 0, @@ -735,7 +742,7 @@ async def place_trade(self, account_name: str, connector_name: str, trading_pair if trading_pair in prices and "error" not in prices: price = Decimal(str(prices[trading_pair])) except Exception as e: - logging.error(f"Error getting market price for {trading_pair}: {e}") + logger.error(f"Error getting market price for {trading_pair}: {e}") notional_size = price * quantized_amount if notional_size < trading_rule.min_notional_size: @@ -767,14 +774,14 @@ async def place_trade(self, account_name: str, connector_name: str, trading_pair position_action=position_action ) - logging.info(f"Placed {trade_type} order for {amount} {trading_pair} on {connector_name} (Account: {account_name}). Order ID: {order_id}") + logger.info(f"Placed {trade_type} order for {amount} {trading_pair} on {connector_name} (Account: {account_name}). 
Order ID: {order_id}") return order_id except HTTPException: # Re-raise HTTP exceptions as-is raise except Exception as e: - logging.error(f"Failed to place {trade_type} order: {e}") + logger.error(f"Failed to place {trade_type} order: {e}") raise HTTPException(status_code=500, detail=f"Failed to place trade: {str(e)}") async def get_connector_instance(self, account_name: str, connector_name: str): @@ -833,10 +840,10 @@ async def cancel_order(self, account_name: str, connector_name: str, try: result = connector.cancel(trading_pair=trading_pair, client_order_id=client_order_id) - logging.info(f"Cancelled order {client_order_id} on {connector_name} (Account: {account_name})") + logger.info(f"Cancelled order {client_order_id} on {connector_name} (Account: {account_name})") return result except Exception as e: - logging.error(f"Failed to cancel order {client_order_id}: {e}") + logger.error(f"Failed to cancel order {client_order_id}: {e}") raise HTTPException(status_code=500, detail=f"Failed to cancel order: {str(e)}") async def set_leverage(self, account_name: str, connector_name: str, @@ -869,11 +876,11 @@ async def set_leverage(self, account_name: str, connector_name: str, try: await connector._execute_set_leverage(trading_pair, leverage) message = f"Leverage for {trading_pair} set to {leverage} on {connector_name}" - logging.info(f"Set leverage for {trading_pair} to {leverage} on {connector_name} (Account: {account_name})") + logger.info(f"Set leverage for {trading_pair} to {leverage} on {connector_name} (Account: {account_name})") return {"status": "success", "message": message} except Exception as e: - logging.error(f"Failed to set leverage for {trading_pair} to {leverage}: {e}") + logger.error(f"Failed to set leverage for {trading_pair} to {leverage}: {e}") raise HTTPException(status_code=500, detail=f"Failed to set leverage: {str(e)}") async def set_position_mode(self, account_name: str, connector_name: str, @@ -915,11 +922,11 @@ async def set_position_mode(self, account_name: str, connector_name: str, await result message = f"Position mode set to {position_mode.value} on {connector_name}" - logging.info(f"Set position mode to {position_mode.value} on {connector_name} (Account: {account_name})") + logger.info(f"Set position mode to {position_mode.value} on {connector_name} (Account: {account_name})") return {"status": "success", "message": message} except Exception as e: - logging.error(f"Failed to set position mode to {position_mode.value}: {e}") + logger.error(f"Failed to set position mode to {position_mode.value}: {e}") raise HTTPException(status_code=500, detail=f"Failed to set position mode: {str(e)}") async def get_position_mode(self, account_name: str, connector_name: str) -> Dict[str, str]: @@ -955,7 +962,7 @@ async def get_position_mode(self, account_name: str, connector_name: str) -> Dic } except Exception as e: - logging.error(f"Failed to get position mode: {e}") + logger.error(f"Failed to get position mode: {e}") raise HTTPException(status_code=500, detail=f"Failed to get position mode: {str(e)}") async def get_orders(self, account_name: Optional[str] = None, market: Optional[str] = None, @@ -980,7 +987,7 @@ async def get_orders(self, account_name: Optional[str] = None, market: Optional[ ) return [order_repo.to_dict(order) for order in orders] except Exception as e: - logging.error(f"Error getting orders: {e}") + logger.error(f"Error getting orders: {e}") return [] async def get_active_orders_history(self, account_name: Optional[str] = None, market: Optional[str] = None, @@ 
-998,7 +1005,7 @@ async def get_active_orders_history(self, account_name: Optional[str] = None, ma ) return [order_repo.to_dict(order) for order in orders] except Exception as e: - logging.error(f"Error getting active orders: {e}") + logger.error(f"Error getting active orders: {e}") return [] async def get_orders_summary(self, account_name: Optional[str] = None, start_time: Optional[int] = None, @@ -1015,7 +1022,7 @@ async def get_orders_summary(self, account_name: Optional[str] = None, start_tim end_time=end_time ) except Exception as e: - logging.error(f"Error getting orders summary: {e}") + logger.error(f"Error getting orders summary: {e}") return { "total_orders": 0, "filled_orders": 0, @@ -1047,7 +1054,7 @@ async def get_trades(self, account_name: Optional[str] = None, market: Optional[ ) return [trade_repo.to_dict(trade, order) for trade, order in trade_order_pairs] except Exception as e: - logging.error(f"Error getting trades: {e}") + logger.error(f"Error getting trades: {e}") return [] async def get_account_positions(self, account_name: str, connector_name: str) -> List[Dict]: @@ -1098,7 +1105,7 @@ async def get_account_positions(self, account_name: str, connector_name: str) -> return positions except Exception as e: - logging.error(f"Failed to get positions for {connector_name}: {e}") + logger.error(f"Failed to get positions for {connector_name}: {e}") raise HTTPException(status_code=500, detail=f"Failed to get positions: {str(e)}") async def get_funding_payments(self, account_name: str, connector_name: str = None, @@ -1129,7 +1136,7 @@ async def get_funding_payments(self, account_name: str, connector_name: str = No return [funding_repo.to_dict(payment) for payment in funding_payments] except Exception as e: - logging.error(f"Error getting funding payments: {e}") + logger.error(f"Error getting funding payments: {e}") return [] async def get_total_funding_fees(self, account_name: str, connector_name: str, @@ -1157,7 +1164,7 @@ async def get_total_funding_fees(self, account_name: str, connector_name: str, ) except Exception as e: - logging.error(f"Error getting total funding fees: {e}") + logger.error(f"Error getting total funding fees: {e}") return { "total_funding_fees": 0, "payment_count": 0, diff --git a/services/docker_service.py b/services/docker_service.py index 9fdc82ec..4e96093e 100644 --- a/services/docker_service.py +++ b/services/docker_service.py @@ -5,6 +5,9 @@ import threading from typing import Dict +# Create module-specific logger +logger = logging.getLogger(__name__) + import docker from docker.errors import DockerException from docker.types import LogConfig @@ -31,7 +34,7 @@ def __init__(self): # Start background cleanup thread self._start_cleanup_thread() except DockerException as e: - logging.error(f"It was not possible to connect to Docker. Please make sure Docker is running. Error: {e}") + logger.error(f"It was not possible to connect to Docker. Please make sure Docker is running. 
Error: {e}") def get_active_containers(self, name_filter: str = None): try: @@ -183,14 +186,14 @@ def create_hummingbot_instance(self, config: V2ScriptDeployment): if os.path.exists(source_controller_file): shutil.copy2(source_controller_file, destination_controller_file) - logging.info(f"Copied controller config: {controller_file}") + logger.info(f"Copied controller config: {controller_file}") else: - logging.warning(f"Controller config file {controller_file} not found in {controllers_config_dir}") + logger.warning(f"Controller config file {controller_file} not found in {controllers_config_dir}") except Exception as e: - logging.error(f"Error reading script config file {config.script_config}: {e}") + logger.error(f"Error reading script config file {config.script_config}: {e}") else: - logging.warning(f"Script config file {config.script_config} not found in {script_config_dir}") + logger.warning(f"Script config file {config.script_config} not found in {script_config_dir}") # Path relative to fs_util base_path (which is "bots") conf_file_path = f"instances/{instance_name}/conf/conf_client.yml" client_config = fs_util.read_yaml_file(conf_file_path) @@ -250,7 +253,7 @@ def _start_cleanup_thread(self): if self._cleanup_thread is None or not self._cleanup_thread.is_alive(): self._cleanup_thread = threading.Thread(target=self._periodic_cleanup, daemon=True) self._cleanup_thread.start() - logging.info("Started Docker pull status cleanup thread") + logger.info("Started Docker pull status cleanup thread") def _periodic_cleanup(self): """Periodically clean up old pull status entries""" @@ -258,7 +261,7 @@ def _periodic_cleanup(self): try: self._cleanup_old_pull_status() except Exception as e: - logging.error(f"Error in cleanup thread: {e}") + logger.error(f"Error in cleanup thread: {e}") # Wait for the next cleanup interval self._stop_cleanup.wait(self.CLEANUP_INTERVAL_SECONDS) @@ -282,7 +285,7 @@ def _cleanup_old_pull_status(self): # Remove old entries for image_name in to_remove: del self._pull_status[image_name] - logging.info(f"Cleaned up old pull status for {image_name}") + logger.info(f"Cleaned up old pull status for {image_name}") # If still over limit, remove oldest completed/failed entries if len(self._pull_status) > self.PULL_STATUS_MAX_ENTRIES: @@ -299,7 +302,7 @@ def _cleanup_old_pull_status(self): excess_count = len(self._pull_status) - self.PULL_STATUS_MAX_ENTRIES for i in range(min(excess_count, len(completed_entries))): del self._pull_status[completed_entries[i][0]] - logging.info(f"Cleaned up excess pull status for {completed_entries[i][0]}") + logger.info(f"Cleaned up excess pull status for {completed_entries[i][0]}") def pull_image_async(self, image_name: str): """Start pulling a Docker image asynchronously with status tracking""" diff --git a/services/funding_recorder.py b/services/funding_recorder.py index 2be61a90..a12ebcdf 100644 --- a/services/funding_recorder.py +++ b/services/funding_recorder.py @@ -40,21 +40,21 @@ def start(self, connector: ConnectorBase): for event, forwarder in self._event_pairs: connector.add_listener(event, forwarder) - logging.info(f"FundingRecorder started for {self.account_name}/{self.connector_name}") + self.logger.info(f"FundingRecorder started for {self.account_name}/{self.connector_name}") async def stop(self): """Stop recording funding payments""" if self._connector: for event, forwarder in self._event_pairs: self._connector.remove_listener(event, forwarder) - logging.info(f"FundingRecorder stopped for 
{self.account_name}/{self.connector_name}") + self.logger.info(f"FundingRecorder stopped for {self.account_name}/{self.connector_name}") def _did_funding_payment(self, event_tag: int, market: ConnectorBase, event: FundingPaymentCompletedEvent): """Handle funding payment events - called by SourceInfoEventForwarder""" try: asyncio.create_task(self._handle_funding_payment(event)) except Exception as e: - logging.error(f"Error in _did_funding_payment: {e}") + self.logger.error(f"Error in _did_funding_payment: {e}") async def _handle_funding_payment(self, event: FundingPaymentCompletedEvent): """Handle funding payment events""" @@ -72,7 +72,7 @@ async def _handle_funding_payment(self, event: FundingPaymentCompletedEvent): } break except Exception as e: - logging.warning(f"Could not get position data for funding payment: {e}") + self.logger.warning(f"Could not get position data for funding payment: {e}") # Record the funding payment await self.record_funding_payment(event, self.account_name, self.connector_name, position_data) diff --git a/services/orders_recorder.py b/services/orders_recorder.py index 458d4f74..6f640fb2 100644 --- a/services/orders_recorder.py +++ b/services/orders_recorder.py @@ -2,6 +2,9 @@ import logging import math import time + +# Create module-specific logger +logger = logging.getLogger(__name__) from typing import Any, Optional, Union from datetime import datetime from decimal import Decimal, InvalidOperation @@ -56,28 +59,28 @@ def start(self, connector: ConnectorBase): # Subscribe to order events using the same pattern as MarketsRecorder for event, forwarder in self._event_pairs: connector.add_listener(event, forwarder) - logging.info(f"OrdersRecorder: Added listener for {event} with forwarder {forwarder}") + logger.info(f"OrdersRecorder: Added listener for {event} with forwarder {forwarder}") # Debug: Check if listeners were actually added if hasattr(connector, '_event_listeners'): listeners = connector._event_listeners.get(event, []) - logging.info(f"OrdersRecorder: Event {event} now has {len(listeners)} listeners") + logger.info(f"OrdersRecorder: Event {event} now has {len(listeners)} listeners") for i, listener in enumerate(listeners): - logging.info(f"OrdersRecorder: Listener {i}: {listener}") + logger.info(f"OrdersRecorder: Listener {i}: {listener}") - logging.info(f"OrdersRecorder started for {self.account_name}/{self.connector_name} with {len(self._event_pairs)} event listeners") + logger.info(f"OrdersRecorder started for {self.account_name}/{self.connector_name} with {len(self._event_pairs)} event listeners") # Debug: Print connector info - logging.info(f"OrdersRecorder: Connector type: {type(connector)}") - logging.info(f"OrdersRecorder: Connector name: {getattr(connector, 'name', 'unknown')}") - logging.info(f"OrdersRecorder: Connector ready: {getattr(connector, 'ready', 'unknown')}") + logger.info(f"OrdersRecorder: Connector type: {type(connector)}") + logger.info(f"OrdersRecorder: Connector name: {getattr(connector, 'name', 'unknown')}") + logger.info(f"OrdersRecorder: Connector ready: {getattr(connector, 'ready', 'unknown')}") # Test if forwarders are callable for event, forwarder in self._event_pairs: if callable(forwarder): - logging.info(f"OrdersRecorder: Forwarder for {event} is callable") + logger.info(f"OrdersRecorder: Forwarder for {event} is callable") else: - logging.error(f"OrdersRecorder: Forwarder for {event} is NOT callable: {type(forwarder)}") + logger.error(f"OrdersRecorder: Forwarder for {event} is NOT callable: {type(forwarder)}") async 
def stop(self): """Stop recording orders""" @@ -86,7 +89,7 @@ async def stop(self): for event, forwarder in self._event_pairs: self._connector.remove_listener(event, forwarder) - logging.info(f"OrdersRecorder stopped for {self.account_name}/{self.connector_name}") + logger.info(f"OrdersRecorder stopped for {self.account_name}/{self.connector_name}") def _extract_error_message(self, event) -> str: """Extract error message from various possible event attributes.""" @@ -102,46 +105,46 @@ def _extract_error_message(self, event) -> str: def _did_create_order(self, event_tag: int, market: ConnectorBase, event: Union[BuyOrderCreatedEvent, SellOrderCreatedEvent]): """Handle order creation events - called by SourceInfoEventForwarder""" - logging.info(f"OrdersRecorder: _did_create_order called for order {getattr(event, 'order_id', 'unknown')}") + logger.info(f"OrdersRecorder: _did_create_order called for order {getattr(event, 'order_id', 'unknown')}") try: # Determine trade type from event trade_type = TradeType.BUY if isinstance(event, BuyOrderCreatedEvent) else TradeType.SELL - logging.info(f"OrdersRecorder: Creating task to handle order created - {trade_type} order") + logger.info(f"OrdersRecorder: Creating task to handle order created - {trade_type} order") asyncio.create_task(self._handle_order_created(event, trade_type)) except Exception as e: - logging.error(f"Error in _did_create_order: {e}") + logger.error(f"Error in _did_create_order: {e}") def _did_fill_order(self, event_tag: int, market: ConnectorBase, event: OrderFilledEvent): """Handle order fill events - called by SourceInfoEventForwarder""" try: asyncio.create_task(self._handle_order_filled(event)) except Exception as e: - logging.error(f"Error in _did_fill_order: {e}") + logger.error(f"Error in _did_fill_order: {e}") def _did_cancel_order(self, event_tag: int, market: ConnectorBase, event: Any): """Handle order cancel events - called by SourceInfoEventForwarder""" try: asyncio.create_task(self._handle_order_cancelled(event)) except Exception as e: - logging.error(f"Error in _did_cancel_order: {e}") + logger.error(f"Error in _did_cancel_order: {e}") def _did_fail_order(self, event_tag: int, market: ConnectorBase, event: Any): """Handle order failure events - called by SourceInfoEventForwarder""" try: asyncio.create_task(self._handle_order_failed(event)) except Exception as e: - logging.error(f"Error in _did_fail_order: {e}") + logger.error(f"Error in _did_fail_order: {e}") def _did_complete_order(self, event_tag: int, market: ConnectorBase, event: Any): """Handle order completion events - called by SourceInfoEventForwarder""" try: asyncio.create_task(self._handle_order_completed(event)) except Exception as e: - logging.error(f"Error in _did_complete_order: {e}") + logger.error(f"Error in _did_complete_order: {e}") async def _handle_order_created(self, event: Union[BuyOrderCreatedEvent, SellOrderCreatedEvent], trade_type: TradeType): """Handle order creation events""" - logging.info(f"OrdersRecorder: _handle_order_created started for order {event.order_id}") + logger.info(f"OrdersRecorder: _handle_order_created started for order {event.order_id}") try: async with self.db_manager.get_session_context() as session: order_repo = OrderRepository(session) @@ -159,9 +162,9 @@ async def _handle_order_created(self, event: Union[BuyOrderCreatedEvent, SellOrd } await order_repo.create_order(order_data) - logging.info(f"OrdersRecorder: Successfully recorded order created: {event.order_id}") + logger.info(f"OrdersRecorder: Successfully recorded 
order created: {event.order_id}") except Exception as e: - logging.error(f"OrdersRecorder: Error recording order created: {e}") + logger.error(f"OrdersRecorder: Error recording order created: {e}") async def _handle_order_filled(self, event: OrderFilledEvent): """Handle order fill events""" @@ -187,7 +190,7 @@ async def _handle_order_filled(self, event: OrderFilledEvent): trade_fee_paid = float(fee_in_quote) trade_fee_currency = quote_asset except Exception as e: - logging.error(f"Error calculating trade fee: {e}") + logger.error(f"Error calculating trade fee: {e}") trade_fee_paid = 0 trade_fee_currency = None @@ -205,7 +208,7 @@ async def _handle_order_filled(self, event: OrderFilledEvent): fee_currency=trade_fee_currency ) except (ValueError, InvalidOperation) as e: - logging.error(f"Error processing order fill for {event.order_id}: {e}, skipping update") + logger.error(f"Error processing order fill for {event.order_id}: {e}, skipping update") return # Create trade record using validated values @@ -228,12 +231,12 @@ async def _handle_order_filled(self, event: OrderFilledEvent): } await trade_repo.create_trade(trade_data) except (ValueError, TypeError) as e: - logging.error(f"Error creating trade record for {event.order_id}: {e}") - logging.error(f"Trade data that failed: timestamp={event.timestamp}, amount={event.amount}, price={event.price}, fee={trade_fee_paid}") + logger.error(f"Error creating trade record for {event.order_id}: {e}") + logger.error(f"Trade data that failed: timestamp={event.timestamp}, amount={event.amount}, price={event.price}, fee={trade_fee_paid}") - logging.debug(f"Recorded order fill: {event.order_id} - {event.amount} @ {event.price}") + logger.debug(f"Recorded order fill: {event.order_id} - {event.amount} @ {event.price}") except Exception as e: - logging.error(f"Error recording order fill: {e}") + logger.error(f"Error recording order fill: {e}") async def _handle_order_cancelled(self, event: Any): """Handle order cancellation events""" @@ -245,9 +248,9 @@ async def _handle_order_cancelled(self, event: Any): status="CANCELLED" ) - logging.debug(f"Recorded order cancelled: {event.order_id}") + logger.debug(f"Recorded order cancelled: {event.order_id}") except Exception as e: - logging.error(f"Error recording order cancellation: {e}") + logger.error(f"Error recording order cancellation: {e}") def _get_order_details_from_connector(self, order_id: str) -> Optional[dict]: """Try to get order details from connector's tracked orders""" @@ -263,7 +266,7 @@ def _get_order_details_from_connector(self, order_id: str) -> Optional[dict]: "price": float(in_flight_order.price) if in_flight_order.price else None } except Exception as e: - logging.error(f"Error getting order details from connector: {e}") + logger.error(f"Error getting order details from connector: {e}") return None async def _handle_order_failed(self, event: Any): @@ -284,12 +287,12 @@ async def _handle_order_failed(self, event: Any): status="FAILED", error_message=error_msg ) - logging.info(f"Updated existing order {event.order_id} to FAILED status") + logger.info(f"Updated existing order {event.order_id} to FAILED status") else: # Try to get order details from connector's tracked orders order_details = self._get_order_details_from_connector(event.order_id) if order_details: - logging.info(f"Retrieved order details from connector for {event.order_id}: {order_details}") + logger.info(f"Retrieved order details from connector for {event.order_id}: {order_details}") # Create order record as FAILED with available 
details if order_details: @@ -321,10 +324,10 @@ async def _handle_order_failed(self, event: Any): } await order_repo.create_order(order_data) - logging.info(f"Created failed order record for {event.order_id}") + logger.info(f"Created failed order record for {event.order_id}") except Exception as e: - logging.error(f"Error recording order failure: {e}") + logger.error(f"Error recording order failure: {e}") async def _handle_order_completed(self, event: Any): """Handle order completion events""" @@ -336,6 +339,6 @@ async def _handle_order_completed(self, event: Any): order.status = "FILLED" order.exchange_order_id = getattr(event, 'exchange_order_id', None) - logging.debug(f"Recorded order completed: {event.order_id}") + logger.debug(f"Recorded order completed: {event.order_id}") except Exception as e: - logging.error(f"Error recording order completion: {e}") \ No newline at end of file + logger.error(f"Error recording order completion: {e}") \ No newline at end of file diff --git a/utils/file_system.py b/utils/file_system.py index c516eb29..76250ee5 100644 --- a/utils/file_system.py +++ b/utils/file_system.py @@ -2,6 +2,9 @@ import inspect import logging import os + +# Create module-specific logger +logger = logging.getLogger(__name__) import shutil import sys from pathlib import Path @@ -281,7 +284,7 @@ def load_script_config_class(script_name: str) -> Optional[Type[BaseClientModel] if issubclass(cls, BaseClientModel) and cls is not BaseClientModel: return cls except (ImportError, AttributeError, ModuleNotFoundError) as e: - logging.warning(f"Error loading script class for '{script_name}': {e}") + logger.warning(f"Error loading script class for '{script_name}': {e}") return None @staticmethod @@ -307,7 +310,7 @@ def load_controller_config_class(controller_type: str, controller_name: str) -> or (issubclass(cls, ControllerConfigBase) and cls is not ControllerConfigBase): return cls except (ImportError, AttributeError, ModuleNotFoundError) as e: - logging.warning(f"Error loading controller class for '{controller_type}.{controller_name}': {e}") + logger.warning(f"Error loading controller class for '{controller_type}.{controller_name}': {e}") return None def ensure_file_and_dump_text(self, file_path: str, text: str) -> None: @@ -345,7 +348,7 @@ def save_model_to_yml(self, yml_path: str, cm: ClientConfigAdapter) -> None: with open(full_path, "w", encoding="utf-8") as outfile: outfile.write(cm_yml_str) except Exception as e: - logging.error(f"Error writing configs to '{yml_path}': {e}", exc_info=True) + logger.error(f"Error writing configs to '{yml_path}': {e}", exc_info=True) raise def get_base_path(self) -> str: @@ -417,7 +420,7 @@ def list_databases(self) -> List[str]: if db_file.endswith(".sqlite") ]) except (OSError, PermissionError) as e: - logging.warning(f"Error accessing database path '{db_path}': {e}") + logger.warning(f"Error accessing database path '{db_path}': {e}") return archived_databases def list_checkpoints(self, full_path: bool = False) -> List[str]: @@ -444,7 +447,7 @@ def list_checkpoints(self, full_path: bool = False) -> List[str]: else: return checkpoint_files except (OSError, PermissionError) as e: - logging.warning(f"Error listing checkpoints in '{dir_path}': {e}") + logger.warning(f"Error listing checkpoints in '{dir_path}': {e}") return [] fs_util = FileSystemUtil() \ No newline at end of file From 0d9ce4e4a719a03d548b473c96ffe8d40d229b8d Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 1 Jul 2025 19:35:25 +0200 Subject: [PATCH 177/244] (feat) update v2 with 
controllers --- bots/scripts/v2_with_controllers.py | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/bots/scripts/v2_with_controllers.py b/bots/scripts/v2_with_controllers.py index df94f08a..1332fcdf 100644 --- a/bots/scripts/v2_with_controllers.py +++ b/bots/scripts/v2_with_controllers.py @@ -4,9 +4,7 @@ from hummingbot.client.hummingbot_application import HummingbotApplication from hummingbot.connector.connector_base import ConnectorBase -from hummingbot.core.clock import Clock from hummingbot.data_feed.candles_feed.data_types import CandlesConfig -from hummingbot.remote_iface.mqtt import ETopicPublisher from hummingbot.strategy.strategy_v2_base import StrategyV2Base, StrategyV2ConfigBase from hummingbot.strategy_v2.models.base import RunnableStatus from hummingbot.strategy_v2.models.executor_actions import CreateExecutorAction, StopExecutorAction @@ -41,19 +39,6 @@ def __init__(self, connectors: Dict[str, ConnectorBase], config: V2WithControlle self.drawdown_exited_controllers = [] self.closed_executors_buffer: int = 30 self._last_performance_report_timestamp = 0 - self.mqtt_enabled = HummingbotApplication.main_application()._mqtt is not None - self._pub: Optional[ETopicPublisher] = None - - def start(self, clock: Clock, timestamp: float) -> None: - """ - Start the strategy. - :param clock: Clock to use. - :param timestamp: Current time. - """ - self._last_timestamp = timestamp - self.apply_initial_setting() - if self.mqtt_enabled: - self._pub = ETopicPublisher("performance", use_bot_prefix=True) async def on_stop(self): await super().on_stop() @@ -107,7 +92,7 @@ def check_max_global_drawdown(self): HummingbotApplication.main_application().stop() def send_performance_report(self): - if self.current_timestamp - self._last_performance_report_timestamp >= self.performance_report_interval and self.mqtt_enabled: + if self.current_timestamp - self._last_performance_report_timestamp >= self.performance_report_interval and self._pub: performance_reports = {controller_id: self.get_performance_report(controller_id).dict() for controller_id in self.controllers.keys()} self._pub(performance_reports) self._last_performance_report_timestamp = self.current_timestamp From 935d375dd541acd29fc56db14b9e104cff12885c Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 1 Jul 2025 19:39:38 +0200 Subject: [PATCH 178/244] (feat) update on stop --- bots/scripts/v2_with_controllers.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/bots/scripts/v2_with_controllers.py b/bots/scripts/v2_with_controllers.py index 1332fcdf..d9c33621 100644 --- a/bots/scripts/v2_with_controllers.py +++ b/bots/scripts/v2_with_controllers.py @@ -40,12 +40,6 @@ def __init__(self, connectors: Dict[str, ConnectorBase], config: V2WithControlle self.closed_executors_buffer: int = 30 self._last_performance_report_timestamp = 0 - async def on_stop(self): - await super().on_stop() - if self.mqtt_enabled: - self._pub({controller_id: {} for controller_id in self.controllers.keys()}) - self._pub = None - def on_tick(self): super().on_tick() self.check_manual_kill_switch() From 3c7bc8d15972ad7fbafb02fe66438201596c345a Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 1 Jul 2025 19:46:09 +0200 Subject: [PATCH 179/244] (feat) improve active containers info --- services/docker_service.py | 50 +++++++++++++++++++++++++++++--------- 1 file changed, 39 insertions(+), 11 deletions(-) diff --git a/services/docker_service.py b/services/docker_service.py index 4e96093e..3ad89750 100644 --- 
a/services/docker_service.py +++ b/services/docker_service.py @@ -40,12 +40,26 @@ def get_active_containers(self, name_filter: str = None): try: all_containers = self.client.containers.list(filters={"status": "running"}) if name_filter: - containers_info = [{"id": container.id, "name": container.name, "status": container.status} for - container in all_containers if name_filter.lower() in container.name.lower()] + containers_info = [ + { + "id": container.id, + "name": container.name, + "status": container.status, + "image": container.image.tags[0] if container.image.tags else container.image.id[:12] + } + for container in all_containers if name_filter.lower() in container.name.lower() + ] else: - containers_info = [{"id": container.id, "name": container.name, "status": container.status} for - container in all_containers] - return {"active_instances": containers_info} + containers_info = [ + { + "id": container.id, + "name": container.name, + "status": container.status, + "image": container.image.tags[0] if container.image.tags else container.image.id[:12] + } + for container in all_containers + ] + return containers_info except DockerException as e: return str(e) @@ -72,14 +86,28 @@ def pull_image_sync(self, image_name): def get_exited_containers(self, name_filter: str = None): try: - all_containers = self.client.containers.list(filters={"status": "exited"}) + all_containers = self.client.containers.list(filters={"status": "exited"}, all=True) if name_filter: - containers_info = [{"id": container.id, "name": container.name, "status": container.status} for - container in all_containers if name_filter.lower() in container.name.lower()] + containers_info = [ + { + "id": container.id, + "name": container.name, + "status": container.status, + "image": container.image.tags[0] if container.image.tags else container.image.id[:12] + } + for container in all_containers if name_filter.lower() in container.name.lower() + ] else: - containers_info = [{"id": container.id, "name": container.name, "status": container.status} for - container in all_containers] - return {"exited_instances": containers_info} + containers_info = [ + { + "id": container.id, + "name": container.name, + "status": container.status, + "image": container.image.tags[0] if container.image.tags else container.image.id[:12] + } + for container in all_containers + ] + return containers_info except DockerException as e: return str(e) From aba389109ae5b35033ec5ead29afd49a0d6122a2 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 2 Jul 2025 00:48:20 +0200 Subject: [PATCH 180/244] (feat) simplify bot orchestration containers naming --- routers/bot_orchestration.py | 12 +++--------- services/docker_service.py | 2 +- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py index fbd6036a..11bb2001 100644 --- a/routers/bot_orchestration.py +++ b/routers/bot_orchestration.py @@ -264,15 +264,9 @@ async def stop_and_archive_bot( """ try: # Step 1: Normalize bot name and container name - # Handle both "process-king" and "hummingbot-process-king" input formats - if bot_name.startswith("hummingbot-"): - # If full container name is passed, extract the bot name - actual_bot_name = bot_name.replace("hummingbot-", "") - container_name = bot_name - else: - # If just bot name is passed, construct container name - actual_bot_name = bot_name - container_name = f"hummingbot-{bot_name}" + # Container name is now the same as bot name (no prefix added) + actual_bot_name = bot_name + 
container_name = bot_name logging.info(f"Normalized bot_name: {actual_bot_name}, container_name: {container_name}") diff --git a/services/docker_service.py b/services/docker_service.py index 3ad89750..033a9ca7 100644 --- a/services/docker_service.py +++ b/services/docker_service.py @@ -163,7 +163,7 @@ def remove_container(self, container_name, force=True): def create_hummingbot_instance(self, config: V2ScriptDeployment): bots_path = os.environ.get('BOTS_PATH', self.SOURCE_PATH) # Default to 'SOURCE_PATH' if BOTS_PATH is not set - instance_name = f"hummingbot-{config.instance_name}" + instance_name = config.instance_name instance_dir = os.path.join("bots", 'instances', instance_name) if not os.path.exists(instance_dir): os.makedirs(instance_dir) From b6ee5d2fac34fb921ab00c0f4e65c7fd3b6ebac2 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 2 Jul 2025 16:33:32 +0200 Subject: [PATCH 181/244] (feat) remove blue in code --- setup.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/setup.sh b/setup.sh index 4ccbf2be..c4e942b9 100755 --- a/setup.sh +++ b/setup.sh @@ -137,10 +137,10 @@ if [ ! -f "bots/credentials/master_account/.password_verification" ]; then echo "" fi -echo -e "${YELLOW}Next steps:${NC}" -echo "1. Review the .env file if needed: ${BLUE}cat .env${NC}" -echo "2. Install dependencies: ${BLUE}make install${NC}" -echo "3. Start the API: ${BLUE}make run${NC}" +echo -e "Next steps:" +echo "1. Review the .env file if needed: cat .env" +echo "2. Install dependencies: make install" +echo "3. Start the API: make run" echo "" echo -e "${PURPLE}💡 Pro tip:${NC} You can modify environment variables in .env file anytime" echo -e "${PURPLE}📚 Documentation:${NC} Check config.py for all available settings" From a16fa1bc62deadcb9fa86bf18c7b7d92acaabfc1 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 2 Jul 2025 18:11:31 +0200 Subject: [PATCH 182/244] (feat) remove error that adds extra folder --- routers/bot_orchestration.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py index 11bb2001..12aa2a4a 100644 --- a/routers/bot_orchestration.py +++ b/routers/bot_orchestration.py @@ -390,8 +390,7 @@ async def deploy_v2_controllers( # Save the script config to the scripts directory scripts_dir = os.path.join("conf", "scripts") - os.makedirs(scripts_dir, exist_ok=True) - + script_config_path = os.path.join(scripts_dir, script_config_filename) fs_util.dump_dict_to_yaml(script_config_path, script_config_content) From adbb5e1b91d51605fdcf30efea2d365d29d9afa9 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 2 Jul 2025 18:37:02 +0200 Subject: [PATCH 183/244] (feat) update to latest v2 with controllers version --- bots/scripts/v2_with_controllers.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/bots/scripts/v2_with_controllers.py b/bots/scripts/v2_with_controllers.py index d9c33621..d345f0bf 100644 --- a/bots/scripts/v2_with_controllers.py +++ b/bots/scripts/v2_with_controllers.py @@ -14,8 +14,8 @@ class V2WithControllersConfig(StrategyV2ConfigBase): script_file_name: str = os.path.basename(__file__) candles_config: List[CandlesConfig] = [] markets: Dict[str, Set[str]] = {} - max_global_drawdown: Optional[float] = None - max_controller_drawdown: Optional[float] = None + max_global_drawdown_quote: Optional[float] = None + max_controller_drawdown_quote: Optional[float] = None class V2WithControllers(StrategyV2Base): @@ -42,14 +42,15 @@ def __init__(self, 
connectors: Dict[str, ConnectorBase], config: V2WithControlle def on_tick(self): super().on_tick() - self.check_manual_kill_switch() - self.control_max_drawdown() - self.send_performance_report() + if not self._is_stop_triggered: + self.check_manual_kill_switch() + self.control_max_drawdown() + self.send_performance_report() def control_max_drawdown(self): - if self.config.max_controller_drawdown: + if self.config.max_controller_drawdown_quote: self.check_max_controller_drawdown() - if self.config.max_global_drawdown: + if self.config.max_global_drawdown_quote: self.check_max_global_drawdown() def check_max_controller_drawdown(self): @@ -62,7 +63,7 @@ def check_max_controller_drawdown(self): self.max_pnl_by_controller[controller_id] = controller_pnl else: current_drawdown = last_max_pnl - controller_pnl - if current_drawdown > self.config.max_controller_drawdown: + if current_drawdown > self.config.max_controller_drawdown_quote: self.logger().info(f"Controller {controller_id} reached max drawdown. Stopping the controller.") controller.stop() executors_order_placed = self.filter_executors( @@ -80,9 +81,10 @@ def check_max_global_drawdown(self): self.max_global_pnl = current_global_pnl else: current_global_drawdown = self.max_global_pnl - current_global_pnl - if current_global_drawdown > self.config.max_global_drawdown: + if current_global_drawdown > self.config.max_global_drawdown_quote: self.drawdown_exited_controllers.extend(list(self.controllers.keys())) self.logger().info("Global drawdown reached. Stopping the strategy.") + self._is_stop_triggered = True HummingbotApplication.main_application().stop() def send_performance_report(self): @@ -92,8 +94,6 @@ def send_performance_report(self): self._last_performance_report_timestamp = self.current_timestamp def check_manual_kill_switch(self): - if self._is_stop_triggered: - return for controller_id, controller in self.controllers.items(): if controller.config.manual_kill_switch and controller.status == RunnableStatus.RUNNING: self.logger().info(f"Manual cash out for controller {controller_id}.") From 06129041191bff7a51ac38a4c93abc8244778aa4 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 2 Jul 2025 18:39:23 +0200 Subject: [PATCH 184/244] (feat) update to drawdown naming --- models/bot_orchestration.py | 4 ++-- routers/bot_orchestration.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/models/bot_orchestration.py b/models/bot_orchestration.py index 61ff5341..b328133a 100644 --- a/models/bot_orchestration.py +++ b/models/bot_orchestration.py @@ -108,6 +108,6 @@ class V2ControllerDeployment(BaseModel): instance_name: str = Field(description="Unique name for the bot instance") credentials_profile: str = Field(description="Name of the credentials profile to use") controllers_config: List[str] = Field(description="List of controller configuration files to use (without .yml extension)") - max_global_drawdown: Optional[float] = Field(default=None, description="Maximum allowed global drawdown percentage (0.0-1.0)") - max_controller_drawdown: Optional[float] = Field(default=None, description="Maximum allowed per-controller drawdown percentage (0.0-1.0)") + max_global_drawdown_quote: Optional[float] = Field(default=None, description="Maximum allowed global drawdown in quote usually USDT") + max_controller_drawdown_quote: Optional[float] = Field(default=None, description="Maximum allowed per-controller drawdown in quote usually USDT") image: str = Field(default="hummingbot/hummingbot:latest", description="Docker image 
for the Hummingbot instance") \ No newline at end of file diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py index 12aa2a4a..0483bc16 100644 --- a/routers/bot_orchestration.py +++ b/routers/bot_orchestration.py @@ -383,10 +383,10 @@ async def deploy_v2_controllers( } # Add optional drawdown parameters if provided - if deployment.max_global_drawdown is not None: - script_config_content["max_global_drawdown"] = deployment.max_global_drawdown - if deployment.max_controller_drawdown is not None: - script_config_content["max_controller_drawdown"] = deployment.max_controller_drawdown + if deployment.max_global_drawdown_quote is not None: + script_config_content["max_global_drawdown_quote"] = deployment.max_global_drawdown_quote + if deployment.max_controller_drawdown_quote is not None: + script_config_content["max_controller_drawdown_quote"] = deployment.max_controller_drawdown_quote # Save the script config to the scripts directory scripts_dir = os.path.join("conf", "scripts") From b174410369431f2cf4f8e2a93d90c6b61b5ec660 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 2 Jul 2025 23:33:26 +0200 Subject: [PATCH 185/244] (feat) re-raise error --- services/accounts_service.py | 1 + 1 file changed, 1 insertion(+) diff --git a/services/accounts_service.py b/services/accounts_service.py index 79aee690..c6e220b6 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -263,6 +263,7 @@ async def add_credentials(self, account_name: str, connector_name: str, credenti except Exception as e: logger.error(f"Error adding connector credentials for account {account_name}: {e}") await self.delete_credentials(account_name, connector_name) + raise e @staticmethod def list_accounts(): From 39c0a389420e2c98e859794efce418be33878ffd Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 2 Jul 2025 23:33:40 +0200 Subject: [PATCH 186/244] (feat) improve docker routes --- routers/docker.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/routers/docker.py b/routers/docker.py index 6f9769b0..7b0f8287 100644 --- a/routers/docker.py +++ b/routers/docker.py @@ -21,11 +21,11 @@ async def is_docker_running(docker_service: DockerService = Depends(get_docker_s Returns: Dictionary indicating if Docker is running """ - return {"is_docker_running": docker_service.is_docker_running()} + return docker_service.is_docker_running() -@router.get("/available-images/{image_name}") -async def available_images(image_name: str, docker_service: DockerService = Depends(get_docker_service)): +@router.get("/available-images/") +async def available_images(image_name: str = None, docker_service: DockerService = Depends(get_docker_service)): """ Get available Docker images matching the specified name. 
@@ -37,8 +37,9 @@ async def available_images(image_name: str, docker_service: DockerService = Depe
         Dictionary with list of available image tags
     """
     available_images = docker_service.get_available_images()
-    image_tags = [tag for image in available_images["images"] for tag in image.tags if image_name in tag]
-    return {"available_images": image_tags}
+    if image_name:
+        return [tag for image in available_images["images"] for tag in image.tags if image_name in tag]
+    return [tag for image in available_images["images"] for tag in image.tags]
 
 
 @router.get("/active-containers")

From bc0d7e84294838471e958af83d4be5028c7f83fd Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Thu, 3 Jul 2025 01:06:41 +0200
Subject: [PATCH 187/244] (feat) improve trading and portfolio search endpoints

---
 models/trading.py            |  26 +++-
 routers/portfolio.py         | 148 ++++++++++++++++++++++-------------
 routers/trading.py           |   8 --
 services/accounts_service.py |   6 +-
 4 files changed, 122 insertions(+), 66 deletions(-)

diff --git a/models/trading.py b/models/trading.py
index b5057da9..a3449e3d 100644
--- a/models/trading.py
+++ b/models/trading.py
@@ -182,4 +182,28 @@ class TradeFilterRequest(TimeRangePaginationParams):
     account_names: Optional[List[str]] = Field(default=None, description="List of account names to filter by")
     connector_names: Optional[List[str]] = Field(default=None, description="List of connector names to filter by")
     trading_pairs: Optional[List[str]] = Field(default=None, description="List of trading pairs to filter by")
-    trade_types: Optional[List[str]] = Field(default=None, description="List of trade types to filter by (BUY/SELL)")
\ No newline at end of file
+    trade_types: Optional[List[str]] = Field(default=None, description="List of trade types to filter by (BUY/SELL)")
+
+
+class PortfolioStateFilterRequest(BaseModel):
+    """Request model for filtering portfolio state"""
+    account_names: Optional[List[str]] = Field(default=None, description="List of account names to filter by")
+    connector_names: Optional[List[str]] = Field(default=None, description="List of connector names to filter by")
+
+
+class PortfolioHistoryFilterRequest(TimeRangePaginationParams):
+    """Request model for filtering portfolio history"""
+    account_names: Optional[List[str]] = Field(default=None, description="List of account names to filter by")
+    connector_names: Optional[List[str]] = Field(default=None, description="List of connector names to filter by")
+
+
+class PortfolioDistributionFilterRequest(BaseModel):
+    """Request model for filtering portfolio distribution"""
+    account_names: Optional[List[str]] = Field(default=None, description="List of account names to filter by")
+    connector_names: Optional[List[str]] = Field(default=None, description="List of connector names to filter by")
+
+
+class AccountsDistributionFilterRequest(BaseModel):
+    """Request model for filtering accounts distribution"""
+    account_names: Optional[List[str]] = Field(default=None, description="List of account names to filter by")
+    connector_names: Optional[List[str]] = Field(default=None, description="List of connector names to filter by")
\ No newline at end of file
diff --git a/routers/portfolio.py b/routers/portfolio.py
index 937022b4..c34319e9 100644
--- a/routers/portfolio.py
+++ b/routers/portfolio.py
@@ -1,8 +1,14 @@
 from typing import Dict, List, Optional
 from datetime import datetime
-from fastapi import APIRouter, HTTPException, Depends, Query
+from fastapi import APIRouter, HTTPException, Depends
+from models.trading import (
+    PortfolioStateFilterRequest,
+    
PortfolioHistoryFilterRequest, + PortfolioDistributionFilterRequest, + AccountsDistributionFilterRequest +) from services.accounts_service import AccountsService from deps import get_accounts_service from models import PaginatedResponse @@ -10,76 +16,77 @@ router = APIRouter(tags=["Portfolio"], prefix="/portfolio") -@router.get("/state", response_model=Dict[str, Dict[str, List[Dict]]]) +@router.post("/state", response_model=Dict[str, Dict[str, List[Dict]]]) async def get_portfolio_state( - account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), + filter_request: PortfolioStateFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service) ): """ Get the current state of all or filtered accounts portfolio. Args: - account_names: Optional list of account names to filter by + filter_request: JSON payload with filtering criteria Returns: Dict containing account states with connector balances and token information """ all_states = accounts_service.get_accounts_state() - # If no filter, return all accounts - if not account_names: - return all_states + # Apply account name filter first + if filter_request.account_names: + filtered_states = {} + for account_name in filter_request.account_names: + if account_name in all_states: + filtered_states[account_name] = all_states[account_name] + all_states = filtered_states - # Filter by requested accounts - filtered_states = {} - for account_name in account_names: - if account_name in all_states: - filtered_states[account_name] = all_states[account_name] + # Apply connector filter if specified + if filter_request.connector_names: + for account_name, account_data in all_states.items(): + # Filter connectors directly (they are at the top level of account_data) + filtered_connectors = {} + for connector_name in filter_request.connector_names: + if connector_name in account_data: + filtered_connectors[connector_name] = account_data[connector_name] + # Replace account_data with only filtered connectors + all_states[account_name] = filtered_connectors - return filtered_states + return all_states -@router.get("/history", response_model=PaginatedResponse) +@router.post("/history", response_model=PaginatedResponse) async def get_portfolio_history( - account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), - limit: int = Query(default=100, ge=1, le=1000, description="Number of items per page"), - cursor: str = Query(default=None, description="Cursor for next page (ISO timestamp)"), - start_time: datetime = Query(default=None, description="Start time for filtering"), - end_time: datetime = Query(default=None, description="End time for filtering"), + filter_request: PortfolioHistoryFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service) ): """ Get the historical state of all or filtered accounts portfolio with pagination. 
Args: - account_names: Optional list of account names to filter by - limit: Number of items per page (1-1000) - cursor: Cursor for pagination (ISO timestamp) - start_time: Start time for filtering results - end_time: End time for filtering results + filter_request: JSON payload with filtering criteria Returns: Paginated response with historical portfolio data """ try: - if not account_names: + if not filter_request.account_names: # Get history for all accounts data, next_cursor, has_more = await accounts_service.load_account_state_history( - limit=limit, - cursor=cursor, - start_time=start_time, - end_time=end_time + limit=filter_request.limit, + cursor=filter_request.cursor, + start_time=filter_request.start_time, + end_time=filter_request.end_time ) else: # Get history for specific accounts - need to aggregate all_data = [] - for account_name in account_names: + for account_name in filter_request.account_names: acc_data, _, _ = await accounts_service.get_account_state_history( account_name=account_name, - limit=limit, - cursor=cursor, - start_time=start_time, - end_time=end_time + limit=filter_request.limit, + cursor=filter_request.cursor, + start_time=filter_request.start_time, + end_time=filter_request.end_time ) all_data.extend(acc_data) @@ -87,21 +94,33 @@ async def get_portfolio_history( all_data.sort(key=lambda x: x.get("timestamp", ""), reverse=True) # Apply limit - data = all_data[:limit] - has_more = len(all_data) > limit + data = all_data[:filter_request.limit] + has_more = len(all_data) > filter_request.limit next_cursor = data[-1]["timestamp"] if data and has_more else None + # Apply connector filter to the data if specified + if filter_request.connector_names: + for item in data: + for account_name, account_data in item.items(): + if isinstance(account_data, dict) and "connectors" in account_data: + filtered_connectors = {} + for connector_name in filter_request.connector_names: + if connector_name in account_data["connectors"]: + filtered_connectors[connector_name] = account_data["connectors"][connector_name] + account_data["connectors"] = filtered_connectors + return PaginatedResponse( data=data, pagination={ - "limit": limit, + "limit": filter_request.limit, "has_more": has_more, "next_cursor": next_cursor, - "current_cursor": cursor, + "current_cursor": filter_request.cursor, "filters": { - "account_names": account_names, - "start_time": start_time.isoformat() if start_time else None, - "end_time": end_time.isoformat() if end_time else None + "account_names": filter_request.account_names, + "connector_names": filter_request.connector_names, + "start_time": filter_request.start_time.isoformat() if filter_request.start_time else None, + "end_time": filter_request.end_time.isoformat() if filter_request.end_time else None } } ) @@ -109,26 +128,26 @@ async def get_portfolio_history( raise HTTPException(status_code=500, detail=str(e)) -@router.get("/distribution") +@router.post("/distribution") async def get_portfolio_distribution( - account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), + filter_request: PortfolioDistributionFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service) ): """ Get portfolio distribution by tokens with percentages across all or filtered accounts. 
Args: - account_names: Optional list of account names to filter by + filter_request: JSON payload with filtering criteria Returns: Dictionary with token distribution including percentages, values, and breakdown by accounts/connectors """ - if not account_names: + if not filter_request.account_names: # Get distribution for all accounts return accounts_service.get_portfolio_distribution() - elif len(account_names) == 1: + elif len(filter_request.account_names) == 1: # Single account - use existing method - return accounts_service.get_portfolio_distribution(account_names[0]) + return accounts_service.get_portfolio_distribution(filter_request.account_names[0]) else: # Multiple accounts - need to aggregate aggregated_distribution = { @@ -138,7 +157,7 @@ async def get_portfolio_distribution( "accounts": {} } - for account_name in account_names: + for account_name in filter_request.account_names: account_dist = accounts_service.get_portfolio_distribution(account_name) # Skip if account doesn't exist or has error @@ -175,16 +194,16 @@ async def get_portfolio_distribution( return aggregated_distribution -@router.get("/accounts-distribution") +@router.post("/accounts-distribution") async def get_accounts_distribution( - account_names: Optional[List[str]] = Query(default=None, description="Filter by account names"), + filter_request: AccountsDistributionFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service) ): """ Get portfolio distribution by accounts with percentages. Args: - account_names: Optional list of account names to filter by + filter_request: JSON payload with filtering criteria Returns: Dictionary with account distribution including percentages, values, and breakdown by connectors @@ -192,7 +211,7 @@ async def get_accounts_distribution( all_distribution = accounts_service.get_account_distribution() # If no filter, return all accounts - if not account_names: + if not filter_request.account_names: return all_distribution # Filter the distribution by requested accounts @@ -202,11 +221,34 @@ async def get_accounts_distribution( "account_count": 0 } - for account_name in account_names: + for account_name in filter_request.account_names: if account_name in all_distribution.get("accounts", {}): filtered_distribution["accounts"][account_name] = all_distribution["accounts"][account_name] filtered_distribution["total_value"] += all_distribution["accounts"][account_name].get("total_value", 0) + # Apply connector filter if specified + if filter_request.connector_names: + for account_name, account_data in filtered_distribution["accounts"].items(): + if "connectors" in account_data: + filtered_connectors = {} + for connector_name in filter_request.connector_names: + if connector_name in account_data["connectors"]: + filtered_connectors[connector_name] = account_data["connectors"][connector_name] + account_data["connectors"] = filtered_connectors + + # Recalculate account total after connector filtering + new_total = sum( + conn_data.get("total_balance_in_usd", 0) + for conn_data in filtered_connectors.values() + ) + account_data["total_value"] = new_total + + # Recalculate total_value after connector filtering + filtered_distribution["total_value"] = sum( + acc_data.get("total_value", 0) + for acc_data in filtered_distribution["accounts"].values() + ) + # Recalculate percentages total_value = filtered_distribution["total_value"] if total_value > 0: diff --git a/routers/trading.py b/routers/trading.py index f17a4435..8bbad64f 100644 --- a/routers/trading.py +++ 
b/routers/trading.py @@ -70,15 +70,8 @@ async def place_trade(trade_request: TradeRequest, except Exception as e: raise HTTPException(status_code=500, detail=f"Unexpected error placing trade: {str(e)}") - -class CancelOrderRequest(BaseModel): - """Request model for cancelling an order""" - trading_pair: str - - @router.post("/{account_name}/{connector_name}/orders/{client_order_id}/cancel") async def cancel_order(account_name: str, connector_name: str, client_order_id: str, - request: CancelOrderRequest, accounts_service: AccountsService = Depends(get_accounts_service)): """ Cancel a specific order by its client order ID. @@ -100,7 +93,6 @@ async def cancel_order(account_name: str, connector_name: str, client_order_id: cancelled_order_id = await accounts_service.cancel_order( account_name=account_name, connector_name=connector_name, - trading_pair=request.trading_pair, client_order_id=client_order_id ) return {"message": f"Order {cancelled_order_id} cancelled successfully"} diff --git a/services/accounts_service.py b/services/accounts_service.py index c6e220b6..b7985c81 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -823,15 +823,13 @@ async def get_active_orders(self, account_name: str, connector_name: str) -> Dic connector = await self.get_connector_instance(account_name, connector_name) return {order_id: order.to_json() for order_id, order in connector.in_flight_orders.items()} - async def cancel_order(self, account_name: str, connector_name: str, - trading_pair: str, client_order_id: str) -> str: + async def cancel_order(self, account_name: str, connector_name: str, client_order_id: str) -> str: """ Cancel an active order. Args: account_name: Name of the account connector_name: Name of the connector - trading_pair: Trading pair client_order_id: Client order ID to cancel Returns: @@ -840,7 +838,7 @@ async def cancel_order(self, account_name: str, connector_name: str, connector = await self.get_connector_instance(account_name, connector_name) try: - result = connector.cancel(trading_pair=trading_pair, client_order_id=client_order_id) + result = connector.cancel(trading_pair="NA", client_order_id=client_order_id) logger.info(f"Cancelled order {client_order_id} on {connector_name} (Account: {account_name})") return result except Exception as e: From b939f6c838d08f42fadb3b4917ec88e55628244c Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 3 Jul 2025 01:23:03 +0200 Subject: [PATCH 188/244] (feat) add support for controllers and positions in archived bots --- routers/archived_bots.py | 41 +++++++++++++++++++++++++++++ utils/hummingbot_database_reader.py | 14 +++++++++- 2 files changed, 54 insertions(+), 1 deletion(-) diff --git a/routers/archived_bots.py b/routers/archived_bots.py index 2ca19d2a..a03fae12 100644 --- a/routers/archived_bots.py +++ b/routers/archived_bots.py @@ -59,12 +59,14 @@ async def get_database_summary(db_path: str): orders = db.get_orders() trades = db.get_trade_fills() executors = db.get_executors_data() + positions = db.get_positions() return { "db_path": db_path, "total_orders": len(orders), "total_trades": len(trades), "total_executors": len(executors), + "total_positions": len(positions), "trading_pairs": orders["symbol"].unique().tolist() if len(orders) > 0 else [], "exchanges": orders["market"].unique().tolist() if len(orders) > 0 else [], } @@ -228,3 +230,42 @@ async def get_database_executors(db_path: str): } except Exception as e: raise HTTPException(status_code=500, detail=f"Error fetching executors: {str(e)}") + + 
+@router.get("/{db_path:path}/positions") +async def get_database_positions( + db_path: str, + limit: int = Query(default=100, description="Limit number of positions returned"), + offset: int = Query(default=0, description="Offset for pagination") +): + """ + Get position data from a database. + + Args: + db_path: Full path to the database file + limit: Maximum number of positions to return + offset: Offset for pagination + + Returns: + List of positions with pagination info + """ + try: + db = HummingbotDatabase(db_path) + positions = db.get_positions() + + # Apply pagination + total_positions = len(positions) + positions_page = positions.iloc[offset:offset + limit] + + return { + "db_path": db_path, + "positions": positions_page.fillna(0).to_dict('records'), + "pagination": { + "total": total_positions, + "limit": limit, + "offset": offset, + "has_more": offset + limit < total_positions + } + } + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching positions: {str(e)}") diff --git a/utils/hummingbot_database_reader.py b/utils/hummingbot_database_reader.py index 520e8dc5..5fedd6ee 100644 --- a/utils/hummingbot_database_reader.py +++ b/utils/hummingbot_database_reader.py @@ -34,14 +34,17 @@ def status(self): order_status_status = self._get_table_status(self.get_order_status) executors_status = self._get_table_status(self.get_executors_data) controller_status = self._get_table_status(self.get_controllers_data) + positions_status = self._get_table_status(self.get_positions) general_status = all(status == "Correct" for status in - [trade_fill_status, orders_status, order_status_status, executors_status, controller_status]) + [trade_fill_status, orders_status, order_status_status, executors_status, controller_status, positions_status]) status = {"db_name": self.db_name, "db_path": self.db_path, "trade_fill": trade_fill_status, "orders": orders_status, "order_status": order_status_status, "executors": executors_status, + "controllers": controller_status, + "positions": positions_status, "general_status": general_status } return status @@ -87,6 +90,15 @@ def get_controllers_data(self) -> pd.DataFrame: controllers = pd.read_sql_query(text(query), session.connection()) return controllers + def get_positions(self) -> pd.DataFrame: + with self.session_maker() as session: + query = "SELECT * FROM Position" + positions = pd.read_sql_query(text(query), session.connection()) + # Convert decimal fields from stored format (divide by 1e6) + decimal_cols = ["volume_traded_quote", "amount", "breakeven_price", "unrealized_pnl_quote", "cum_fees_quote"] + positions[decimal_cols] = positions[decimal_cols] / 1e6 + return positions + def calculate_trade_based_performance(self) -> pd.DataFrame: """ Calculate trade-based performance metrics using vectorized pandas operations. 
From 983eab13a394b90ed41169f927958e8f2f5a68f3 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 3 Jul 2025 01:25:48 +0200 Subject: [PATCH 189/244] (feat) add controllers endpoint --- routers/archived_bots.py | 31 ++++++++++++++++++++++++++++--- 1 file changed, 28 insertions(+), 3 deletions(-) diff --git a/routers/archived_bots.py b/routers/archived_bots.py index a03fae12..a15825dd 100644 --- a/routers/archived_bots.py +++ b/routers/archived_bots.py @@ -1,9 +1,8 @@ -from typing import List, Dict, Any, Optional +from typing import List, Optional from fastapi import APIRouter, HTTPException, Query from utils.file_system import fs_util -from utils.hummingbot_database_reader import HummingbotDatabase, PerformanceDataSource -from hummingbot.strategy_v2.backtesting.backtesting_engine_base import BacktestingEngineBase +from utils.hummingbot_database_reader import HummingbotDatabase router = APIRouter(tags=["Archived Bots"], prefix="/archived-bots") @@ -60,6 +59,7 @@ async def get_database_summary(db_path: str): trades = db.get_trade_fills() executors = db.get_executors_data() positions = db.get_positions() + controllers = db.get_controllers_data() return { "db_path": db_path, @@ -67,6 +67,7 @@ async def get_database_summary(db_path: str): "total_trades": len(trades), "total_executors": len(executors), "total_positions": len(positions), + "total_controllers": len(controllers), "trading_pairs": orders["symbol"].unique().tolist() if len(orders) > 0 else [], "exchanges": orders["market"].unique().tolist() if len(orders) > 0 else [], } @@ -269,3 +270,27 @@ async def get_database_positions( } except Exception as e: raise HTTPException(status_code=500, detail=f"Error fetching positions: {str(e)}") + + +@router.get("/{db_path:path}/controllers") +async def get_database_controllers(db_path: str): + """ + Get controller data from a database. 
+ + Args: + db_path: Full path to the database file + + Returns: + List of controllers that were running with their configurations + """ + try: + db = HummingbotDatabase(db_path) + controllers = db.get_controllers_data() + + return { + "db_path": db_path, + "controllers": controllers.fillna(0).to_dict('records'), + "total": len(controllers) + } + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error fetching controllers: {str(e)}") From c503075f6048b2bac92bcac9da60bfef1b9e3f0d Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 3 Jul 2025 18:24:52 +0200 Subject: [PATCH 190/244] (feat) update compose files --- Dockerfile | 10 +++++----- docker-compose.yml | 20 ++++++++++---------- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/Dockerfile b/Dockerfile index 81dc0eff..e1766153 100644 --- a/Dockerfile +++ b/Dockerfile @@ -27,10 +27,10 @@ RUN apt-get update && \ && rm -rf /var/lib/apt/lists/* # Copy the conda environment from builder -COPY --from=builder /opt/conda/envs/backend-api /opt/conda/envs/backend-api +COPY --from=builder /opt/conda/envs/hummingbot-api /opt/conda/envs/hummingbot-api # Set the working directory -WORKDIR /backend-api +WORKDIR /hummingbot-api # Copy only necessary application files COPY main.py config.py deps.py ./ @@ -43,14 +43,14 @@ COPY bots/controllers ./bots/controllers COPY bots/scripts ./bots/scripts # Create necessary directories -RUN mkdir -p bots/instances bots/conf bots/credentials bots/data +RUN mkdir -p bots/instances bots/conf bots/credentials bots/data bots/archived # Expose port EXPOSE 8000 # Set environment variables to ensure conda env is used -ENV PATH="/opt/conda/envs/backend-api/bin:$PATH" -ENV CONDA_DEFAULT_ENV=backend-api +ENV PATH="/opt/conda/envs/hummingbot-api/bin:$PATH" +ENV CONDA_DEFAULT_ENV=hummingbot-api # Run the application ENTRYPOINT ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/docker-compose.yml b/docker-compose.yml index 6ce0d47a..8d64d790 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,19 +1,19 @@ services: - backend-api: - container_name: backend-api - image: hummingbot/backend-api:latest + hummingbot-api: + container_name: hummingbot-api + image: hummingbot/hummingbot-api:latest ports: - "8000:8000" volumes: - - ./bots:/backend-api/bots + - ./bots:/hummingbot-api/bots - /var/run/docker.sock:/var/run/docker.sock env_file: - .env environment: # Override specific values for Docker networking - BROKER_HOST=emqx - - DATABASE_URL=postgresql+asyncpg://hbot:backend-api@postgres:5432/backend_api - - BOTS_PATH=/backend-api/bots + - DATABASE_URL=postgresql+asyncpg://hbot:hummingbot-api@postgres:5432/hummingbot_api + - BOTS_PATH=/hummingbot-api/bots networks: - emqx-bridge depends_on: @@ -50,13 +50,13 @@ services: timeout: 25s retries: 5 postgres: - container_name: backend-postgres + container_name: hummingbot-postgres image: postgres:15 restart: unless-stopped environment: - - POSTGRES_DB=backend_api + - POSTGRES_DB=hummingbot_api - POSTGRES_USER=hbot - - POSTGRES_PASSWORD=backend-api + - POSTGRES_PASSWORD=hummingbot-api volumes: - postgres-data:/var/lib/postgresql/data ports: @@ -64,7 +64,7 @@ services: networks: - emqx-bridge healthcheck: - test: ["CMD-SHELL", "pg_isready -U hbot -d backend_api"] + test: ["CMD-SHELL", "pg_isready -U hbot -d hummingbot_api"] interval: 10s timeout: 5s retries: 5 From c89a8d891cb72c7cfa30157cc8779e247737db13 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 3 Jul 2025 18:25:04 +0200 Subject: [PATCH 191/244] (feat) update wf 
--- .github/workflows/docker_buildx_workflow.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/docker_buildx_workflow.yml b/.github/workflows/docker_buildx_workflow.yml index 156388d9..7ba8f9c6 100644 --- a/.github/workflows/docker_buildx_workflow.yml +++ b/.github/workflows/docker_buildx_workflow.yml @@ -1,4 +1,4 @@ -name: Backend-API Docker Buildx Workflow +name: Hummingbot-API Docker Buildx Workflow on: pull_request: @@ -36,7 +36,7 @@ jobs: context: . platforms: linux/amd64,linux/arm64 push: true - tags: hummingbot/backend-api:development + tags: hummingbot/hummingbot-api:development - name: Build and push Latest Image if: github.base_ref == 'main' @@ -46,7 +46,7 @@ jobs: file: ./Dockerfile platforms: linux/amd64,linux/arm64 push: true - tags: hummingbot/backend-api:latest + tags: hummingbot/hummingbot-api:latest build_release: if: github.event_name == 'release' @@ -77,4 +77,4 @@ jobs: context: . platforms: linux/amd64,linux/arm64 push: true - tags: hummingbot/backend-api:${{ steps.get_tag.outputs.VERSION }} + tags: hummingbot/hummingbot-api:${{ steps.get_tag.outputs.VERSION }} From ac39e449efa92ede433e3a69e578331dc8654599 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 3 Jul 2025 18:26:28 +0200 Subject: [PATCH 192/244] (feat) update connection to hummingbot api --- database/connection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/database/connection.py b/database/connection.py index 172e221e..6cca9bb8 100644 --- a/database/connection.py +++ b/database/connection.py @@ -29,7 +29,7 @@ def __init__(self, database_url: str): echo_pool=False, # Set to True for connection pool logging # Connection arguments for asyncpg connect_args={ - "server_settings": {"application_name": "backend-api"}, + "server_settings": {"application_name": "hummingbot-api"}, "command_timeout": 60, } ) From 8d181ded8a86ef8f36657da7fd090ad0434ff7b9 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 3 Jul 2025 18:26:43 +0200 Subject: [PATCH 193/244] (feat) update connection to hummingbot api --- utils/mqtt_manager.py | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/utils/mqtt_manager.py b/utils/mqtt_manager.py index 8dbc0ee5..a29ec63c 100644 --- a/utils/mqtt_manager.py +++ b/utils/mqtt_manager.py @@ -52,7 +52,7 @@ def __init__(self, host: str, port: int, username: str, password: str): ("hbot/+/hb", 1), # Heartbeats ("hbot/+/performance", 1), # Performance metrics ("hbot/+/external/event/+", 1), # External events - ("backend-api/response/+", 1), # RPC responses to our reply_to topics + ("hummingbot-api/response/+", 1), # RPC responses to our reply_to topics ] if username: @@ -63,7 +63,7 @@ def __init__(self, host: str, port: int, username: str, password: str): @asynccontextmanager async def _get_client(self): """Get MQTT client for a single connection attempt.""" - client_id = f"backend-api-{int(time.time())}" + client_id = f"hummingbot-api-{int(time.time())}" # Create client with credentials if provided if self.username and self.password: @@ -110,8 +110,8 @@ async def _process_message(self, message): try: topic = str(message.topic) - # Check if this is an RPC response to our backend-api - if topic.startswith("backend-api/response/"): + # Check if this is an RPC response to our hummingbot-api + if topic.startswith("hummingbot-api/response/"): await self._handle_rpc_response(topic, message) return @@ -230,7 +230,7 @@ async def _handle_external_event(self, bot_id: str, channel: str, data: Any): 
event_type = channel.split("/")[-1] async def _handle_rpc_response(self, topic: str, message): - """Handle RPC responses on backend-api/response/* topics.""" + """Handle RPC responses on hummingbot-api/response/* topics.""" try: # Parse the response data try: @@ -310,7 +310,7 @@ async def publish_command_and_wait( # Generate unique reply_to topic timestamp = int(time.time() * 1000) - reply_to_topic = f"backend-api/response/{timestamp}" + reply_to_topic = f"hummingbot-api/response/{timestamp}" # Create a future to track the response using the reply_to topic as key future = asyncio.Future() @@ -366,8 +366,8 @@ async def _publish_command_with_reply_to( "timestamp": int(time.time() * 1000), # Milliseconds "reply_to": reply_to, # Custom reply_to topic "msg_id": int(time.time() * 1000), - "node_id": "backend-api", - "agent": "backend-api", + "node_id": "hummingbot-api", + "agent": "hummingbot-api", "properties": {}, }, "data": data or {}, @@ -405,10 +405,10 @@ async def publish_command(self, bot_id: str, command: str, data: Dict[str, Any], message = { "header": { "timestamp": int(time.time() * 1000), # Milliseconds - "reply_to": f"backend-api-response-{int(time.time() * 1000)}", # Unique response topic + "reply_to": f"hummingbot-api-response-{int(time.time() * 1000)}", # Unique response topic "msg_id": int(time.time() * 1000), - "node_id": "backend-api", - "agent": "backend-api", + "node_id": "hummingbot-api", + "agent": "hummingbot-api", "properties": {}, }, "data": data or {}, From e596d5f6ce751dd1eb433abf0e6ad7cb19ff6deb Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 3 Jul 2025 18:27:05 +0200 Subject: [PATCH 194/244] (feat) final env updates --- Makefile | 18 ++++---- README.md | 107 +++++++++++++++++++++++++++++------------------- config.py | 2 +- environment.yml | 2 +- main.py | 2 +- run.sh | 2 +- setup.sh | 2 +- 7 files changed, 79 insertions(+), 56 deletions(-) diff --git a/Makefile b/Makefile index b608e236..db10e0cb 100644 --- a/Makefile +++ b/Makefile @@ -5,8 +5,8 @@ .PHONY: uninstall .PHONY: install .PHONY: install-pre-commit -.PHONY: docker_build -.PHONY: docker_run +.PHONY: build +.PHONY: deploy detect_conda_bin := $(shell bash -c 'if [ "${CONDA_EXE} " == " " ]; then \ @@ -26,25 +26,25 @@ run: uvicorn main:app --reload uninstall: - conda env remove -n backend-api -y + conda env remove -n hummingbot-api -y install: - if conda env list | grep -q '^backend-api '; then \ + if conda env list | grep -q '^hummingbot-api '; then \ echo "Environment already exists."; \ else \ conda env create -f environment.yml; \ fi - conda activate backend-api + conda activate hummingbot-api $(MAKE) install-pre-commit install-pre-commit: - /bin/bash -c 'source "${CONDA_BIN}/activate" backend-api && \ + /bin/bash -c 'source "${CONDA_BIN}/activate" hummingbot-api && \ if ! conda list pre-commit | grep pre-commit &> /dev/null; then \ pip install pre-commit; \ fi && pre-commit install' -docker_build: - docker build -t hummingbot/backend-api:latest . +build: + docker build -t hummingbot/hummingbot-api:latest . -docker_run: +deploy: docker compose up -d diff --git a/README.md b/README.md index 71e04853..5e5bd8dd 100644 --- a/README.md +++ b/README.md @@ -42,8 +42,8 @@ Enables real-time communication with trading bots: 1. **Clone the repository** ```bash - git clone https://github.com/hummingbot/backend-api.git - cd backend-api + git clone https://github.com/hummingbot/hummingbot-api.git + cd hummingbot-api ``` 2. 
**Make setup script executable and run it** @@ -111,67 +111,90 @@ Once the API is running, you can access it at `http://localhost:8000` The Hummingbot API is organized into several functional routers: ### 🐳 Docker Management (`/docker`) -- Check running containers and images -- Pull new Docker images +- Check Docker daemon status and health +- Pull new Docker images with async support - Start, stop, and remove containers -- Monitor container status and health +- Monitor active and exited containers - Clean up exited containers - Archive container data locally or to S3 +- Track image pull status and progress ### 💳 Account Management (`/accounts`) - Create and delete trading accounts - Add/remove exchange credentials -- Monitor account states and balances -- View portfolio distribution -- Track positions and funding payments +- List available credentials per account +- Basic account configuration + +### 🔌 Connector Discovery (`/connectors`) +**Provides exchange connector information and configuration** +- List available exchange connectors +- Get connector configuration requirements +- Retrieve trading rules and constraints +- Query supported order types per connector + +### 📊 Portfolio Management (`/portfolio`) +**Centralized portfolio tracking and analytics** +- **Real-time Portfolio State**: Current balances across all accounts +- **Portfolio History**: Time-series data with cursor-based pagination +- **Token Distribution**: Aggregate holdings by token across exchanges +- **Account Distribution**: Percentage-based portfolio allocation analysis +- **Advanced Filtering**: Filter by account names and connectors ### 💹 Trading Operations (`/trading`) -**Enhanced with cursor-based pagination and comprehensive order/trade management** -- **Order Management**: Place, cancel, and monitor orders across all exchanges -- **Position Tracking**: Real-time positions with PnL, margin, and funding data -- **Historical Data**: Paginated order history with advanced filtering +**Enhanced with POST-based filtering and comprehensive order/trade management** +- **Order Placement**: Execute trades with advanced order types +- **Order Cancellation**: Cancel specific orders by ID +- **Position Tracking**: Real-time perpetual positions with PnL data - **Active Orders**: Live order monitoring from connector in-flight orders -- **Trade History**: Complete trade execution records with filtering -- **Funding Payments**: Historical funding payment tracking for perpetual positions -- **Portfolio Monitoring**: Real-time balance and portfolio state tracking +- **Order History**: Paginated historical orders with advanced filtering +- **Trade History**: Complete execution records with filtering +- **Funding Payments**: Historical funding payment tracking for perpetuals - **Position Modes**: Configure HEDGE/ONEWAY modes for perpetual trading - **Leverage Management**: Set and adjust leverage per trading pair ### 🤖 Bot Orchestration (`/bot-orchestration`) -- Discover and manage active bots -- Deploy new Hummingbot instances -- Start/stop automated strategies -- Monitor bot performance in real-time +- Monitor bot status and MQTT connectivity +- Deploy V2 scripts and controllers +- Start/stop bots with configurable parameters +- Stop and archive bots with background task support +- Retrieve bot performance history +- Real-time bot status monitoring ### 📋 Strategy Management -- **Controllers** (`/controllers`): Manage advanced strategy controllers +- **Controllers** (`/controllers`): Manage V2 strategy controllers + - CRUD 
operations on controller files + - Controller configuration management + - Bot-specific controller configurations + - Template retrieval for new configs - **Scripts** (`/scripts`): Handle traditional Hummingbot scripts -- Create, edit, and remove strategy files -- Configure strategy parameters + - CRUD operations on script files + - Script configuration management + - Configuration templates ### 📊 Market Data (`/market-data`) -**Completely enhanced with professional order book analysis and real-time data** -- **Price Discovery**: Real-time prices for multiple trading pairs, funding rates, mark/index prices -- **Order Book Analysis**: Live snapshots, price impact analysis, liquidity analysis, VWAP calculations -- **Historical Data**: Real-time and historical candle data with configurable intervals -- **Feed Management**: Active feed monitoring, automatic cleanup, lifecycle management +**Professional market data analysis and real-time feeds** +- **Price Discovery**: Real-time prices, funding rates, mark/index prices +- **Candle Data**: Real-time and historical candles with multiple intervals +- **Order Book Analysis**: + - Live order book snapshots + - Price impact calculations + - Volume queries at specific price levels + - VWAP (Volume-Weighted Average Price) calculations +- **Feed Management**: Active feed monitoring with automatic cleanup ### 🔄 Backtesting (`/backtesting`) -- Test strategies against historical data -- Analyze strategy performance -- Optimize parameters - -### 📈 Analytics (`/archived-bots`) -- Analyze performance of stopped bots -- Generate comprehensive reports -- Review historical trades and orders -- Extract insights from past strategies - -### 🗄️ Database Management (`/databases`) -- List and manage bot databases -- Query trading data across multiple bots -- Analyze historical performance -- Database health monitoring +- Run strategy backtests against historical data +- Support for controller configurations +- Customizable trade costs and resolution + +### 📈 Archived Bot Analytics (`/archived-bots`) +**Comprehensive analysis of stopped bot performance** +- List and discover archived bot databases +- Performance metrics and trade analysis +- Historical order and trade retrieval +- Position and executor data extraction +- Controller configuration recovery +- Support for both V1 and V2 bot architectures ## Configuration diff --git a/config.py b/config.py index 0e1a2597..c45fe104 100644 --- a/config.py +++ b/config.py @@ -18,7 +18,7 @@ class DatabaseSettings(BaseSettings): """Database configuration.""" url: str = Field( - default="postgresql+asyncpg://hbot:backend-api@localhost:5432/backend_api", + default="postgresql+asyncpg://hbot:hummingbot-api@localhost:5432/backend_api", description="Database connection URL" ) diff --git a/environment.yml b/environment.yml index e61f71e2..7fc7b209 100644 --- a/environment.yml +++ b/environment.yml @@ -1,4 +1,4 @@ -name: backend-api +name: hummingbot-api channels: - conda-forge - defaults diff --git a/main.py b/main.py index 479dd552..67cf2847 100644 --- a/main.py +++ b/main.py @@ -145,7 +145,7 @@ async def lifespan(app: FastAPI): allow_headers=["*"], ) -logfire.configure(send_to_logfire="if-token-present", environment=settings.app.logfire_environment, service_name="backend-api") +logfire.configure(send_to_logfire="if-token-present", environment=settings.app.logfire_environment, service_name="hummingbot-api") logfire.instrument_fastapi(app) def auth_user( diff --git a/run.sh b/run.sh index b07ef3d8..c6b36427 100755 --- a/run.sh +++ 
b/run.sh @@ -10,7 +10,7 @@ if [[ "$1" == "--dev" ]]; then # Activate conda environment and run with uvicorn docker compose up emqx postgres -d source "$(conda info --base)/etc/profile.d/conda.sh" - conda activate backend-api + conda activate hummingbot-api uvicorn main:app --reload else echo "Running with Docker Compose..." diff --git a/setup.sh b/setup.sh index c4e942b9..ab133258 100755 --- a/setup.sh +++ b/setup.sh @@ -39,7 +39,7 @@ BROKER_HOST="localhost" BROKER_PORT="1883" BROKER_USERNAME="admin" BROKER_PASSWORD="password" -DATABASE_URL="postgresql+asyncpg://hbot:backend-api@localhost:5432/backend_api" +DATABASE_URL="postgresql+asyncpg://hbot:hummingbot-api@localhost:5432/backend_api" CLEANUP_INTERVAL="300" FEED_TIMEOUT="600" AWS_API_KEY="" From c2fbca6ef77b43eba5a275e7fdbf4e0372c42b19 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 4 Jul 2025 00:14:26 +0200 Subject: [PATCH 195/244] (feat) move defaults to hummingbot api --- config.py | 2 +- setup.sh | 2 +- utils/connector_manager.py | 6 +++--- utils/security.py | 6 +++--- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/config.py b/config.py index c45fe104..62c90f81 100644 --- a/config.py +++ b/config.py @@ -18,7 +18,7 @@ class DatabaseSettings(BaseSettings): """Database configuration.""" url: str = Field( - default="postgresql+asyncpg://hbot:hummingbot-api@localhost:5432/backend_api", + default="postgresql+asyncpg://hbot:hummingbot-api@localhost:5432/hummingbot_api", description="Database connection URL" ) diff --git a/setup.sh b/setup.sh index ab133258..6b491e3c 100755 --- a/setup.sh +++ b/setup.sh @@ -39,7 +39,7 @@ BROKER_HOST="localhost" BROKER_PORT="1883" BROKER_USERNAME="admin" BROKER_PASSWORD="password" -DATABASE_URL="postgresql+asyncpg://hbot:hummingbot-api@localhost:5432/backend_api" +DATABASE_URL="postgresql+asyncpg://hbot:hummingbot-api@localhost:5432/hummingbot_api" CLEANUP_INTERVAL="300" FEED_TIMEOUT="600" AWS_API_KEY="" diff --git a/utils/connector_manager.py b/utils/connector_manager.py index 2a8f465a..3a956bf2 100644 --- a/utils/connector_manager.py +++ b/utils/connector_manager.py @@ -14,7 +14,7 @@ from hummingbot.core.data_type.common import PositionMode from hummingbot.core.utils.async_utils import safe_ensure_future -from utils.backend_api_config_adapter import BackendAPIConfigAdapter +from utils.hummingbot_api_config_adapter import HummingbotAPIConfigAdapter from utils.file_system import FileSystemUtil, fs_util from utils.security import BackendAPISecurity @@ -112,7 +112,7 @@ def get_connector_config_map(connector_name: str): :param connector_name: The name of the connector. :return: The connector config map. """ - connector_config = BackendAPIConfigAdapter(AllConnectorSettings.get_connector_config_keys(connector_name)) + connector_config = HummingbotAPIConfigAdapter(AllConnectorSettings.get_connector_config_keys(connector_name)) return [key for key in connector_config.hb_config.__fields__.keys() if key != "connector"] async def update_connector_keys(self, account_name: str, connector_name: str, keys: dict): @@ -125,7 +125,7 @@ async def update_connector_keys(self, account_name: str, connector_name: str, ke :return: The updated connector instance. 
""" BackendAPISecurity.login_account(account_name=account_name, secrets_manager=self.secrets_manager) - connector_config = BackendAPIConfigAdapter(AllConnectorSettings.get_connector_config_keys(connector_name)) + connector_config = HummingbotAPIConfigAdapter(AllConnectorSettings.get_connector_config_keys(connector_name)) for key, value in keys.items(): setattr(connector_config, key, value) diff --git a/utils/security.py b/utils/security.py index f715c54d..095a7fdd 100644 --- a/utils/security.py +++ b/utils/security.py @@ -11,7 +11,7 @@ from hummingbot.client.config.security import Security from config import settings -from utils.backend_api_config_adapter import BackendAPIConfigAdapter +from utils.hummingbot_api_config_adapter import HummingbotAPIConfigAdapter from utils.file_system import fs_util @@ -41,11 +41,11 @@ def decrypt_connector_config(cls, file_path: Path): cls._secure_configs[connector_name] = cls.load_connector_config_map_from_file(file_path) @classmethod - def load_connector_config_map_from_file(cls, yml_path: Path) -> BackendAPIConfigAdapter: + def load_connector_config_map_from_file(cls, yml_path: Path) -> HummingbotAPIConfigAdapter: config_data = read_yml_file(yml_path) connector_name = connector_name_from_file(yml_path) hb_config = get_connector_hb_config(connector_name).model_validate(config_data) - config_map = BackendAPIConfigAdapter(hb_config) + config_map = HummingbotAPIConfigAdapter(hb_config) config_map.decrypt_all_secure_data() return config_map From 378ab1f4af5cf488d27f92871541fcd960705b77 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 4 Jul 2025 00:14:37 +0200 Subject: [PATCH 196/244] (feat) rename config adapter --- ...d_api_config_adapter.py => hummingbot_api_config_adapter.py} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename utils/{backend_api_config_adapter.py => hummingbot_api_config_adapter.py} (98%) diff --git a/utils/backend_api_config_adapter.py b/utils/hummingbot_api_config_adapter.py similarity index 98% rename from utils/backend_api_config_adapter.py rename to utils/hummingbot_api_config_adapter.py index 7e49da31..4dce67a3 100644 --- a/utils/backend_api_config_adapter.py +++ b/utils/hummingbot_api_config_adapter.py @@ -4,7 +4,7 @@ from pydantic import SecretStr -class BackendAPIConfigAdapter(ClientConfigAdapter): +class HummingbotAPIConfigAdapter(ClientConfigAdapter): def _encrypt_secrets(self, conf_dict: Dict[str, Any]): from utils.security import BackendAPISecurity for attr, value in conf_dict.items(): From 8be20c5b1aa559b538865e96b907f676b06a1732 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 4 Jul 2025 01:31:03 +0200 Subject: [PATCH 197/244] (feat) fix history endpoint --- routers/portfolio.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/routers/portfolio.py b/routers/portfolio.py index c34319e9..d7486550 100644 --- a/routers/portfolio.py +++ b/routers/portfolio.py @@ -69,13 +69,17 @@ async def get_portfolio_history( Paginated response with historical portfolio data """ try: + # Convert integer timestamps to datetime objects + start_time_dt = datetime.fromtimestamp(filter_request.start_time / 1000) if filter_request.start_time else None + end_time_dt = datetime.fromtimestamp(filter_request.end_time / 1000) if filter_request.end_time else None + if not filter_request.account_names: # Get history for all accounts data, next_cursor, has_more = await accounts_service.load_account_state_history( limit=filter_request.limit, cursor=filter_request.cursor, - 
start_time=filter_request.start_time,
-                end_time=filter_request.end_time
+                start_time=start_time_dt,
+                end_time=end_time_dt
             )
         else:
             # Get history for specific accounts - need to aggregate
@@ -85,8 +89,8 @@ async def get_portfolio_history(
                     account_name=account_name,
                     limit=filter_request.limit,
                     cursor=filter_request.cursor,
-                    start_time=filter_request.start_time,
-                    end_time=filter_request.end_time
+                    start_time=start_time_dt,
+                    end_time=end_time_dt
                 )
                 all_data.extend(acc_data)
@@ -119,8 +123,8 @@
                 "filters": {
                     "account_names": filter_request.account_names,
                     "connector_names": filter_request.connector_names,
-                    "start_time": filter_request.start_time.isoformat() if filter_request.start_time else None,
-                    "end_time": filter_request.end_time.isoformat() if filter_request.end_time else None
+                    "start_time": filter_request.start_time,
+                    "end_time": filter_request.end_time
                 }
             }
         )
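The fix above rests on the filter payload carrying start_time/end_time as integer epoch milliseconds, which the handler now converts to datetime objects before querying. A minimal sketch of that conversion (the helper name is illustrative, not part of the codebase):

from datetime import datetime
from typing import Optional

def ms_to_datetime(ms: Optional[int]) -> Optional[datetime]:
    # e.g. 1_700_000_000_000 ms -> 2023-11-14 22:13:20 UTC, rendered in local time
    return datetime.fromtimestamp(ms / 1000) if ms else None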
From eff2cf977a92de4826498926ec73775e18a734d2 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Fri, 4 Jul 2025 01:31:43 +0200
Subject: [PATCH 198/244] (feat) accommodate dump account state to use same timestamp for dumping

---
 database/repositories/account_repository.py | 18 +++++++++++++-----
 services/accounts_service.py                | 10 +++++++---
 2 files changed, 20 insertions(+), 8 deletions(-)

diff --git a/database/repositories/account_repository.py b/database/repositories/account_repository.py
index 5f89c8b6..a45b69ca 100644
--- a/database/repositories/account_repository.py
+++ b/database/repositories/account_repository.py
@@ -15,14 +15,22 @@ class AccountRepository:
     def __init__(self, session: AsyncSession):
         self.session = session

-    async def save_account_state(self, account_name: str, connector_name: str, tokens_info: List[Dict]) -> AccountState:
+    async def save_account_state(self, account_name: str, connector_name: str, tokens_info: List[Dict],
+                                 snapshot_timestamp: Optional[datetime] = None) -> AccountState:
         """
         Save account state with token information to the database.
+        If snapshot_timestamp is provided, use it instead of the server default.
         """
-        account_state = AccountState(
-            account_name=account_name,
-            connector_name=connector_name
-        )
+        account_state_data = {
+            "account_name": account_name,
+            "connector_name": connector_name
+        }
+
+        # If a specific timestamp is provided, use it instead of the server default
+        if snapshot_timestamp:
+            account_state_data["timestamp"] = snapshot_timestamp
+
+        account_state = AccountState(**account_state_data)
         self.session.add(account_state)
         await self.session.flush()  # Get the ID

diff --git a/services/accounts_service.py b/services/accounts_service.py
index b7985c81..e1a92ae1 100644
--- a/services/accounts_service.py
+++ b/services/accounts_service.py
@@ -1,6 +1,6 @@
 import asyncio
 import logging
-from datetime import datetime
+from datetime import datetime, timezone

 # Create module-specific logger
 logger = logging.getLogger(__name__)
@@ -117,19 +117,23 @@ async def update_account_state_loop(self):
     async def dump_account_state(self):
         """
         Save the current account state to the database.
+        All account/connector combinations from the same snapshot will use the same timestamp.
         :return:
         """
         await self.ensure_db_initialized()
         try:
+            # Generate a single timestamp for this entire snapshot
+            snapshot_timestamp = datetime.now(timezone.utc)
+
             async with self.db_manager.get_session_context() as session:
                 repository = AccountRepository(session)

-                # Save each account-connector combination
+                # Save each account-connector combination with the same timestamp
                 for account_name, connectors in self.accounts_state.items():
                     for connector_name, tokens_info in connectors.items():
                         if tokens_info:  # Only save if there's token data
-                            await repository.save_account_state(account_name, connector_name, tokens_info)
+                            await repository.save_account_state(account_name, connector_name, tokens_info, snapshot_timestamp)
         except Exception as e:
             logger.error(f"Error saving account state to database: {e}")

From 25d7509f58e127d96abbf2426217193ecdee4dc4 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Fri, 4 Jul 2025 01:46:16 +0200
Subject: [PATCH 199/244] (feat) improve grouping by minute to avoid second-level delays in reports

---
 database/repositories/account_repository.py | 31 ++++++++++++++-------
 1 file changed, 21 insertions(+), 10 deletions(-)

diff --git a/database/repositories/account_repository.py b/database/repositories/account_repository.py
index a45b69ca..1e3438ce 100644
--- a/database/repositories/account_repository.py
+++ b/database/repositories/account_repository.py
@@ -154,8 +154,8 @@ async def get_account_state_history(self,
         if has_more and account_states:
             next_cursor = account_states[-1].timestamp.isoformat()

-        # Format response
-        history = []
+        # Format response - Group by minute to aggregate account/connector states
+        minute_groups = {}
         for account_state in account_states:
             token_info = []
             for token_state in account_state.token_states:
@@ -167,15 +167,26 @@
                     "available_units": float(token_state.available_units)
                 })

-            state_dict = {
-                "timestamp": account_state.timestamp.isoformat(),
-                "state": {
-                    account_state.account_name: {
-                        account_state.connector_name: token_info
-                    }
+            # Truncate timestamp to the minute for grouping
+            minute_timestamp = account_state.timestamp.replace(second=0, microsecond=0)
+            minute_key = minute_timestamp.isoformat()
+
+            # Initialize minute group if it doesn't exist
+            if minute_key not in minute_groups:
+                minute_groups[minute_key] = {
+                    "timestamp": minute_key,
+                    "state": {}
                 }
-            }
-            history.append(state_dict)
+
+            # Add account/connector to the minute group
+            if account_state.account_name not in minute_groups[minute_key]["state"]:
+                minute_groups[minute_key]["state"][account_state.account_name] = {}
+
+            minute_groups[minute_key]["state"][account_state.account_name][account_state.connector_name] = token_info
+
+        # Convert to list in reverse chronological order (most recent first)
+        history = list(minute_groups.values())
+        history.sort(key=lambda x: x["timestamp"], reverse=True)

         return history, next_cursor, has_more
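The grouping above buckets snapshot rows by truncating each timestamp to the minute, so account/connector states written a few seconds apart land in the same report entry. A standalone sketch of the bucketing step (the function name is illustrative):

from datetime import datetime

def minute_key(ts: datetime) -> str:
    # 12:34:56.789 and 12:34:03.120 both map to "...T12:34:00"
    return ts.replace(second=0, microsecond=0).isoformat()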
From 3ace002072dcc38019e6f0527511ad3c2d02a0e8 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Fri, 4 Jul 2025 02:22:21 +0200
Subject: [PATCH 200/244] (feat) improve filter to discard other hummingbot-related images that are not bots

---
 services/bots_orchestrator.py | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/services/bots_orchestrator.py b/services/bots_orchestrator.py
index c324eff6..36f6a0d7 100644
--- a/services/bots_orchestrator.py
+++ b/services/bots_orchestrator.py
@@ -1,6 +1,7 @@
 import asyncio
 import logging
 from typing import Optional
+import re

 import docker

@@ -36,9 +37,12 @@ def hummingbot_containers_fiter(container):
     @staticmethod
     def hummingbot_containers_fiter(container):
-        """Filter for Hummingbot containers."""
+        """Filter for Hummingbot containers based on image name pattern."""
         try:
-            return "hummingbot" in container.name and "broker" not in container.name
+            # Get the image name (first tag if available, otherwise the image ID)
+            image_name = container.image.tags[0] if container.image.tags else str(container.image)
+            pattern = r'.+/hummingbot:'
+            return bool(re.match(pattern, image_name))
         except Exception:
             return False

From 82d14b56a1cd0611e8cf3e098fdc39baf9c1492f Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Sat, 5 Jul 2025 01:41:02 +0200
Subject: [PATCH 201/244] (feat) improve bot state while stopping

---
 routers/bot_orchestration.py  |  7 +++++++
 services/bots_orchestrator.py | 28 ++++++++++++++++++++++++++++
 2 files changed, 35 insertions(+)

diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py
index 0483bc16..98ce5174 100644
--- a/routers/bot_orchestration.py
+++ b/routers/bot_orchestration.py
@@ -238,6 +238,10 @@
     except Exception as e:
         logging.error(f"Error in background stop-and-archive for {bot_name}: {str(e)}")
+    finally:
+        # Always clear the stopping status when the background task completes
+        bots_manager.clear_bot_stopping(bot_name_for_orchestrator)
+        logger.info(f"Cleared stopping status for bot {bot_name}")

@@ -292,6 +296,9 @@ async def stop_and_archive_bot(
     # Use the format that's actually stored in active bots
     bot_name_for_orchestrator = container_name if container_name in active_bots else actual_bot_name

+    # Mark the bot as stopping before starting the background task
+    bots_manager.set_bot_stopping(bot_name_for_orchestrator)
+
     # Add the background task
     background_tasks.add_task(
         _background_stop_and_archive,

diff --git a/services/bots_orchestrator.py b/services/bots_orchestrator.py
index 36f6a0d7..1b239313 100644
--- a/services/bots_orchestrator.py
+++ b/services/bots_orchestrator.py
@@ -32,6 +32,9 @@ def __init__(self, broker_host, broker_port, broker_username, broker_password):
         # Active bots tracking
         self.active_bots = {}
         self._update_bots_task: Optional[asyncio.Task] = None
+
+        # Track bots that are currently being stopped and archived
+        self.stopping_bots = set()

         # MQTT manager will be started asynchronously later

@@ -249,6 +252,17 @@ def get_bot_status(self, bot_name):
             return {"status": "not_found", "error": f"Bot {bot_name} not found"}

         try:
+            # Check if bot is currently being stopped and archived
+            if bot_name in self.stopping_bots:
+                return {
+                    "status": "stopping",
+                    "message": "Bot is currently being stopped and archived",
+                    "performance": {},
+                    "error_logs": [],
+                    "general_logs": [],
+                    "recently_active": False,
+                }
+
             # Get data from MQTT manager
             controllers_performance = self.mqtt_manager.get_bot_performance(bot_name)
             performance = self.determine_controller_performance(controllers_performance)
@@ -276,3 +290,17 @@
             }
         except Exception as e:
             return {"status": "error", "error": str(e)}
+
+    def set_bot_stopping(self, bot_name: str):
+        """Mark a bot as currently being stopped and archived."""
+        self.stopping_bots.add(bot_name)
+        logger.info(f"Marked bot {bot_name} as stopping")
+
+    def clear_bot_stopping(self, bot_name: str):
+        """Clear the stopping status for a bot."""
+        self.stopping_bots.discard(bot_name)
+        logger.info(f"Cleared stopping 
status for bot {bot_name}") + + def is_bot_stopping(self, bot_name: str) -> bool: + """Check if a bot is currently being stopped.""" + return bot_name in self.stopping_bots From 873a02965c9d24dffa954425a14d85ad649e24ab Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 5 Jul 2025 01:59:20 +0200 Subject: [PATCH 202/244] (feat) remove bot from mqtt management too --- routers/bot_orchestration.py | 6 ++++++ utils/mqtt_manager.py | 2 ++ 2 files changed, 8 insertions(+) diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py index 98ce5174..b160e3ae 100644 --- a/routers/bot_orchestration.py +++ b/routers/bot_orchestration.py @@ -242,6 +242,12 @@ async def _background_stop_and_archive( # Always clear the stopping status when the background task completes bots_manager.clear_bot_stopping(bot_name_for_orchestrator) logger.info(f"Cleared stopping status for bot {bot_name}") + + # Remove bot from active_bots and clear all MQTT data + if bot_name_for_orchestrator in bots_manager.active_bots: + bots_manager.mqtt_manager.clear_bot_data(bot_name_for_orchestrator) + del bots_manager.active_bots[bot_name_for_orchestrator] + logger.info(f"Removed bot {bot_name_for_orchestrator} from active_bots and cleared MQTT data") @router.post("/stop-and-archive-bot/{bot_name}") diff --git a/utils/mqtt_manager.py b/utils/mqtt_manager.py index a29ec63c..0fda9069 100644 --- a/utils/mqtt_manager.py +++ b/utils/mqtt_manager.py @@ -452,6 +452,8 @@ def clear_bot_data(self, bot_id: str): self._bot_logs.pop(bot_id, None) self._bot_error_logs.pop(bot_id, None) + self._discovered_bots.pop(bot_id, None) + def clear_bot_performance(self, bot_id: str): """Clear only performance data for a bot (useful when bot is stopped).""" self._bot_performance.pop(bot_id, None) From 715003739d3b36548ef8ac1b83052e2093efbc47 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 5 Jul 2025 04:23:46 +0200 Subject: [PATCH 203/244] (feat) add db management for bot runs --- database/__init__.py | 6 +++--- database/models.py | 35 +++++++++++++++++++++++++++++++ database/repositories/__init__.py | 3 ++- 3 files changed, 40 insertions(+), 4 deletions(-) diff --git a/database/__init__.py b/database/__init__.py index f28a347e..0690b994 100644 --- a/database/__init__.py +++ b/database/__init__.py @@ -1,8 +1,8 @@ -from .models import AccountState, TokenState, Order, Trade, PositionSnapshot, FundingPayment, Base +from .models import AccountState, TokenState, Order, Trade, PositionSnapshot, FundingPayment, BotRun, Base from .connection import AsyncDatabaseManager -from .repositories import AccountRepository +from .repositories import AccountRepository, BotRunRepository from .repositories.order_repository import OrderRepository from .repositories.trade_repository import TradeRepository from .repositories.funding_repository import FundingRepository -__all__ = ["AccountState", "TokenState", "Order", "Trade", "PositionSnapshot", "FundingPayment", "Base", "AsyncDatabaseManager", "AccountRepository", "OrderRepository", "TradeRepository", "FundingRepository"] \ No newline at end of file +__all__ = ["AccountState", "TokenState", "Order", "Trade", "PositionSnapshot", "FundingPayment", "BotRun", "Base", "AsyncDatabaseManager", "AccountRepository", "BotRunRepository", "OrderRepository", "TradeRepository", "FundingRepository"] \ No newline at end of file diff --git a/database/models.py b/database/models.py index 35928416..fd40f143 100644 --- a/database/models.py +++ b/database/models.py @@ -175,3 +175,38 @@ class FundingPayment(Base): exchange_funding_id 
= Column(String, nullable=True, index=True) # Exchange funding ID +class BotRun(Base): + __tablename__ = "bot_runs" + + id = Column(Integer, primary_key=True, index=True) + + # Bot identification + bot_name = Column(String, nullable=False, index=True) + instance_name = Column(String, nullable=False, index=True) + + # Deployment info + deployed_at = Column(TIMESTAMP(timezone=True), server_default=func.now(), nullable=False, index=True) + strategy_type = Column(String, nullable=False, index=True) # 'script' or 'controller' + strategy_name = Column(String, nullable=False, index=True) + config_name = Column(String, nullable=True, index=True) + + # Runtime tracking + started_at = Column(TIMESTAMP(timezone=True), nullable=True, index=True) + stopped_at = Column(TIMESTAMP(timezone=True), nullable=True, index=True) + + # Status tracking + deployment_status = Column(String, nullable=False, default="DEPLOYED", index=True) # DEPLOYED, FAILED, ARCHIVED + run_status = Column(String, nullable=False, default="CREATED", index=True) # CREATED, RUNNING, STOPPED, ERROR + + # Configuration and final state + deployment_config = Column(Text, nullable=True) # JSON of full deployment config + final_status = Column(Text, nullable=True) # JSON of final bot state, performance, etc. + + # Account info + account_name = Column(String, nullable=False, index=True) + + # Metadata + image_version = Column(String, nullable=True, index=True) + error_message = Column(Text, nullable=True) + + diff --git a/database/repositories/__init__.py b/database/repositories/__init__.py index 9fb47431..362ea052 100644 --- a/database/repositories/__init__.py +++ b/database/repositories/__init__.py @@ -1,3 +1,4 @@ from .account_repository import AccountRepository +from .bot_run_repository import BotRunRepository -__all__ = ["AccountRepository"] \ No newline at end of file +__all__ = ["AccountRepository", "BotRunRepository"] \ No newline at end of file From 09ede3bd301c4c11f5103c0e5d86321dcf04de10 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 5 Jul 2025 04:24:04 +0200 Subject: [PATCH 204/244] (feat) reduce time for mqtt discovered bots --- services/bots_orchestrator.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/services/bots_orchestrator.py b/services/bots_orchestrator.py index 1b239313..c616674e 100644 --- a/services/bots_orchestrator.py +++ b/services/bots_orchestrator.py @@ -90,7 +90,7 @@ async def update_active_bots(self, sleep_time=1): docker_bots = await self.get_active_containers() # Get bots from MQTT messages (auto-discovered) - mqtt_bots = self.mqtt_manager.get_discovered_bots(timeout_seconds=300) # 5 minute timeout + mqtt_bots = self.mqtt_manager.get_discovered_bots(timeout_seconds=30) # 30 second timeout # Combine both sources all_active_bots = set(docker_bots + mqtt_bots) @@ -304,3 +304,4 @@ def clear_bot_stopping(self, bot_name: str): def is_bot_stopping(self, bot_name: str) -> bool: """Check if a bot is currently being stopped.""" return bot_name in self.stopping_bots + From 14f9f518d246f61ae6b2580c7a6ee3e1f0db6d1b Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 5 Jul 2025 04:24:13 +0200 Subject: [PATCH 205/244] (feat) formatting --- utils/mqtt_manager.py | 1 - 1 file changed, 1 deletion(-) diff --git a/utils/mqtt_manager.py b/utils/mqtt_manager.py index 0fda9069..767ba649 100644 --- a/utils/mqtt_manager.py +++ b/utils/mqtt_manager.py @@ -451,7 +451,6 @@ def clear_bot_data(self, bot_id: str): self._bot_performance.pop(bot_id, None) self._bot_logs.pop(bot_id, None) 
self._bot_error_logs.pop(bot_id, None) - self._discovered_bots.pop(bot_id, None) def clear_bot_performance(self, bot_id: str): From 874164fb5c354ea2cd6a8bc462587aa2d613495f Mon Sep 17 00:00:00 2001 From: cardosofede Date: Sat, 5 Jul 2025 04:25:10 +0200 Subject: [PATCH 206/244] (feat) add as dependency the db --- deps.py | 8 +- routers/bot_orchestration.py | 291 ++++++++++++++++++++++++++++++++++- 2 files changed, 290 insertions(+), 9 deletions(-) diff --git a/deps.py b/deps.py index 17702b14..accf6f40 100644 --- a/deps.py +++ b/deps.py @@ -4,6 +4,7 @@ from services.docker_service import DockerService from services.market_data_feed_manager import MarketDataFeedManager from utils.bot_archiver import BotArchiver +from database import AsyncDatabaseManager def get_bots_orchestrator(request: Request) -> BotsOrchestrator: @@ -28,4 +29,9 @@ def get_market_data_feed_manager(request: Request) -> MarketDataFeedManager: def get_bot_archiver(request: Request) -> BotArchiver: """Get BotArchiver from app state.""" - return request.app.state.bot_archiver \ No newline at end of file + return request.app.state.bot_archiver + + +def get_database_manager(request: Request) -> AsyncDatabaseManager: + """Get AsyncDatabaseManager from app state.""" + return request.app.state.accounts_service.db_manager \ No newline at end of file diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py index b160e3ae..b5dd1fc1 100644 --- a/routers/bot_orchestration.py +++ b/routers/bot_orchestration.py @@ -10,9 +10,10 @@ from models import StartBotAction, StopBotAction, V2ScriptDeployment, V2ControllerDeployment from services.bots_orchestrator import BotsOrchestrator from services.docker_service import DockerService -from deps import get_bots_orchestrator, get_docker_service, get_bot_archiver +from deps import get_bots_orchestrator, get_docker_service, get_bot_archiver, get_database_manager from utils.file_system import fs_util from utils.bot_archiver import BotArchiver +from database import AsyncDatabaseManager, BotRunRepository router = APIRouter(tags=["Bot Orchestration"], prefix="/bot-orchestration") @@ -121,39 +122,234 @@ async def get_bot_history( @router.post("/start-bot") -async def start_bot(action: StartBotAction, bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator)): +async def start_bot( + action: StartBotAction, + bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator), + db_manager: AsyncDatabaseManager = Depends(get_database_manager) +): """ Start a bot with the specified configuration. 
Args: action: StartBotAction containing bot configuration parameters bots_manager: Bot orchestrator service dependency + db_manager: Database manager dependency Returns: Dictionary with status and response from bot start operation """ response = await bots_manager.start_bot(action.bot_name, log_level=action.log_level, script=action.script, conf=action.conf, async_backend=action.async_backend) + + # Update bot run status to RUNNING if start was successful + if response.get("success"): + try: + async with db_manager.get_session_context() as session: + bot_run_repo = BotRunRepository(session) + await bot_run_repo.update_bot_run_started(action.bot_name) + logger.info(f"Updated bot run status to RUNNING for {action.bot_name}") + except Exception as e: + logger.error(f"Failed to update bot run status: {e}") + # Don't fail the start operation if bot run update fails + return {"status": "success", "response": response} @router.post("/stop-bot") -async def stop_bot(action: StopBotAction, bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator)): +async def stop_bot( + action: StopBotAction, + bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator), + db_manager: AsyncDatabaseManager = Depends(get_database_manager) +): """ Stop a bot with the specified configuration. Args: action: StopBotAction containing bot stop parameters bots_manager: Bot orchestrator service dependency + db_manager: Database manager dependency Returns: Dictionary with status and response from bot stop operation """ response = await bots_manager.stop_bot(action.bot_name, skip_order_cancellation=action.skip_order_cancellation, async_backend=action.async_backend) + + # Update bot run status to STOPPED if stop was successful + if response.get("success"): + try: + # Try to get bot status for final status data + final_status = bots_manager.get_bot_status(action.bot_name) + + async with db_manager.get_session_context() as session: + bot_run_repo = BotRunRepository(session) + await bot_run_repo.update_bot_run_stopped( + action.bot_name, + final_status=final_status + ) + logger.info(f"Updated bot run status to STOPPED for {action.bot_name}") + except Exception as e: + logger.error(f"Failed to update bot run status: {e}") + # Don't fail the stop operation if bot run update fails + return {"status": "success", "response": response} +@router.get("/bot-runs") +async def get_bot_runs( + bot_name: str = None, + account_name: str = None, + strategy_type: str = None, + strategy_name: str = None, + run_status: str = None, + deployment_status: str = None, + limit: int = 100, + offset: int = 0, + db_manager: AsyncDatabaseManager = Depends(get_database_manager) +): + """ + Get bot runs with optional filtering. 
+ + Args: + bot_name: Filter by bot name + account_name: Filter by account name + strategy_type: Filter by strategy type (script or controller) + strategy_name: Filter by strategy name + run_status: Filter by run status (CREATED, RUNNING, STOPPED, ERROR) + deployment_status: Filter by deployment status (DEPLOYED, FAILED, ARCHIVED) + limit: Maximum number of results to return + offset: Number of results to skip + db_manager: Database manager dependency + + Returns: + List of bot runs with their details + """ + try: + async with db_manager.get_session_context() as session: + bot_run_repo = BotRunRepository(session) + bot_runs = await bot_run_repo.get_bot_runs( + bot_name=bot_name, + account_name=account_name, + strategy_type=strategy_type, + strategy_name=strategy_name, + run_status=run_status, + deployment_status=deployment_status, + limit=limit, + offset=offset + ) + + # Convert bot runs to dictionaries for JSON serialization + runs_data = [] + for run in bot_runs: + run_dict = { + "id": run.id, + "bot_name": run.bot_name, + "instance_name": run.instance_name, + "deployed_at": run.deployed_at.isoformat() if run.deployed_at else None, + "started_at": run.started_at.isoformat() if run.started_at else None, + "stopped_at": run.stopped_at.isoformat() if run.stopped_at else None, + "strategy_type": run.strategy_type, + "strategy_name": run.strategy_name, + "config_name": run.config_name, + "account_name": run.account_name, + "image_version": run.image_version, + "deployment_status": run.deployment_status, + "run_status": run.run_status, + "deployment_config": run.deployment_config, + "final_status": run.final_status, + "error_message": run.error_message + } + runs_data.append(run_dict) + + return { + "status": "success", + "data": runs_data, + "total": len(runs_data), + "limit": limit, + "offset": offset + } + except Exception as e: + logger.error(f"Failed to get bot runs: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/bot-runs/{bot_run_id}") +async def get_bot_run_by_id( + bot_run_id: int, + db_manager: AsyncDatabaseManager = Depends(get_database_manager) +): + """ + Get a specific bot run by ID. 
+ + Args: + bot_run_id: ID of the bot run + db_manager: Database manager dependency + + Returns: + Bot run details + + Raises: + HTTPException: 404 if bot run not found + """ + try: + async with db_manager.get_session_context() as session: + bot_run_repo = BotRunRepository(session) + bot_run = await bot_run_repo.get_bot_run_by_id(bot_run_id) + + if not bot_run: + raise HTTPException(status_code=404, detail=f"Bot run {bot_run_id} not found") + + run_dict = { + "id": bot_run.id, + "bot_name": bot_run.bot_name, + "instance_name": bot_run.instance_name, + "deployed_at": bot_run.deployed_at.isoformat() if bot_run.deployed_at else None, + "started_at": bot_run.started_at.isoformat() if bot_run.started_at else None, + "stopped_at": bot_run.stopped_at.isoformat() if bot_run.stopped_at else None, + "strategy_type": bot_run.strategy_type, + "strategy_name": bot_run.strategy_name, + "config_name": bot_run.config_name, + "account_name": bot_run.account_name, + "image_version": bot_run.image_version, + "deployment_status": bot_run.deployment_status, + "run_status": bot_run.run_status, + "deployment_config": bot_run.deployment_config, + "final_status": bot_run.final_status, + "error_message": bot_run.error_message + } + + return {"status": "success", "data": run_dict} + except HTTPException: + raise + except Exception as e: + logger.error(f"Failed to get bot run {bot_run_id}: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + +@router.get("/bot-runs/stats") +async def get_bot_run_stats( + db_manager: AsyncDatabaseManager = Depends(get_database_manager) +): + """ + Get statistics about bot runs. + + Args: + db_manager: Database manager dependency + + Returns: + Bot run statistics + """ + try: + async with db_manager.get_session_context() as session: + bot_run_repo = BotRunRepository(session) + stats = await bot_run_repo.get_bot_run_stats() + + return {"status": "success", "data": stats} + except Exception as e: + logger.error(f"Failed to get bot run stats: {e}") + raise HTTPException(status_code=500, detail=str(e)) + + async def _background_stop_and_archive( bot_name: str, container_name: str, @@ -163,7 +359,8 @@ async def _background_stop_and_archive( s3_bucket: str, bots_manager: BotsOrchestrator, docker_manager: DockerService, - bot_archiver: BotArchiver + bot_archiver: BotArchiver, + db_manager: AsyncDatabaseManager ): """Background task to handle the stop and archive process""" try: @@ -233,11 +430,44 @@ async def _background_stop_and_archive( if remove_response.get("success"): logging.info(f"Successfully completed stop-and-archive for bot {bot_name}") + + # Step 6: Update bot run status to ARCHIVED + try: + async with db_manager.get_session_context() as session: + bot_run_repo = BotRunRepository(session) + await bot_run_repo.update_bot_run_archived(bot_name) + logger.info(f"Updated bot run status to ARCHIVED for {bot_name}") + except Exception as e: + logger.error(f"Failed to update bot run to archived: {e}") else: logging.error(f"Failed to remove container {container_name}") + # Update bot run with error status + try: + async with db_manager.get_session_context() as session: + bot_run_repo = BotRunRepository(session) + await bot_run_repo.update_bot_run_stopped( + bot_name, + error_message="Failed to remove container during archive process" + ) + logger.info(f"Updated bot run with error status for {bot_name}") + except Exception as e: + logger.error(f"Failed to update bot run with error: {e}") + except Exception as e: logging.error(f"Error in background stop-and-archive for {bot_name}: 
{str(e)}") + + # Update bot run with error status + try: + async with db_manager.get_session_context() as session: + bot_run_repo = BotRunRepository(session) + await bot_run_repo.update_bot_run_stopped( + bot_name, + error_message=str(e) + ) + logger.info(f"Updated bot run with error status for {bot_name}") + except Exception as db_error: + logger.error(f"Failed to update bot run with error: {db_error}") finally: # Always clear the stopping status when the background task completes bots_manager.clear_bot_stopping(bot_name_for_orchestrator) @@ -259,7 +489,8 @@ async def stop_and_archive_bot( s3_bucket: str = None, bots_manager: BotsOrchestrator = Depends(get_bots_orchestrator), docker_manager: DockerService = Depends(get_docker_service), - bot_archiver: BotArchiver = Depends(get_bot_archiver) + bot_archiver: BotArchiver = Depends(get_bot_archiver), + db_manager: AsyncDatabaseManager = Depends(get_database_manager) ): """ Gracefully stop a bot and archive its data in the background. @@ -316,7 +547,8 @@ async def stop_and_archive_bot( s3_bucket=s3_bucket, bots_manager=bots_manager, docker_manager=docker_manager, - bot_archiver=bot_archiver + bot_archiver=bot_archiver, + db_manager=db_manager ) return { @@ -338,7 +570,8 @@ async def stop_and_archive_bot( @router.post("/deploy-v2-script") async def deploy_v2_script( config: V2ScriptDeployment, - docker_manager: DockerService = Depends(get_docker_service) + docker_manager: DockerService = Depends(get_docker_service), + db_manager: AsyncDatabaseManager = Depends(get_database_manager) ): """ Creates and autostart a v2 script with a configuration if present. @@ -346,19 +579,42 @@ async def deploy_v2_script( Args: config: Configuration for the new Hummingbot instance docker_manager: Docker service dependency + db_manager: Database manager dependency Returns: Dictionary with creation response and instance details """ logging.info(f"Creating hummingbot instance with config: {config}") response = docker_manager.create_hummingbot_instance(config) + + # Track bot run if deployment was successful + if response.get("success"): + try: + async with db_manager.get_session_context() as session: + bot_run_repo = BotRunRepository(session) + await bot_run_repo.create_bot_run( + bot_name=config.instance_name, + instance_name=config.instance_name, + strategy_type="script", + strategy_name=config.script or "unknown", + account_name=config.credentials_profile, + config_name=config.script_config, + image_version=config.image, + deployment_config=config.dict() + ) + logger.info(f"Created bot run record for {config.instance_name}") + except Exception as e: + logger.error(f"Failed to create bot run record: {e}") + # Don't fail the deployment if bot run creation fails + return response @router.post("/deploy-v2-controllers") async def deploy_v2_controllers( deployment: V2ControllerDeployment, - docker_manager: DockerService = Depends(get_docker_service) + docker_manager: DockerService = Depends(get_docker_service), + db_manager: AsyncDatabaseManager = Depends(get_database_manager) ): """ Deploy a V2 strategy with controllers by generating the script config and creating the instance. 
@@ -425,6 +681,25 @@ async def deploy_v2_controllers(
         response["script_config_generated"] = script_config_filename
         response["controllers_deployed"] = deployment.controllers_config
 
+        # Track bot run if deployment was successful
+        try:
+            async with db_manager.get_session_context() as session:
+                bot_run_repo = BotRunRepository(session)
+                await bot_run_repo.create_bot_run(
+                    bot_name=deployment.instance_name,
+                    instance_name=deployment.instance_name,
+                    strategy_type="controller",
+                    strategy_name="v2_with_controllers",
+                    account_name=deployment.credentials_profile,
+                    config_name=script_config_filename,
+                    image_version=deployment.image,
+                    deployment_config=deployment.dict()
+                )
+                logger.info(f"Created bot run record for controller deployment {deployment.instance_name}")
+        except Exception as e:
+            logger.error(f"Failed to create bot run record: {e}")
+            # Don't fail the deployment if bot run creation fails
+
         return response
 
     except Exception as e:

From fa8cda4be39e7398442175fbb431b91e384d8801 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Sat, 5 Jul 2025 05:09:40 +0200
Subject: [PATCH 207/244] (feat) remove start time

---
 database/models.py | 1 -
 routers/bot_orchestration.py | 11 +----------
 2 files changed, 1 insertion(+), 11 deletions(-)

diff --git a/database/models.py b/database/models.py
index fd40f143..e95b07b0 100644
--- a/database/models.py
+++ b/database/models.py
@@ -191,7 +191,6 @@ class BotRun(Base):
     config_name = Column(String, nullable=True, index=True)
 
     # Runtime tracking
-    started_at = Column(TIMESTAMP(timezone=True), nullable=True, index=True)
     stopped_at = Column(TIMESTAMP(timezone=True), nullable=True, index=True)
 
     # Status tracking
diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py
index b5dd1fc1..698771b2 100644
--- a/routers/bot_orchestration.py
+++ b/routers/bot_orchestration.py
@@ -141,16 +141,7 @@ async def start_bot(
     response = await bots_manager.start_bot(action.bot_name, log_level=action.log_level, script=action.script,
                                             conf=action.conf, async_backend=action.async_backend)
 
-    # Update bot run status to RUNNING if start was successful
-    if response.get("success"):
-        try:
-            async with db_manager.get_session_context() as session:
-                bot_run_repo = BotRunRepository(session)
-                await bot_run_repo.update_bot_run_started(action.bot_name)
-                logger.info(f"Updated bot run status to RUNNING for {action.bot_name}")
-        except Exception as e:
-            logger.error(f"Failed to update bot run status: {e}")
-            # Don't fail the start operation if bot run update fails
+    # Bot run tracking simplified - only track deployment and stop times
 
     return {"status": "success", "response": response}
 

From 0338a72d38793fd11c4a0d451b08390e66a0fe7a Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Sat, 5 Jul 2025 21:49:18 +0200
Subject: [PATCH 208/244] (feat) add missing repository

---
 database/repositories/bot_run_repository.py | 190 ++++++++++++++++++++
 1 file changed, 190 insertions(+)
 create mode 100644 database/repositories/bot_run_repository.py

diff --git a/database/repositories/bot_run_repository.py b/database/repositories/bot_run_repository.py
new file mode 100644
index 00000000..cd7ab271
--- /dev/null
+++ b/database/repositories/bot_run_repository.py
@@ -0,0 +1,190 @@
+import json
+from datetime import datetime
+from typing import Dict, List, Optional, Any
+
+from sqlalchemy import desc, select, and_, or_, func
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from database.models import BotRun
+
+
+class BotRunRepository:
+    def __init__(self, session: AsyncSession):
+        self.session = 
session + + async def create_bot_run( + self, + bot_name: str, + instance_name: str, + strategy_type: str, # 'script' or 'controller' + strategy_name: str, + account_name: str, + config_name: Optional[str] = None, + image_version: Optional[str] = None, + deployment_config: Optional[Dict[str, Any]] = None + ) -> BotRun: + """Create a new bot run record.""" + bot_run = BotRun( + bot_name=bot_name, + instance_name=instance_name, + strategy_type=strategy_type, + strategy_name=strategy_name, + config_name=config_name, + account_name=account_name, + image_version=image_version, + deployment_config=json.dumps(deployment_config) if deployment_config else None, + deployment_status="DEPLOYED", + run_status="CREATED" + ) + + self.session.add(bot_run) + await self.session.flush() + await self.session.refresh(bot_run) + return bot_run + + + async def update_bot_run_stopped( + self, + bot_name: str, + final_status: Optional[Dict[str, Any]] = None, + error_message: Optional[str] = None + ) -> Optional[BotRun]: + """Mark a bot run as stopped and save final status.""" + stmt = select(BotRun).where( + and_( + BotRun.bot_name == bot_name, + or_(BotRun.run_status == "RUNNING", BotRun.run_status == "CREATED") + ) + ).order_by(desc(BotRun.deployed_at)) + + result = await self.session.execute(stmt) + bot_run = result.scalar_one_or_none() + + if bot_run: + bot_run.run_status = "STOPPED" if not error_message else "ERROR" + bot_run.stopped_at = datetime.utcnow() + bot_run.final_status = json.dumps(final_status) if final_status else None + bot_run.error_message = error_message + await self.session.flush() + await self.session.refresh(bot_run) + + return bot_run + + async def update_bot_run_archived(self, bot_name: str) -> Optional[BotRun]: + """Mark a bot run as archived.""" + stmt = select(BotRun).where( + BotRun.bot_name == bot_name + ).order_by(desc(BotRun.deployed_at)) + + result = await self.session.execute(stmt) + bot_run = result.scalar_one_or_none() + + if bot_run: + bot_run.deployment_status = "ARCHIVED" + await self.session.flush() + await self.session.refresh(bot_run) + + return bot_run + + async def get_bot_runs( + self, + bot_name: Optional[str] = None, + account_name: Optional[str] = None, + strategy_type: Optional[str] = None, + strategy_name: Optional[str] = None, + run_status: Optional[str] = None, + deployment_status: Optional[str] = None, + limit: int = 100, + offset: int = 0 + ) -> List[BotRun]: + """Get bot runs with optional filters.""" + stmt = select(BotRun) + + conditions = [] + if bot_name: + conditions.append(BotRun.bot_name == bot_name) + if account_name: + conditions.append(BotRun.account_name == account_name) + if strategy_type: + conditions.append(BotRun.strategy_type == strategy_type) + if strategy_name: + conditions.append(BotRun.strategy_name == strategy_name) + if run_status: + conditions.append(BotRun.run_status == run_status) + if deployment_status: + conditions.append(BotRun.deployment_status == deployment_status) + + if conditions: + stmt = stmt.where(and_(*conditions)) + + stmt = stmt.order_by(desc(BotRun.deployed_at)).limit(limit).offset(offset) + + result = await self.session.execute(stmt) + return result.scalars().all() + + async def get_bot_run_by_id(self, bot_run_id: int) -> Optional[BotRun]: + """Get a specific bot run by ID.""" + stmt = select(BotRun).where(BotRun.id == bot_run_id) + result = await self.session.execute(stmt) + return result.scalar_one_or_none() + + async def get_latest_bot_run(self, bot_name: str) -> Optional[BotRun]: + """Get the latest bot run for a 
specific bot.""" + stmt = select(BotRun).where( + BotRun.bot_name == bot_name + ).order_by(desc(BotRun.deployed_at)) + + result = await self.session.execute(stmt) + return result.scalar_one_or_none() + + async def get_active_bot_runs(self) -> List[BotRun]: + """Get all currently active (running) bot runs.""" + stmt = select(BotRun).where( + and_( + BotRun.run_status == "RUNNING", + BotRun.deployment_status == "DEPLOYED" + ) + ).order_by(desc(BotRun.deployed_at)) + + result = await self.session.execute(stmt) + return result.scalars().all() + + async def get_bot_run_stats(self) -> Dict[str, Any]: + """Get statistics about bot runs.""" + # Total runs + total_stmt = select(func.count(BotRun.id)) + total_result = await self.session.execute(total_stmt) + total_runs = total_result.scalar() + + # Active runs + active_stmt = select(func.count(BotRun.id)).where( + and_( + BotRun.run_status == "RUNNING", + BotRun.deployment_status == "DEPLOYED" + ) + ) + active_result = await self.session.execute(active_stmt) + active_runs = active_result.scalar() + + # Runs by strategy type + strategy_stmt = select( + BotRun.strategy_type, + func.count(BotRun.id).label('count') + ).group_by(BotRun.strategy_type) + strategy_result = await self.session.execute(strategy_stmt) + strategy_counts = {row.strategy_type: row.count for row in strategy_result} + + # Runs by status + status_stmt = select( + BotRun.run_status, + func.count(BotRun.id).label('count') + ).group_by(BotRun.run_status) + status_result = await self.session.execute(status_stmt) + status_counts = {row.run_status: row.count for row in status_result} + + return { + "total_runs": total_runs, + "active_runs": active_runs, + "strategy_type_counts": strategy_counts, + "status_counts": status_counts + } \ No newline at end of file From 6b3fb0d3ed2daf1e6f821c1b3419e63df8bd6cc4 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 7 Jul 2025 20:21:10 +0300 Subject: [PATCH 209/244] (feat) add headless support for deployment --- models/bot_orchestration.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/models/bot_orchestration.py b/models/bot_orchestration.py index b328133a..90a18323 100644 --- a/models/bot_orchestration.py +++ b/models/bot_orchestration.py @@ -101,6 +101,7 @@ class V2ScriptDeployment(BaseModel): image: str = Field(default="hummingbot/hummingbot:latest", description="Docker image for the Hummingbot instance") script: Optional[str] = Field(default=None, description="Name of the script to run (without .py extension)") script_config: Optional[str] = Field(default=None, description="Name of the script configuration file (without .yml extension)") + headless: bool = Field(default=False, description="Run in headless mode (no UI)") class V2ControllerDeployment(BaseModel): @@ -110,4 +111,5 @@ class V2ControllerDeployment(BaseModel): controllers_config: List[str] = Field(description="List of controller configuration files to use (without .yml extension)") max_global_drawdown_quote: Optional[float] = Field(default=None, description="Maximum allowed global drawdown in quote usually USDT") max_controller_drawdown_quote: Optional[float] = Field(default=None, description="Maximum allowed per-controller drawdown in quote usually USDT") - image: str = Field(default="hummingbot/hummingbot:latest", description="Docker image for the Hummingbot instance") \ No newline at end of file + image: str = Field(default="hummingbot/hummingbot:latest", description="Docker image for the Hummingbot instance") + headless: bool = Field(default=False, 
description="Run in headless mode (no UI)") \ No newline at end of file From 93f3a683713018d6e313e0e09c881d0180121e72 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 7 Jul 2025 20:21:32 +0300 Subject: [PATCH 210/244] (feat) add headless env variable for deploy --- services/docker_service.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/docker_service.py b/services/docker_service.py index 033a9ca7..69ac32e9 100644 --- a/services/docker_service.py +++ b/services/docker_service.py @@ -254,6 +254,9 @@ def create_hummingbot_instance(self, config: V2ScriptDeployment): else: return {"success": False, "message": "Password not provided. We cannot start the bot without a password."} + if config.headless: + environment["HEADLESS"] = "true" + log_config = LogConfig( type="json-file", config={ From 8054b19b4e51017d4f75285708a942d7e844d4bf Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 7 Jul 2025 20:21:54 +0300 Subject: [PATCH 211/244] (feat) fix message duplication --- utils/mqtt_manager.py | 42 ++++++++++++++++++++++++++++++++++++++---- 1 file changed, 38 insertions(+), 4 deletions(-) diff --git a/utils/mqtt_manager.py b/utils/mqtt_manager.py index 767ba649..e2a1473d 100644 --- a/utils/mqtt_manager.py +++ b/utils/mqtt_manager.py @@ -33,6 +33,10 @@ def __init__(self, host: str, port: int, username: str, password: str): # Auto-discovered bots self._discovered_bots: Dict[str, float] = {} # bot_id: last_seen_timestamp + + # Message deduplication tracking + self._processed_messages: Dict[str, float] = {} # message_hash: timestamp + self._message_ttl = 300 # 5 minutes TTL for processed messages # Connection state self._connected = False @@ -186,17 +190,47 @@ async def _handle_performance(self, bot_id: str, data: Any): self._bot_performance[bot_id][controller_id] = performance async def _handle_log(self, bot_id: str, data: Any): - """Handle log messages.""" + """Handle log messages with deduplication.""" + # Create a unique message identifier for deduplication if isinstance(data, dict): - # Check for different possible field names level = data.get("level_name") or data.get("levelname") or data.get("level", "INFO") message = data.get("msg") or data.get("message", "") + timestamp = data.get("timestamp") or data.get("time") or time.time() + + # Create hash for deduplication (bot_id + message + timestamp within 1 second) + message_hash = f"{bot_id}:{message}:{int(timestamp)}" + elif isinstance(data, str): + message = data + timestamp = time.time() + level = "INFO" + + # Create hash for string messages + message_hash = f"{bot_id}:{message}:{int(timestamp)}" + else: + return # Skip invalid data + # Check for duplicates + current_time = time.time() + if message_hash in self._processed_messages: + # Skip duplicate message + logger.debug(f"Skipping duplicate log message from {bot_id}: {message[:50]}...") + return + + # Clean up old message hashes (older than TTL) + expired_hashes = [h for h, t in self._processed_messages.items() if current_time - t > self._message_ttl] + for h in expired_hashes: + del self._processed_messages[h] + + # Record this message as processed + self._processed_messages[message_hash] = current_time + + # Process the message + if isinstance(data, dict): # Normalize the log entry log_entry = { "level_name": level, "msg": message, - "timestamp": data.get("timestamp") or data.get("time") or time.time(), + "timestamp": timestamp, **data, # Include all original fields } @@ -206,7 +240,7 @@ async def _handle_log(self, bot_id: str, data: Any): 
self._bot_logs[bot_id].append(log_entry) elif isinstance(data, str): # Handle plain string logs - log_entry = {"level_name": "INFO", "msg": data, "timestamp": time.time()} + log_entry = {"level_name": "INFO", "msg": data, "timestamp": timestamp} self._bot_logs[bot_id].append(log_entry) async def _handle_notify(self, bot_id: str, data: Any): From 603ab7148c9ec0499f14eed995a8c7d354d220ec Mon Sep 17 00:00:00 2001 From: cardosofede Date: Mon, 7 Jul 2025 23:52:10 +0300 Subject: [PATCH 212/244] (feat) remove stopping bot from all bots --- services/bots_orchestrator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/services/bots_orchestrator.py b/services/bots_orchestrator.py index c616674e..ef25aab1 100644 --- a/services/bots_orchestrator.py +++ b/services/bots_orchestrator.py @@ -93,7 +93,7 @@ async def update_active_bots(self, sleep_time=1): mqtt_bots = self.mqtt_manager.get_discovered_bots(timeout_seconds=30) # 30 second timeout # Combine both sources - all_active_bots = set(docker_bots + mqtt_bots) + all_active_bots = set([bot for bot in docker_bots + mqtt_bots if not self.is_bot_stopping(bot)]) # Remove bots that are no longer active for bot_name in list(self.active_bots): From e9f4ededbcd6410bd9e24a67d4d8c9eb79cb2ce2 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 8 Jul 2025 01:10:41 +0300 Subject: [PATCH 213/244] (feat) add stopped at --- database/repositories/bot_run_repository.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/database/repositories/bot_run_repository.py b/database/repositories/bot_run_repository.py index cd7ab271..3999389e 100644 --- a/database/repositories/bot_run_repository.py +++ b/database/repositories/bot_run_repository.py @@ -1,5 +1,5 @@ import json -from datetime import datetime +from datetime import datetime, timezone from typing import Dict, List, Optional, Any from sqlalchemy import desc, select, and_, or_, func @@ -81,6 +81,7 @@ async def update_bot_run_archived(self, bot_name: str) -> Optional[BotRun]: if bot_run: bot_run.deployment_status = "ARCHIVED" + bot_run.stopped_at = datetime.now(timezone.utc) await self.session.flush() await self.session.refresh(bot_run) From 5ae1267e5a33c3e4b397daa1525788643e91caea Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 8 Jul 2025 01:10:50 +0300 Subject: [PATCH 214/244] (feat) dump final status in db --- routers/bot_orchestration.py | 44 ++++++++++++++++++++++++++---------- 1 file changed, 32 insertions(+), 12 deletions(-) diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py index 698771b2..f68d2496 100644 --- a/routers/bot_orchestration.py +++ b/routers/bot_orchestration.py @@ -357,7 +357,30 @@ async def _background_stop_and_archive( try: logger.info(f"Starting background stop-and-archive for {bot_name}") - # Step 1: Stop the bot trading process + # Step 1: Capture bot final status before stopping (while bot is still running) + logger.info(f"Capturing final status for {bot_name_for_orchestrator}") + final_status = None + try: + final_status = bots_manager.get_bot_status(bot_name_for_orchestrator) + logger.info(f"Captured final status for {bot_name_for_orchestrator}: {final_status}") + except Exception as e: + logger.warning(f"Failed to capture final status for {bot_name_for_orchestrator}: {e}") + + # Step 2: Update bot run with stopped_at timestamp and final status before stopping + try: + async with db_manager.get_session_context() as session: + bot_run_repo = BotRunRepository(session) + await bot_run_repo.update_bot_run_stopped( + bot_name, + 
final_status=final_status
+                )
+                logger.info(f"Updated bot run with stopped_at timestamp and final status for {bot_name}")
+        except Exception as e:
+            logger.error(f"Failed to update bot run with stopped status: {e}")
+            # Continue with stop process even if database update fails
+
+        # Step 3: Mark the bot as stopping, and stop the bot trading process
+        bots_manager.set_bot_stopping(bot_name_for_orchestrator)
         logger.info(f"Stopping bot trading process for {bot_name_for_orchestrator}")
         stop_response = await bots_manager.stop_bot(
             bot_name_for_orchestrator,
@@ -370,11 +393,11 @@ async def _background_stop_and_archive(
             logger.error(f"Failed to stop bot process: {error_msg}")
             return
 
-        # Step 2: Wait for graceful shutdown (15 seconds as requested)
+        # Step 4: Wait for graceful shutdown (15 seconds as requested)
         logger.info(f"Waiting 15 seconds for bot {bot_name} to gracefully shutdown")
         await asyncio.sleep(15)
 
-        # Step 3: Stop the container with monitoring
+        # Step 5: Stop the container with monitoring
         max_retries = 10
         retry_interval = 2
         container_stopped = False
@@ -396,7 +419,7 @@ async def _background_stop_and_archive(
             logger.error(f"Failed to stop container {container_name} after {max_retries} attempts")
             return
 
-        # Step 4: Archive the bot data
+        # Step 6: Archive the bot data
         instance_dir = os.path.join('bots', 'instances', container_name)
         logger.info(f"Archiving bot data from {instance_dir}")
 
@@ -410,7 +433,7 @@ async def _background_stop_and_archive(
             logger.error(f"Archive failed: {str(e)}")
             # Continue with removal even if archive fails
 
-        # Step 5: Remove the container
+        # Step 7: Remove the container
         logging.info(f"Removing container {container_name}")
         remove_response = docker_manager.remove_container(container_name, force=False)
 
@@ -422,18 +445,18 @@ async def _background_stop_and_archive(
         if remove_response.get("success"):
             logging.info(f"Successfully completed stop-and-archive for bot {bot_name}")
 
-            # Step 6: Update bot run status to ARCHIVED
+            # Step 8: Update bot run deployment status to ARCHIVED
             try:
                 async with db_manager.get_session_context() as session:
                     bot_run_repo = BotRunRepository(session)
                     await bot_run_repo.update_bot_run_archived(bot_name)
-                    logger.info(f"Updated bot run status to ARCHIVED for {bot_name}")
+                    logger.info(f"Updated bot run deployment status to ARCHIVED for {bot_name}")
             except Exception as e:
                 logger.error(f"Failed to update bot run to archived: {e}")
         else:
             logging.error(f"Failed to remove container {container_name}")
 
-            # Update bot run with error status
+            # Update bot run with error status (but keep stopped_at timestamp from earlier)
             try:
                 async with db_manager.get_session_context() as session:
                     bot_run_repo = BotRunRepository(session)
@@ -523,10 +546,7 @@ async def stop_and_archive_bot(
 
     # Use the format that's actually stored in active bots
     bot_name_for_orchestrator = container_name if container_name in active_bots else actual_bot_name
-
-    # Mark the bot as stopping before starting the background task
-    bots_manager.set_bot_stopping(bot_name_for_orchestrator)
-
+    
     # Add the background task
     background_tasks.add_task(
         _background_stop_and_archive,

From db5fa6c4841936fb369c67ccfa82bc2dbe60dee3 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Tue, 8 Jul 2025 01:11:48 +0300
Subject: [PATCH 215/244] (feat) avoid returning bots that are in the stopping process (to be improved in future versions)

---
 services/bots_orchestrator.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/services/bots_orchestrator.py b/services/bots_orchestrator.py
index 
ef25aab1..b4449e95 100644 --- a/services/bots_orchestrator.py +++ b/services/bots_orchestrator.py @@ -82,7 +82,7 @@ def stop(self): # Stop MQTT manager asynchronously asyncio.create_task(self.mqtt_manager.stop()) - async def update_active_bots(self, sleep_time=1): + async def update_active_bots(self, sleep_time=1.0): """Monitor and update active bots list using both Docker and MQTT discovery.""" while True: try: @@ -236,9 +236,10 @@ def determine_controller_performance(controllers_performance): return cleaned_performance def get_all_bots_status(self): + # TODO: improve logic of bots state management """Get status information for all active bots.""" all_bots_status = {} - for bot in self.active_bots: + for bot in [bot for bot in self.active_bots if not self.is_bot_stopping(bot)]: status = self.get_bot_status(bot) status["source"] = self.active_bots[bot].get("source", "unknown") all_bots_status[bot] = status From 218e6f95b9e06112ac84cb57f6585394558236fc Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 8 Jul 2025 18:09:02 +0300 Subject: [PATCH 216/244] (feat) fix password verification path --- config.py | 2 +- utils/security.py | 16 +++++++++------- 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/config.py b/config.py index 62c90f81..97abdd45 100644 --- a/config.py +++ b/config.py @@ -70,7 +70,7 @@ class AppSettings(BaseSettings): # Static paths controllers_path: str = "bots/conf/controllers" controllers_module: str = "bots.controllers" - password_verification_path: str = "bots/credentials/master_account/.password_verification" + password_verification_path: str = "credentials/master_account/.password_verification" # Environment-configurable settings logfire_environment: str = Field( diff --git a/utils/security.py b/utils/security.py index 095a7fdd..c64bd823 100644 --- a/utils/security.py +++ b/utils/security.py @@ -60,17 +60,14 @@ def update_connector_keys(cls, account_name: str, connector_config: ClientConfig @staticmethod def new_password_required() -> bool: - return not Path(settings.app.password_verification_path).exists() - - @staticmethod - def store_password_verification(secrets_manager: BaseSecretsManager): - encrypted_word = secrets_manager.encrypt_secret_value(PASSWORD_VERIFICATION_WORD, PASSWORD_VERIFICATION_WORD) - fs_util.ensure_file_and_dump_text(settings.app.password_verification_path, encrypted_word) + full_path = fs_util._get_full_path(settings.app.password_verification_path) + return not Path(full_path).exists() @staticmethod def validate_password(secrets_manager: BaseSecretsManager) -> bool: valid = False - with open(settings.app.password_verification_path, "r") as f: + full_path = fs_util._get_full_path(settings.app.password_verification_path) + with open(full_path, "r") as f: encrypted_word = f.read() try: decrypted_word = secrets_manager.decrypt_secret_value(PASSWORD_VERIFICATION_WORD, encrypted_word) @@ -79,3 +76,8 @@ def validate_password(secrets_manager: BaseSecretsManager) -> bool: if str(e) != "MAC mismatch": raise e return valid + + @staticmethod + def store_password_verification(secrets_manager: BaseSecretsManager): + encrypted_word = secrets_manager.encrypt_secret_value(PASSWORD_VERIFICATION_WORD, PASSWORD_VERIFICATION_WORD) + fs_util.ensure_file_and_dump_text(settings.app.password_verification_path, encrypted_word) From 066cb9aada57d3e332129fef508c35be835df8d6 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Tue, 8 Jul 2025 18:09:10 +0300 Subject: [PATCH 217/244] (feat) remove BOTS_PATH override --- docker-compose.yml | 1 - 1 file changed, 1 
deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 8d64d790..c6167528 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -13,7 +13,6 @@ services: # Override specific values for Docker networking - BROKER_HOST=emqx - DATABASE_URL=postgresql+asyncpg://hbot:hummingbot-api@postgres:5432/hummingbot_api - - BOTS_PATH=/hummingbot-api/bots networks: - emqx-bridge depends_on: From 835eb963b3918e831c099e6f8b37f9be0994a36c Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 9 Jul 2025 15:58:43 +0300 Subject: [PATCH 218/244] (feat) add get available candles connectors --- routers/market_data.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/routers/market_data.py b/routers/market_data.py index 9be143b0..bb862cdb 100644 --- a/routers/market_data.py +++ b/routers/market_data.py @@ -3,6 +3,7 @@ from fastapi import APIRouter, Request, HTTPException, Depends from hummingbot.data_feed.candles_feed.data_types import CandlesConfig, HistoricalCandlesConfig +from hummingbot.data_feed.candles_feed.candles_factory import CandlesFactory from services.market_data_feed_manager import MarketDataFeedManager from models import ( PriceRequest, PricesResponse, FundingInfoRequest, FundingInfoResponse, @@ -128,6 +129,17 @@ async def get_market_data_settings(): } +@router.get("/available-candle-connectors") +async def get_available_candle_connectors(): + """ + Get list of available connectors that support candle data feeds. + + Returns: + List of connector names that can be used for fetching candle data + """ + return list(CandlesFactory._candles_map.keys()) + + # Enhanced Market Data Endpoints @router.post("/prices", response_model=PricesResponse) From 137356476dfb35f2ef4f2b96b70df6fde6deb571 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Wed, 9 Jul 2025 18:58:14 +0300 Subject: [PATCH 219/244] (feat) add rate oracle --- services/market_data_feed_manager.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/services/market_data_feed_manager.py b/services/market_data_feed_manager.py index 88ed776f..3153b879 100644 --- a/services/market_data_feed_manager.py +++ b/services/market_data_feed_manager.py @@ -4,6 +4,7 @@ import logging from enum import Enum +from hummingbot.core.rate_oracle.rate_oracle import RateOracle from hummingbot.data_feed.candles_feed.data_types import CandlesConfig from hummingbot.data_feed.market_data_provider import MarketDataProvider @@ -25,7 +26,7 @@ class MarketDataFeedManager: are automatically stopped and cleaned up. """ - def __init__(self, market_data_provider: MarketDataProvider, cleanup_interval: int = 300, feed_timeout: int = 600): + def __init__(self, market_data_provider: MarketDataProvider, rate_oracle: RateOracle, cleanup_interval: int = 300, feed_timeout: int = 600): """ Initialize the MarketDataFeedManager. 
@@ -35,6 +36,7 @@ def __init__(self, market_data_provider: MarketDataProvider, cleanup_interval: i
         feed_timeout: How long to keep unused feeds alive (seconds, default: 10 minutes)
         """
         self.market_data_provider = market_data_provider
+        self.rate_oracle = rate_oracle
         self.cleanup_interval = cleanup_interval
         self.feed_timeout = feed_timeout
         self.last_access_times: Dict[str, float] = {}
@@ -55,6 +57,7 @@ def start(self):
         if not self._is_running:
             self._is_running = True
             self._cleanup_task = asyncio.create_task(self._cleanup_loop())
+            self.rate_oracle.start()
             self.logger.info(f"MarketDataFeedManager started with cleanup_interval={self.cleanup_interval}s, feed_timeout={self.feed_timeout}s")
 
     def stop(self):

From 96c4df41f1e4f95c59e2e8e43544ace9959617dd Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Wed, 9 Jul 2025 18:58:35 +0300
Subject: [PATCH 220/244] (feat) reorder logging

---
 services/orders_recorder.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/services/orders_recorder.py b/services/orders_recorder.py
index 6f640fb2..0b4257ae 100644
--- a/services/orders_recorder.py
+++ b/services/orders_recorder.py
@@ -3,8 +3,6 @@
 import math
 import time
 
-# Create module-specific logger
-logger = logging.getLogger(__name__)
 from typing import Any, Optional, Union
 from datetime import datetime
 from decimal import Decimal, InvalidOperation
@@ -18,9 +16,11 @@
     MarketEvent
 )
 from hummingbot.connector.connector_base import ConnectorBase
-
 from database import AsyncDatabaseManager, OrderRepository, TradeRepository
 
+# Initialize logger
+logger = logging.getLogger(__name__)
+
 
 class OrdersRecorder:
     """
@@ -172,7 +172,7 @@ async def _handle_order_filled(self, event: OrderFilledEvent):
             async with self.db_manager.get_session_context() as session:
                 order_repo = OrderRepository(session)
                 trade_repo = TradeRepository(session)
-                
+
                 # Calculate fees
                 trade_fee_paid = 0
                 trade_fee_currency = None
@@ -185,7 +185,7 @@ async def _handle_order_filled(self, event: OrderFilledEvent):
                         price=event.price,
                         order_amount=event.amount,
                         token=quote_asset,
-                        exchange=self._connector
+                        exchange=self._connector,
                     )
                     trade_fee_paid = float(fee_in_quote)
                     trade_fee_currency = quote_asset

From b14371dd61d4323bc1f545590f6b0f53340120ab Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Wed, 9 Jul 2025 18:59:12 +0300
Subject: [PATCH 221/244] (feat) patch save to yaml function temporarily --> waiting for complete decoupling of client config map

---
 main.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/main.py b/main.py
index 67cf2847..dc46c3e5 100644
--- a/main.py
+++ b/main.py
@@ -5,9 +5,24 @@
 import logfire
 import logging
 from dotenv import load_dotenv
+
 # Load environment variables early
 load_dotenv()
 
+# Monkey patch save_to_yml to prevent writes to library directory
+def patched_save_to_yml(yml_path, cm):
+    """Patched version of save_to_yml that prevents writes to library directory"""
+    import logging
+    logger = logging.getLogger(__name__)
+    logger.debug(f"Skipping config write to {yml_path} (patched for API mode)")
+    # Do nothing - this prevents the original function from trying to write to the library directory
+
+# Apply the patch before importing hummingbot components
+from hummingbot.client.config import config_helpers
+config_helpers.save_to_yml = patched_save_to_yml
+
+from hummingbot.core.rate_oracle.rate_oracle import RateOracle
+
 from fastapi import Depends, FastAPI, HTTPException, status
 from fastapi.security import HTTPBasic, HTTPBasicCredentials
 from fastapi.middleware.cors import CORSMiddleware
@@ -96,6 +111,7 @@ async def lifespan(app: FastAPI): # Initialize MarketDataFeedManager with lifecycle management market_data_feed_manager = MarketDataFeedManager( market_data_provider=market_data_provider, + rate_oracle=RateOracle.get_instance(), cleanup_interval=settings.market_data.cleanup_interval, feed_timeout=settings.market_data.feed_timeout ) From 2a249ba2a044b9c8578a0c46d23b9d99cdc43674 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 10 Jul 2025 00:11:47 +0300 Subject: [PATCH 222/244] (feat) remove started at --- routers/bot_orchestration.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/routers/bot_orchestration.py b/routers/bot_orchestration.py index f68d2496..ef6ab761 100644 --- a/routers/bot_orchestration.py +++ b/routers/bot_orchestration.py @@ -237,7 +237,6 @@ async def get_bot_runs( "bot_name": run.bot_name, "instance_name": run.instance_name, "deployed_at": run.deployed_at.isoformat() if run.deployed_at else None, - "started_at": run.started_at.isoformat() if run.started_at else None, "stopped_at": run.stopped_at.isoformat() if run.stopped_at else None, "strategy_type": run.strategy_type, "strategy_name": run.strategy_name, @@ -295,7 +294,6 @@ async def get_bot_run_by_id( "bot_name": bot_run.bot_name, "instance_name": bot_run.instance_name, "deployed_at": bot_run.deployed_at.isoformat() if bot_run.deployed_at else None, - "started_at": bot_run.started_at.isoformat() if bot_run.started_at else None, "stopped_at": bot_run.stopped_at.isoformat() if bot_run.stopped_at else None, "strategy_type": bot_run.strategy_type, "strategy_name": bot_run.strategy_name, From 8e3fc36a018a0fa084d36a253a260850a24db99e Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 10 Jul 2025 01:44:34 +0300 Subject: [PATCH 223/244] (feat) force position update when requested --- services/accounts_service.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/services/accounts_service.py b/services/accounts_service.py index e1a92ae1..4fa05cb9 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -1085,6 +1085,9 @@ async def get_account_positions(self, account_name: str, connector_name: str) -> raise HTTPException(status_code=400, detail=f"Connector '{connector_name}' does not support position tracking") try: + # Force position update to ensure current market prices are used + await connector._update_positions() + positions = [] raw_positions = connector.account_positions From 5c35eaf97fd76eeb129be490b4f8d60791b15098 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 10 Jul 2025 02:15:13 +0300 Subject: [PATCH 224/244] (feat) increase ob depth --- models/market_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/models/market_data.py b/models/market_data.py index 8fdf4930..8b86291d 100644 --- a/models/market_data.py +++ b/models/market_data.py @@ -92,7 +92,7 @@ class OrderBookRequest(BaseModel): """Request model for getting order book data""" connector_name: str = Field(description="Name of the connector") trading_pair: str = Field(description="Trading pair") - depth: int = Field(default=10, ge=1, le=100, description="Number of price levels to return") + depth: int = Field(default=10, ge=1, le=1000, description="Number of price levels to return") class OrderBookLevel(BaseModel): From 458593152954afa9caef96d39739ada1bca7e2e0 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 10 Jul 2025 02:35:59 +0300 Subject: [PATCH 225/244] (feat) fix portfolio distribution filter --- routers/portfolio.py | 72 ++++++++++++++++++++++++++++++++++++++++++-- 1 
file changed, 69 insertions(+), 3 deletions(-)

diff --git a/routers/portfolio.py b/routers/portfolio.py
index d7486550..9164d39e 100644
--- a/routers/portfolio.py
+++ b/routers/portfolio.py
@@ -148,10 +148,10 @@ async def get_portfolio_distribution(
     """
     if not filter_request.account_names:
         # Get distribution for all accounts
-        return accounts_service.get_portfolio_distribution()
+        distribution = accounts_service.get_portfolio_distribution()
     elif len(filter_request.account_names) == 1:
         # Single account - use existing method
-        return accounts_service.get_portfolio_distribution(filter_request.account_names[0])
+        distribution = accounts_service.get_portfolio_distribution(filter_request.account_names[0])
     else:
         # Multiple accounts - need to aggregate
         aggregated_distribution = {
@@ -195,7 +195,73 @@ async def get_portfolio_distribution(
 
         aggregated_distribution["token_count"] = len(aggregated_distribution["tokens"])
 
-        return aggregated_distribution
+        distribution = aggregated_distribution
+
+    # Apply connector filter if specified
+    if filter_request.connector_names:
+        filtered_distribution = []
+        filtered_total_value = 0
+
+        for token_data in distribution.get("distribution", []):
+            filtered_token = {
+                "token": token_data["token"],
+                "total_value": 0,
+                "total_units": 0,
+                "percentage": 0,
+                "accounts": {}
+            }
+
+            # Filter each account's connectors
+            for account_name, account_data in token_data.get("accounts", {}).items():
+                if "connectors" in account_data:
+                    filtered_connectors = {}
+                    account_value = 0
+                    account_units = 0
+
+                    # Only include specified connectors
+                    for connector_name in filter_request.connector_names:
+                        if connector_name in account_data["connectors"]:
+                            filtered_connectors[connector_name] = account_data["connectors"][connector_name]
+                            account_value += account_data["connectors"][connector_name].get("value", 0)
+                            account_units += account_data["connectors"][connector_name].get("units", 0)
+
+                    # Only include account if it has matching connectors
+                    if filtered_connectors:
+                        filtered_token["accounts"][account_name] = {
+                            "value": round(account_value, 6),
+                            "units": account_units,
+                            "percentage": 0,  # Will be recalculated later
+                            "connectors": filtered_connectors
+                        }
+
+                        filtered_token["total_value"] += account_value
+                        filtered_token["total_units"] += account_units
+
+            # Only include token if it has values after filtering
+            if filtered_token["total_value"] > 0:
+                filtered_distribution.append(filtered_token)
+                filtered_total_value += filtered_token["total_value"]
+
+        # Recalculate percentages after filtering
+        if filtered_total_value > 0:
+            for token_data in filtered_distribution:
+                token_data["percentage"] = round((token_data["total_value"] / filtered_total_value) * 100, 4)
+                # Update account percentages
+                for account_data in token_data["accounts"].values():
+                    account_data["percentage"] = round((account_data["value"] / filtered_total_value) * 100, 4)
+
+        # Sort by value (descending)
+        filtered_distribution.sort(key=lambda x: x["total_value"], reverse=True)
+
+        # Update the distribution
+        distribution = {
+            "total_portfolio_value": round(filtered_total_value, 6),
+            "token_count": len(filtered_distribution),
+            "distribution": filtered_distribution,
+            "account_filter": distribution.get("account_filter", "filtered")
+        }
+
+    return distribution
 
 
 @router.post("/accounts-distribution")

From 49fc09f0be6fcdea3830dddd9245008b9b19ab68 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Thu, 10 Jul 2025 02:41:47 +0300
Subject: [PATCH 226/244] (feat) fix for non-existent order id

---
 routers/trading.py | 2 
+- services/accounts_service.py | 13 ++++++++++--- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/routers/trading.py b/routers/trading.py index 8bbad64f..69746365 100644 --- a/routers/trading.py +++ b/routers/trading.py @@ -95,7 +95,7 @@ async def cancel_order(account_name: str, connector_name: str, client_order_id: connector_name=connector_name, client_order_id=client_order_id ) - return {"message": f"Order {cancelled_order_id} cancelled successfully"} + return {"message": f"Order cancellation initiated for {cancelled_order_id}"} except HTTPException: raise except Exception as e: diff --git a/services/accounts_service.py b/services/accounts_service.py index 4fa05cb9..073621cc 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -838,16 +838,23 @@ async def cancel_order(self, account_name: str, connector_name: str, client_orde Returns: Client order ID that was cancelled + + Raises: + HTTPException: 404 if order not found, 500 if cancellation fails """ connector = await self.get_connector_instance(account_name, connector_name) + # Check if order exists in in-flight orders + if client_order_id not in connector.in_flight_orders: + raise HTTPException(status_code=404, detail=f"Order '{client_order_id}' not found in active orders") + try: result = connector.cancel(trading_pair="NA", client_order_id=client_order_id) - logger.info(f"Cancelled order {client_order_id} on {connector_name} (Account: {account_name})") + logger.info(f"Initiated cancellation for order {client_order_id} on {connector_name} (Account: {account_name})") return result except Exception as e: - logger.error(f"Failed to cancel order {client_order_id}: {e}") - raise HTTPException(status_code=500, detail=f"Failed to cancel order: {str(e)}") + logger.error(f"Failed to initiate cancellation for order {client_order_id}: {e}") + raise HTTPException(status_code=500, detail=f"Failed to initiate order cancellation: {str(e)}") async def set_leverage(self, account_name: str, connector_name: str, trading_pair: str, leverage: int) -> Dict[str, str]: From f0800b83496d503ad352d75322ce183878897bc0 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 10 Jul 2025 17:17:49 +0300 Subject: [PATCH 227/244] (feat) revamp markets,symbols --> connector_name,trading_pair --- models/archived_bots.py | 4 ++-- routers/archived_bots.py | 8 ++++---- routers/trading.py | 16 ++++++++-------- services/accounts_service.py | 24 ++++++++++++------------ utils/hummingbot_database_reader.py | 16 +++++++--------- 5 files changed, 33 insertions(+), 35 deletions(-) diff --git a/models/archived_bots.py b/models/archived_bots.py index c03d32fe..cea57962 100644 --- a/models/archived_bots.py +++ b/models/archived_bots.py @@ -52,8 +52,8 @@ class TradeDetail(BaseModel): id: Optional[int] = Field(default=None, description="Trade ID") config_file_path: str = Field(description="Configuration file path") strategy: str = Field(description="Strategy name") - market: str = Field(description="Market/exchange name") - symbol: str = Field(description="Trading symbol") + connector_name: str = Field(description="Connector name") + trading_pair: str = Field(description="Trading pair") base_asset: str = Field(description="Base asset") quote_asset: str = Field(description="Quote asset") timestamp: datetime = Field(description="Trade timestamp") diff --git a/routers/archived_bots.py b/routers/archived_bots.py index a15825dd..6ff6c6ef 100644 --- a/routers/archived_bots.py +++ b/routers/archived_bots.py @@ -68,8 +68,8 @@ async def 
get_database_summary(db_path: str): "total_executors": len(executors), "total_positions": len(positions), "total_controllers": len(controllers), - "trading_pairs": orders["symbol"].unique().tolist() if len(orders) > 0 else [], - "exchanges": orders["market"].unique().tolist() if len(orders) > 0 else [], + "trading_pairs": orders["trading_pair"].unique().tolist() if len(orders) > 0 else [], + "exchanges": orders["connector_name"].unique().tolist() if len(orders) > 0 else [], } except Exception as e: raise HTTPException(status_code=500, detail=f"Error analyzing database: {str(e)}") @@ -111,8 +111,8 @@ async def get_database_performance(db_path: str): "final_unrealized_pnl_quote": float(final_row.get('unrealized_trade_pnl_quote', 0)), "total_fees_quote": float(performance_data['fees_quote'].sum()), "final_net_position": float(final_row.get('net_position', 0)), - "trading_pairs": performance_data['symbol'].unique().tolist(), - "markets": performance_data['market'].unique().tolist() + "trading_pairs": performance_data['trading_pair'].unique().tolist(), + "connector_names": performance_data['connector_name'].unique().tolist() } return { diff --git a/routers/trading.py b/routers/trading.py index 69746365..872eae81 100644 --- a/routers/trading.py +++ b/routers/trading.py @@ -314,8 +314,8 @@ async def get_orders( try: orders = await accounts_service.get_orders( account_name=account_name, - market=filter_request.connector_names[0] if filter_request.connector_names and len(filter_request.connector_names) == 1 else None, - symbol=filter_request.trading_pairs[0] if filter_request.trading_pairs and len(filter_request.trading_pairs) == 1 else None, + connector_name=filter_request.connector_names[0] if filter_request.connector_names and len(filter_request.connector_names) == 1 else None, + trading_pair=filter_request.trading_pairs[0] if filter_request.trading_pairs and len(filter_request.trading_pairs) == 1 else None, status=filter_request.status, start_time=filter_request.start_time, end_time=filter_request.end_time, @@ -333,9 +333,9 @@ async def get_orders( # Apply filters for multiple values if filter_request.connector_names and len(filter_request.connector_names) > 1: - all_orders = [order for order in all_orders if order.get('market') in filter_request.connector_names] + all_orders = [order for order in all_orders if order.get('connector_name') in filter_request.connector_names] if filter_request.trading_pairs and len(filter_request.trading_pairs) > 1: - all_orders = [order for order in all_orders if order.get('symbol') in filter_request.trading_pairs] + all_orders = [order for order in all_orders if order.get('trading_pair') in filter_request.trading_pairs] # Sort by timestamp (most recent first) and then by cursor_id for consistency all_orders.sort(key=lambda x: (x.get('timestamp', 0), x.get('_cursor_id', '')), reverse=True) @@ -406,8 +406,8 @@ async def get_trades( try: trades = await accounts_service.get_trades( account_name=account_name, - market=filter_request.connector_names[0] if filter_request.connector_names and len(filter_request.connector_names) == 1 else None, - symbol=filter_request.trading_pairs[0] if filter_request.trading_pairs and len(filter_request.trading_pairs) == 1 else None, + connector_name=filter_request.connector_names[0] if filter_request.connector_names and len(filter_request.connector_names) == 1 else None, + trading_pair=filter_request.trading_pairs[0] if filter_request.trading_pairs and len(filter_request.trading_pairs) == 1 else None, 
trade_type=filter_request.trade_types[0] if filter_request.trade_types and len(filter_request.trade_types) == 1 else None, start_time=filter_request.start_time, end_time=filter_request.end_time, @@ -425,9 +425,9 @@ async def get_trades( # Apply filters for multiple values if filter_request.connector_names and len(filter_request.connector_names) > 1: - all_trades = [trade for trade in all_trades if trade.get('market') in filter_request.connector_names] + all_trades = [trade for trade in all_trades if trade.get('connector_name') in filter_request.connector_names] if filter_request.trading_pairs and len(filter_request.trading_pairs) > 1: - all_trades = [trade for trade in all_trades if trade.get('symbol') in filter_request.trading_pairs] + all_trades = [trade for trade in all_trades if trade.get('trading_pair') in filter_request.trading_pairs] if filter_request.trade_types and len(filter_request.trade_types) > 1: all_trades = [trade for trade in all_trades if trade.get('trade_type') in filter_request.trade_types] diff --git a/services/accounts_service.py b/services/accounts_service.py index 073621cc..46973e00 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -975,8 +975,8 @@ async def get_position_mode(self, account_name: str, connector_name: str) -> Dic logger.error(f"Failed to get position mode: {e}") raise HTTPException(status_code=500, detail=f"Failed to get position mode: {str(e)}") - async def get_orders(self, account_name: Optional[str] = None, market: Optional[str] = None, - symbol: Optional[str] = None, status: Optional[str] = None, + async def get_orders(self, account_name: Optional[str] = None, connector_name: Optional[str] = None, + trading_pair: Optional[str] = None, status: Optional[str] = None, start_time: Optional[int] = None, end_time: Optional[int] = None, limit: int = 100, offset: int = 0) -> List[Dict]: """Get order history using OrderRepository.""" @@ -987,8 +987,8 @@ async def get_orders(self, account_name: Optional[str] = None, market: Optional[ order_repo = OrderRepository(session) orders = await order_repo.get_orders( account_name=account_name, - connector_name=market, - trading_pair=symbol, + connector_name=connector_name, + trading_pair=trading_pair, status=status, start_time=start_time, end_time=end_time, @@ -1000,8 +1000,8 @@ async def get_orders(self, account_name: Optional[str] = None, market: Optional[ logger.error(f"Error getting orders: {e}") return [] - async def get_active_orders_history(self, account_name: Optional[str] = None, market: Optional[str] = None, - symbol: Optional[str] = None) -> List[Dict]: + async def get_active_orders_history(self, account_name: Optional[str] = None, connector_name: Optional[str] = None, + trading_pair: Optional[str] = None) -> List[Dict]: """Get active orders from database using OrderRepository.""" await self.ensure_db_initialized() @@ -1010,8 +1010,8 @@ async def get_active_orders_history(self, account_name: Optional[str] = None, ma order_repo = OrderRepository(session) orders = await order_repo.get_active_orders( account_name=account_name, - connector_name=market, - trading_pair=symbol + connector_name=connector_name, + trading_pair=trading_pair ) return [order_repo.to_dict(order) for order in orders] except Exception as e: @@ -1042,8 +1042,8 @@ async def get_orders_summary(self, account_name: Optional[str] = None, start_tim "fill_rate": 0, } - async def get_trades(self, account_name: Optional[str] = None, market: Optional[str] = None, - symbol: Optional[str] = None, trade_type: Optional[str] 
= None, + async def get_trades(self, account_name: Optional[str] = None, connector_name: Optional[str] = None, + trading_pair: Optional[str] = None, trade_type: Optional[str] = None, start_time: Optional[int] = None, end_time: Optional[int] = None, limit: int = 100, offset: int = 0) -> List[Dict]: """Get trade history using TradeRepository.""" @@ -1054,8 +1054,8 @@ async def get_trades(self, account_name: Optional[str] = None, market: Optional[ trade_repo = TradeRepository(session) trade_order_pairs = await trade_repo.get_trades_with_orders( account_name=account_name, - connector_name=market, - trading_pair=symbol, + connector_name=connector_name, + trading_pair=trading_pair, trade_type=trade_type, start_time=start_time, end_time=end_time, diff --git a/utils/hummingbot_database_reader.py b/utils/hummingbot_database_reader.py index 5fedd6ee..ea7b526f 100644 --- a/utils/hummingbot_database_reader.py +++ b/utils/hummingbot_database_reader.py @@ -53,23 +53,21 @@ def get_orders(self): with self.session_maker() as session: query = "SELECT * FROM 'Order'" orders = pd.read_sql_query(text(query), session.connection()) - orders["market"] = orders["market"] orders["amount"] = orders["amount"] / 1e6 orders["price"] = orders["price"] / 1e6 - # orders['creation_timestamp'] = pd.to_datetime(orders['creation_timestamp'], unit="ms") - # orders['last_update_timestamp'] = pd.to_datetime(orders['last_update_timestamp'], unit="ms") + orders.rename(columns={"market": "connector_name", "symbol": "trading_pair"}, inplace=True) return orders def get_trade_fills(self): - groupers = ["config_file_path", "market", "symbol"] + groupers = ["config_file_path", "connector_name", "trading_pair"] float_cols = ["amount", "price", "trade_fee_in_quote"] with self.session_maker() as session: query = "SELECT * FROM TradeFill" trade_fills = pd.read_sql_query(text(query), session.connection()) + trade_fills.rename(columns={"market": "connector_name", "symbol": "trading_pair"}, inplace=True) trade_fills[float_cols] = trade_fills[float_cols] / 1e6 trade_fills["cum_fees_in_quote"] = trade_fills.groupby(groupers)["trade_fee_in_quote"].cumsum() trade_fills["trade_fee"] = trade_fills.groupby(groupers)["cum_fees_in_quote"].diff() - # trade_fills["timestamp"] = pd.to_datetime(trade_fills["timestamp"], unit="ms") return trade_fills def get_order_status(self): @@ -113,7 +111,7 @@ def calculate_trade_based_performance(self) -> pd.DataFrame: return pd.DataFrame() # Sort by timestamp to ensure proper rolling calculation - trades = trades.sort_values(['symbol', 'market', 'timestamp']).copy() + trades = trades.sort_values(['trading_pair', 'connector_name', 'timestamp']).copy() # Create buy/sell indicator columns trades['is_buy'] = (trades['trade_type'].str.upper() == 'BUY').astype(int) @@ -125,8 +123,8 @@ def calculate_trade_based_performance(self) -> pd.DataFrame: trades['buy_value'] = trades['price'] * trades['amount'] * trades['is_buy'] trades['sell_value'] = trades['price'] * trades['amount'] * trades['is_sell'] - # Group by symbol and market for rolling calculations - grouper = ['symbol', 'market'] + # Group by trading_pair and connector_name for rolling calculations + grouper = ['trading_pair', 'connector_name'] # Calculate cumulative volumes and values trades['buy_volume'] = trades.groupby(grouper)['buy_amount'].cumsum() @@ -188,7 +186,7 @@ def calculate_trade_based_performance(self) -> pd.DataFrame: # Select and return relevant columns result_columns = [ - 'timestamp', 'price', 'amount', 'trade_type', 'symbol', 'market', + 'timestamp', 
'price', 'amount', 'trade_type', 'trading_pair', 'connector_name', 'buy_avg_price', 'buy_volume', 'sell_avg_price', 'sell_volume', 'net_position', 'realized_trade_pnl_pct', 'realized_trade_pnl_quote', 'unrealized_trade_pnl_pct', 'unrealized_trade_pnl_quote', From b9c72cd39394ab5ed5e68515de6032cbd89d1178 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Thu, 10 Jul 2025 18:17:12 +0300 Subject: [PATCH 228/244] (feat) improve order book query and last price query in accounts --- services/accounts_service.py | 27 +++++++++++++-- services/market_data_feed_manager.py | 50 ++++++++++++++++++++++++++-- 2 files changed, 72 insertions(+), 5 deletions(-) diff --git a/services/accounts_service.py b/services/accounts_service.py index 46973e00..ea7dc5b9 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -31,6 +31,9 @@ class AccountsService: "xrpl": "RLUSD", "kraken": "USD", } + + # Cache for storing last successful prices by trading pair + _last_known_prices = {} def __init__(self, account_update_interval: int = 5, @@ -233,16 +236,34 @@ async def _get_connector_tokens_info(self, connector, connector_name: str) -> Li return tokens_info async def _safe_get_last_traded_prices(self, connector, trading_pairs, timeout=10): - """Safely get last traded prices with timeout and error handling.""" + """Safely get last traded prices with timeout and error handling. Preserves previous prices on failure.""" try: last_traded = await asyncio.wait_for(connector.get_last_traded_prices(trading_pairs=trading_pairs), timeout=timeout) + + # Update cache with successful prices + for pair, price in last_traded.items(): + if price and price > 0: + self._last_known_prices[pair] = price + return last_traded except asyncio.TimeoutError: logger.error(f"Timeout getting last traded prices for trading pairs {trading_pairs}") - return {pair: Decimal("0") for pair in trading_pairs} + return self._get_fallback_prices(trading_pairs) except Exception as e: logger.error(f"Error getting last traded prices in connector {connector} for trading pairs {trading_pairs}: {e}") - return {pair: Decimal("0") for pair in trading_pairs} + return self._get_fallback_prices(trading_pairs) + + def _get_fallback_prices(self, trading_pairs): + """Get fallback prices using cached values, only setting to 0 if no previous price exists.""" + fallback_prices = {} + for pair in trading_pairs: + if pair in self._last_known_prices: + fallback_prices[pair] = self._last_known_prices[pair] + logger.info(f"Using cached price {self._last_known_prices[pair]} for {pair}") + else: + fallback_prices[pair] = Decimal("0") + logger.warning(f"No cached price available for {pair}, using 0") + return fallback_prices def get_connector_config_map(self, connector_name: str): """ diff --git a/services/market_data_feed_manager.py b/services/market_data_feed_manager.py index 3153b879..115734a3 100644 --- a/services/market_data_feed_manager.py +++ b/services/market_data_feed_manager.py @@ -414,12 +414,58 @@ async def get_order_book_query_result(self, connector_name: str, trading_pair: s elif 'quote_price' in kwargs: # Get quote volume for price result = order_book.get_quote_volume_for_price(is_buy, kwargs['quote_price']) + + # Check if quote crosses the book (no available volume at this price) + if result.result_volume is None or result.result_price is None: + # Get current market prices for comparison + snapshot = order_book.snapshot + best_bid = float(snapshot[0].iloc[0]["price"]) if not snapshot[0].empty else None + best_ask = 
float(snapshot[1].iloc[0]["price"]) if not snapshot[1].empty else None
+                mid_price = (best_bid + best_ask) / 2 if best_bid and best_ask else None
+
+                # Determine if quote crosses the book
+                query_price = float(kwargs['quote_price'])
+                crossed_reason = None
+                suggested_price = None
+
+                if is_buy:
+                    # For buy orders, crossing occurs when price > best_ask
+                    if best_ask and query_price > best_ask:
+                        crossed_reason = f"Buy price {query_price} exceeds best ask {best_ask}"
+                        suggested_price = best_ask
+                    elif best_bid and query_price < best_bid:
+                        crossed_reason = f"Buy price {query_price} below best bid {best_bid} - no liquidity available"
+                        suggested_price = best_bid
+                else:
+                    # For sell orders, crossing occurs when price < best_bid
+                    if best_bid and query_price < best_bid:
+                        crossed_reason = f"Sell price {query_price} below best bid {best_bid}"
+                        suggested_price = best_bid
+                    elif best_ask and query_price > best_ask:
+                        crossed_reason = f"Sell price {query_price} above best ask {best_ask} - no liquidity available"
+                        suggested_price = best_ask
+
+                return {
+                    "trading_pair": trading_pair,
+                    "is_buy": is_buy,
+                    "query_price": query_price,
+                    "result_volume": None,
+                    "result_quote_volume": None,
+                    "crossed_book": True,
+                    "crossed_reason": crossed_reason,
+                    "best_bid": best_bid,
+                    "best_ask": best_ask,
+                    "mid_price": mid_price,
+                    "suggested_price": suggested_price,
+                    "timestamp": current_time
+                }
+
             return {
                 "trading_pair": trading_pair,
                 "is_buy": is_buy,
                 "query_price": kwargs['quote_price'],
-                "result_volume": float(result.result_volume) if result.result_volume else None,
-                "result_quote_volume": float(result.result_price) if result.result_price else None,  # For quote volume queries, result_price contains the quote volume
+                "result_quote_volume": float(result.result_volume) if result.result_volume else None,
+                "crossed_book": False,
                 "timestamp": current_time
             }

From 4704916838e4a928772281464a6aac189048d7f9 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Fri, 11 Jul 2025 01:05:07 +0300
Subject: [PATCH 229/244] (feat) add local candles config before refactor

---
 models/market_data.py | 18 ++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/models/market_data.py b/models/market_data.py
index 8b86291d..1f618ec2 100644
--- a/models/market_data.py
+++ b/models/market_data.py
@@ -1,7 +1,8 @@
-from typing import Dict, List, Optional, Any
-from pydantic import BaseModel, Field
 from datetime import datetime
 from decimal import Decimal
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel, Field
 
 
 class CandleData(BaseModel):
@@ -13,6 +14,19 @@ class CandleData(BaseModel):
     close: float = Field(description="Closing price")
     volume: float = Field(description="Trading volume")
 
+class CandlesConfigRequest(BaseModel):
+    """
+    The CandlesConfigRequest model stores the configuration for a candles data request.
From 4704916838e4a928772281464a6aac189048d7f9 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Fri, 11 Jul 2025 01:05:07 +0300
Subject: [PATCH 229/244] (feat) add local candles config before refactor

---
 models/market_data.py | 18 ++++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/models/market_data.py b/models/market_data.py
index 8b86291d..1f618ec2 100644
--- a/models/market_data.py
+++ b/models/market_data.py
@@ -1,7 +1,8 @@
-from typing import Dict, List, Optional, Any
-from pydantic import BaseModel, Field
 from datetime import datetime
 from decimal import Decimal
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel, Field
 
 
 class CandleData(BaseModel):
@@ -13,6 +14,19 @@ class CandleData(BaseModel):
     close: float = Field(description="Closing price")
     volume: float = Field(description="Trading volume")
 
+class CandlesConfigRequest(BaseModel):
+    """
+    The CandlesConfigRequest class is a data class that stores the configuration of a candles feed.
+    It has the following attributes:
+    - connector_name: str
+    - trading_pair: str
+    - interval: str
+    - max_records: int
+    """
+    connector_name: str
+    trading_pair: str
+    interval: str = "1m"
+    max_records: int = 500
 
 class CandlesResponse(BaseModel):
     """Response for candles data"""

From 9acc818c06018e3ddd2d7f4f2539ed4f75d3c33a Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Fri, 11 Jul 2025 01:05:28 +0300
Subject: [PATCH 230/244] (feat) adapt config to endpoint

---
 routers/market_data.py | 11 ++++++++---
 1 file changed, 8 insertions(+), 3 deletions(-)

diff --git a/routers/market_data.py b/routers/market_data.py
index bb862cdb..214cb636 100644
--- a/routers/market_data.py
+++ b/routers/market_data.py
@@ -2,8 +2,10 @@
 import time
 
 from fastapi import APIRouter, Request, HTTPException, Depends
-from hummingbot.data_feed.candles_feed.data_types import CandlesConfig, HistoricalCandlesConfig
+from hummingbot.data_feed.candles_feed.data_types import HistoricalCandlesConfig, CandlesConfig
 from hummingbot.data_feed.candles_feed.candles_factory import CandlesFactory
+
+from models.market_data import CandlesConfigRequest
 from services.market_data_feed_manager import MarketDataFeedManager
 from models import (
     PriceRequest, PricesResponse, FundingInfoRequest, FundingInfoResponse,
@@ -17,7 +19,7 @@
 
 
 @router.post("/candles")
-async def get_candles(request: Request, candles_config: CandlesConfig):
+async def get_candles(request: Request, candles_config: CandlesConfigRequest):
     """
     Get real-time candles data for a specific trading pair.
 
@@ -36,7 +38,10 @@
         market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager
 
         # Get or create the candles feed (this will start it automatically and track access time)
-        candles_feed = market_data_feed_manager.get_candles_feed(candles_config)
+        candles_cfg = CandlesConfig(
+            connector=candles_config.connector_name, trading_pair=candles_config.trading_pair,
+            interval=candles_config.interval, max_records=candles_config.max_records)
+        candles_feed = market_data_feed_manager.get_candles_feed(candles_cfg)
 
         # Wait for the candles feed to be ready
         while not candles_feed.ready:

From 6cf2a1e1b4590e885945413c0cfdc7107f1f4145 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Fri, 11 Jul 2025 01:05:43 +0300
Subject: [PATCH 231/244] (feat) improve trading router

---
 routers/trading.py | 371 ++++++++++++++++++++++++++++-----------------
 1 file changed, 234 insertions(+), 137 deletions(-)

diff --git a/routers/trading.py b/routers/trading.py
index 872eae81..37e8d114 100644
--- a/routers/trading.py
+++ b/routers/trading.py
@@ -1,38 +1,49 @@
 import logging
 from typing import Dict, List, Optional
 
-from fastapi import APIRouter, HTTPException, Depends
+from fastapi import APIRouter, Depends, HTTPException
 
 # Create module-specific logger
 logger = logging.getLogger(__name__)
 
+from hummingbot.core.data_type.common import OrderType, PositionAction, PositionMode, TradeType
 from pydantic import BaseModel
-from hummingbot.core.data_type.common import PositionMode, TradeType, OrderType, PositionAction
 from starlette import status
 
-from services.accounts_service import AccountsService
 from deps import get_accounts_service, get_market_data_feed_manager
-from models import TradeRequest, TradeResponse, OrderFilterRequest, ActiveOrderFilterRequest, PositionFilterRequest, FundingPaymentFilterRequest, TradeFilterRequest, PaginatedResponse
-from models.accounts import PositionModeRequest, LeverageRequest
+from
models import ( + ActiveOrderFilterRequest, + FundingPaymentFilterRequest, + OrderFilterRequest, + PaginatedResponse, + PositionFilterRequest, + TradeFilterRequest, + TradeRequest, + TradeResponse, +) +from models.accounts import LeverageRequest, PositionModeRequest +from services.accounts_service import AccountsService router = APIRouter(tags=["Trading"], prefix="/trading") # Trade Execution @router.post("/orders", response_model=TradeResponse, status_code=status.HTTP_201_CREATED) -async def place_trade(trade_request: TradeRequest, - accounts_service: AccountsService = Depends(get_accounts_service), - market_data_manager = Depends(get_market_data_feed_manager)): +async def place_trade( + trade_request: TradeRequest, + accounts_service: AccountsService = Depends(get_accounts_service), + market_data_manager=Depends(get_market_data_feed_manager), +): """ Place a buy or sell order using a specific account and connector. - + Args: trade_request: Trading request with account, connector, trading pair, type, amount, etc. accounts_service: Injected accounts service market_data_manager: Market data manager for price fetching - + Returns: TradeResponse with order ID and trading details - + Raises: HTTPException: 400 for invalid parameters, 404 for account/connector not found, 500 for trade execution errors """ @@ -41,7 +52,7 @@ async def place_trade(trade_request: TradeRequest, trade_type_enum = TradeType[trade_request.trade_type] order_type_enum = OrderType[trade_request.order_type] position_action_enum = PositionAction[trade_request.position_action] - + order_id = await accounts_service.place_trade( account_name=trade_request.account_name, connector_name=trade_request.connector_name, @@ -51,9 +62,9 @@ async def place_trade(trade_request: TradeRequest, order_type=order_type_enum, price=trade_request.price, position_action=position_action_enum, - market_data_manager=market_data_manager + market_data_manager=market_data_manager, ) - + return TradeResponse( order_id=order_id, account_name=trade_request.account_name, @@ -63,16 +74,21 @@ async def place_trade(trade_request: TradeRequest, amount=trade_request.amount, order_type=trade_request.order_type, price=trade_request.price, - status="submitted" + status="submitted", ) except HTTPException: raise except Exception as e: raise HTTPException(status_code=500, detail=f"Unexpected error placing trade: {str(e)}") + @router.post("/{account_name}/{connector_name}/orders/{client_order_id}/cancel") -async def cancel_order(account_name: str, connector_name: str, client_order_id: str, - accounts_service: AccountsService = Depends(get_accounts_service)): +async def cancel_order( + account_name: str, + connector_name: str, + client_order_id: str, + accounts_service: AccountsService = Depends(get_accounts_service), +): """ Cancel a specific order by its client order ID. 
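The place_trade handler above converts the request's string fields to Hummingbot enums by name (TradeType[...], OrderType[...], PositionAction[...]), so a misspelled value fails fast before any order reaches the exchange. A hypothetical client call against this router; the URL, port, account name, and credentials here are assumptions, not part of the patch:

    import httpx

    payload = {
        "account_name": "master_account",       # hypothetical account
        "connector_name": "binance_perpetual",  # hypothetical connector
        "trading_pair": "BTC-USDT",
        "trade_type": "BUY",            # must match a TradeType member name
        "order_type": "LIMIT",          # must match an OrderType member name
        "amount": 0.001,
        "price": 60000,
        "position_action": "OPEN",      # must match a PositionAction member name
    }
    resp = httpx.post("http://localhost:8000/trading/orders",
                      json=payload, auth=("admin", "admin"))
    print(resp.json()["order_id"])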
@@ -91,9 +107,7 @@ async def cancel_order(account_name: str, connector_name: str, client_order_id: """ try: cancelled_order_id = await accounts_service.cancel_order( - account_name=account_name, - connector_name=connector_name, - client_order_id=client_order_id + account_name=account_name, connector_name=connector_name, client_order_id=client_order_id ) return {"message": f"Order cancellation initiated for {cancelled_order_id}"} except HTTPException: @@ -101,11 +115,9 @@ async def cancel_order(account_name: str, connector_name: str, client_order_id: except Exception as e: raise HTTPException(status_code=500, detail=f"Error cancelling order: {str(e)}") + @router.post("/positions", response_model=PaginatedResponse) -async def get_positions( - filter_request: PositionFilterRequest, - accounts_service: AccountsService = Depends(get_accounts_service) -): +async def get_positions(filter_request: PositionFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service)): """ Get current positions across all or filtered perpetual connectors. @@ -131,7 +143,11 @@ async def get_positions( for account_name in accounts_to_check: if account_name in all_connectors: # Filter connectors - connectors_to_check = filter_request.connector_names if filter_request.connector_names else list(all_connectors[account_name].keys()) + connectors_to_check = ( + filter_request.connector_names + if filter_request.connector_names + else list(all_connectors[account_name].keys()) + ) for connector_name in connectors_to_check: # Only fetch positions from perpetual connectors @@ -145,11 +161,12 @@ async def get_positions( except Exception as e: # Log error but continue with other connectors import logging + logger.warning(f"Failed to get positions for {account_name}/{connector_name}: {e}") # Sort by cursor_id for consistent pagination all_positions.sort(key=lambda x: x.get("_cursor_id", "")) - + # Apply cursor-based pagination start_index = 0 if filter_request.cursor: @@ -158,15 +175,15 @@ async def get_positions( if position.get("_cursor_id") == filter_request.cursor: start_index = i + 1 break - + # Get page of results end_index = start_index + filter_request.limit page_positions = all_positions[start_index:end_index] - + # Determine next cursor and has_more has_more = end_index < len(all_positions) next_cursor = page_positions[-1].get("_cursor_id") if page_positions and has_more else None - + # Clean up cursor_id from response data for position in page_positions: position.pop("_cursor_id", None) @@ -177,30 +194,28 @@ async def get_positions( "limit": filter_request.limit, "has_more": has_more, "next_cursor": next_cursor, - "total_count": len(all_positions) - } + "total_count": len(all_positions), + }, ) except Exception as e: raise HTTPException(status_code=500, detail=f"Error fetching positions: {str(e)}") - # Active Orders Management - Real-time from connectors @router.post("/orders/active", response_model=PaginatedResponse) async def get_active_orders( - filter_request: ActiveOrderFilterRequest, - accounts_service: AccountsService = Depends(get_accounts_service) + filter_request: ActiveOrderFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service) ): """ Get active (in-flight) orders across all or filtered accounts and connectors. This endpoint fetches real-time active orders directly from the connectors' in_flight_orders property, providing current order status, fill amounts, and other live order data. 
- + Args: filter_request: JSON payload with filtering criteria - + Returns: Paginated response with active order data and pagination metadata @@ -217,7 +232,11 @@ async def get_active_orders( for account_name in accounts_to_check: if account_name in all_connectors: # Filter connectors - connectors_to_check = filter_request.connector_names if filter_request.connector_names else list(all_connectors[account_name].keys()) + connectors_to_check = ( + filter_request.connector_names + if filter_request.connector_names + else list(all_connectors[account_name].keys()) + ) for connector_name in connectors_to_check: if connector_name in all_connectors[account_name]: @@ -225,29 +244,26 @@ async def get_active_orders( connector = all_connectors[account_name][connector_name] # Get in-flight orders directly from connector in_flight_orders = connector.in_flight_orders - + for client_order_id, order in in_flight_orders.items(): # Apply trading pair filter if specified if filter_request.trading_pairs and order.trading_pair not in filter_request.trading_pairs: continue - - # Convert to JSON format for API response - order_dict = order.to_json() - order_dict.update({ - "account_name": account_name, - "connector_name": connector_name, - "_cursor_id": client_order_id # Use client_order_id as cursor - }) - all_active_orders.append(order_dict) - + + # Convert to standardized format to match orders search response + standardized_order = _standardize_in_flight_order_response(order, account_name, connector_name) + standardized_order["_cursor_id"] = client_order_id # Use client_order_id as cursor + all_active_orders.append(standardized_order) + except Exception as e: # Log error but continue with other connectors import logging + logger.warning(f"Failed to get active orders for {account_name}/{connector_name}: {e}") # Sort by cursor_id for consistent pagination all_active_orders.sort(key=lambda x: x.get("_cursor_id", "")) - + # Apply cursor-based pagination start_index = 0 if filter_request.cursor: @@ -256,15 +272,15 @@ async def get_active_orders( if order.get("_cursor_id") == filter_request.cursor: start_index = i + 1 break - + # Get page of results end_index = start_index + filter_request.limit page_orders = all_active_orders[start_index:end_index] - + # Determine next cursor and has_more has_more = end_index < len(all_active_orders) next_cursor = page_orders[-1].get("_cursor_id") if page_orders and has_more else None - + # Clean up cursor_id from response data for order in page_orders: order.pop("_cursor_id", None) @@ -275,8 +291,8 @@ async def get_active_orders( "limit": filter_request.limit, "has_more": has_more, "next_cursor": next_cursor, - "total_count": len(all_active_orders) - } + "total_count": len(all_active_orders), + }, ) except Exception as e: @@ -285,22 +301,19 @@ async def get_active_orders( # Historical Order Management - From registry/database @router.post("/orders/search", response_model=PaginatedResponse) -async def get_orders( - filter_request: OrderFilterRequest, - accounts_service: AccountsService = Depends(get_accounts_service) -): +async def get_orders(filter_request: OrderFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service)): """ Get historical order data across all or filtered accounts from the database/registry. 
- + Args: filter_request: JSON payload with filtering criteria - + Returns: Paginated response with historical order data and pagination metadata """ try: all_orders = [] - + # Determine which accounts to query if filter_request.account_names: accounts_to_check = filter_request.account_names @@ -308,14 +321,22 @@ async def get_orders( # Get all accounts all_connectors = accounts_service.connector_manager.get_all_connectors() accounts_to_check = list(all_connectors.keys()) - + # Collect orders from all specified accounts for account_name in accounts_to_check: try: orders = await accounts_service.get_orders( account_name=account_name, - connector_name=filter_request.connector_names[0] if filter_request.connector_names and len(filter_request.connector_names) == 1 else None, - trading_pair=filter_request.trading_pairs[0] if filter_request.trading_pairs and len(filter_request.trading_pairs) == 1 else None, + connector_name=( + filter_request.connector_names[0] + if filter_request.connector_names and len(filter_request.connector_names) == 1 + else None + ), + trading_pair=( + filter_request.trading_pairs[0] + if filter_request.trading_pairs and len(filter_request.trading_pairs) == 1 + else None + ), status=filter_request.status, start_time=filter_request.start_time, end_time=filter_request.end_time, @@ -329,17 +350,18 @@ async def get_orders( except Exception as e: # Log error but continue with other accounts import logging + logger.warning(f"Failed to get orders for {account_name}: {e}") - + # Apply filters for multiple values if filter_request.connector_names and len(filter_request.connector_names) > 1: - all_orders = [order for order in all_orders if order.get('connector_name') in filter_request.connector_names] + all_orders = [order for order in all_orders if order.get("connector_name") in filter_request.connector_names] if filter_request.trading_pairs and len(filter_request.trading_pairs) > 1: - all_orders = [order for order in all_orders if order.get('trading_pair') in filter_request.trading_pairs] - + all_orders = [order for order in all_orders if order.get("trading_pair") in filter_request.trading_pairs] + # Sort by timestamp (most recent first) and then by cursor_id for consistency - all_orders.sort(key=lambda x: (x.get('timestamp', 0), x.get('_cursor_id', '')), reverse=True) - + all_orders.sort(key=lambda x: (x.get("timestamp", 0), x.get("_cursor_id", "")), reverse=True) + # Apply cursor-based pagination start_index = 0 if filter_request.cursor: @@ -348,15 +370,15 @@ async def get_orders( if order.get("_cursor_id") == filter_request.cursor: start_index = i + 1 break - + # Get page of results end_index = start_index + filter_request.limit page_orders = all_orders[start_index:end_index] - + # Determine next cursor and has_more has_more = end_index < len(all_orders) next_cursor = page_orders[-1].get("_cursor_id") if page_orders and has_more else None - + # Clean up cursor_id from response data for order in page_orders: order.pop("_cursor_id", None) @@ -367,32 +389,28 @@ async def get_orders( "limit": filter_request.limit, "has_more": has_more, "next_cursor": next_cursor, - "total_count": len(all_orders) - } + "total_count": len(all_orders), + }, ) except Exception as e: raise HTTPException(status_code=500, detail=f"Error fetching orders: {str(e)}") - # Trade History @router.post("/trades", response_model=PaginatedResponse) -async def get_trades( - filter_request: TradeFilterRequest, - accounts_service: AccountsService = Depends(get_accounts_service) -): +async def 
get_trades(filter_request: TradeFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service)): """ Get trade history across all or filtered accounts with complex filtering. - + Args: filter_request: JSON payload with filtering criteria - + Returns: Paginated response with trade data and pagination metadata """ try: all_trades = [] - + # Determine which accounts to query if filter_request.account_names: accounts_to_check = filter_request.account_names @@ -400,15 +418,27 @@ async def get_trades( # Get all accounts all_connectors = accounts_service.connector_manager.get_all_connectors() accounts_to_check = list(all_connectors.keys()) - + # Collect trades from all specified accounts for account_name in accounts_to_check: try: trades = await accounts_service.get_trades( account_name=account_name, - connector_name=filter_request.connector_names[0] if filter_request.connector_names and len(filter_request.connector_names) == 1 else None, - trading_pair=filter_request.trading_pairs[0] if filter_request.trading_pairs and len(filter_request.trading_pairs) == 1 else None, - trade_type=filter_request.trade_types[0] if filter_request.trade_types and len(filter_request.trade_types) == 1 else None, + connector_name=( + filter_request.connector_names[0] + if filter_request.connector_names and len(filter_request.connector_names) == 1 + else None + ), + trading_pair=( + filter_request.trading_pairs[0] + if filter_request.trading_pairs and len(filter_request.trading_pairs) == 1 + else None + ), + trade_type=( + filter_request.trade_types[0] + if filter_request.trade_types and len(filter_request.trade_types) == 1 + else None + ), start_time=filter_request.start_time, end_time=filter_request.end_time, limit=filter_request.limit * 2, # Get more for filtering @@ -421,19 +451,20 @@ async def get_trades( except Exception as e: # Log error but continue with other accounts import logging + logger.warning(f"Failed to get trades for {account_name}: {e}") - + # Apply filters for multiple values if filter_request.connector_names and len(filter_request.connector_names) > 1: - all_trades = [trade for trade in all_trades if trade.get('connector_name') in filter_request.connector_names] + all_trades = [trade for trade in all_trades if trade.get("connector_name") in filter_request.connector_names] if filter_request.trading_pairs and len(filter_request.trading_pairs) > 1: - all_trades = [trade for trade in all_trades if trade.get('trading_pair') in filter_request.trading_pairs] + all_trades = [trade for trade in all_trades if trade.get("trading_pair") in filter_request.trading_pairs] if filter_request.trade_types and len(filter_request.trade_types) > 1: - all_trades = [trade for trade in all_trades if trade.get('trade_type') in filter_request.trade_types] - + all_trades = [trade for trade in all_trades if trade.get("trade_type") in filter_request.trade_types] + # Sort by timestamp (most recent first) and then by cursor_id for consistency - all_trades.sort(key=lambda x: (x.get('timestamp', 0), x.get('_cursor_id', '')), reverse=True) - + all_trades.sort(key=lambda x: (x.get("timestamp", 0), x.get("_cursor_id", "")), reverse=True) + # Apply cursor-based pagination start_index = 0 if filter_request.cursor: @@ -442,15 +473,15 @@ async def get_trades( if trade.get("_cursor_id") == filter_request.cursor: start_index = i + 1 break - + # Get page of results end_index = start_index + filter_request.limit page_trades = all_trades[start_index:end_index] - + # Determine next cursor and has_more has_more = end_index < 
len(all_trades) next_cursor = page_trades[-1].get("_cursor_id") if page_trades and has_more else None - + # Clean up cursor_id from response data for trade in page_trades: trade.pop("_cursor_id", None) @@ -461,33 +492,31 @@ async def get_trades( "limit": filter_request.limit, "has_more": has_more, "next_cursor": next_cursor, - "total_count": len(all_trades) - } + "total_count": len(all_trades), + }, ) except Exception as e: raise HTTPException(status_code=500, detail=f"Error fetching trades: {str(e)}") - - @router.post("/{account_name}/{connector_name}/position-mode") async def set_position_mode( - account_name: str, - connector_name: str, + account_name: str, + connector_name: str, request: PositionModeRequest, - accounts_service: AccountsService = Depends(get_accounts_service) + accounts_service: AccountsService = Depends(get_accounts_service), ): """ Set position mode for a perpetual connector. - + Args: account_name: Name of the account connector_name: Name of the perpetual connector position_mode: Position mode to set (HEDGE or ONEWAY) - + Returns: Success message with status - + Raises: HTTPException: 400 if not a perpetual connector or invalid position mode """ @@ -498,8 +527,7 @@ async def set_position_mode( return result except KeyError: raise HTTPException( - status_code=400, - detail=f"Invalid position mode '{request.position_mode}'. Must be 'HEDGE' or 'ONEWAY'" + status_code=400, detail=f"Invalid position mode '{request.position_mode}'. Must be 'HEDGE' or 'ONEWAY'" ) except HTTPException: raise @@ -509,20 +537,18 @@ async def set_position_mode( @router.get("/{account_name}/{connector_name}/position-mode") async def get_position_mode( - account_name: str, - connector_name: str, - accounts_service: AccountsService = Depends(get_accounts_service) + account_name: str, connector_name: str, accounts_service: AccountsService = Depends(get_accounts_service) ): """ Get current position mode for a perpetual connector. - + Args: account_name: Name of the account connector_name: Name of the perpetual connector - + Returns: Dictionary with current position mode, connector name, and account name - + Raises: HTTPException: 400 if not a perpetual connector """ @@ -537,32 +563,29 @@ async def get_position_mode( @router.post("/{account_name}/{connector_name}/leverage") async def set_leverage( - account_name: str, - connector_name: str, + account_name: str, + connector_name: str, request: LeverageRequest, - accounts_service: AccountsService = Depends(get_accounts_service) + accounts_service: AccountsService = Depends(get_accounts_service), ): """ Set leverage for a specific trading pair on a perpetual connector. 
- + Args: account_name: Name of the account connector_name: Name of the perpetual connector request: Leverage request with trading pair and leverage value accounts_service: Injected accounts service - + Returns: Dictionary with success status and message - + Raises: HTTPException: 400 for invalid parameters or non-perpetual connector, 404 for account/connector not found, 500 for execution errors """ try: result = await accounts_service.set_leverage( - account_name=account_name, - connector_name=connector_name, - trading_pair=request.trading_pair, - leverage=request.leverage + account_name=account_name, connector_name=connector_name, trading_pair=request.trading_pair, leverage=request.leverage ) return result except HTTPException: @@ -570,10 +593,10 @@ async def set_leverage( except Exception as e: raise HTTPException(status_code=500, detail=f"Unexpected error setting leverage: {str(e)}") + @router.post("/funding-payments", response_model=PaginatedResponse) async def get_funding_payments( - filter_request: FundingPaymentFilterRequest, - accounts_service: AccountsService = Depends(get_accounts_service) + filter_request: FundingPaymentFilterRequest, accounts_service: AccountsService = Depends(get_accounts_service) ): """ Get funding payment history across all or filtered perpetual connectors. @@ -600,7 +623,11 @@ async def get_funding_payments( for account_name in accounts_to_check: if account_name in all_connectors: # Filter connectors - connectors_to_check = filter_request.connector_names if filter_request.connector_names else list(all_connectors[account_name].keys()) + connectors_to_check = ( + filter_request.connector_names + if filter_request.connector_names + else list(all_connectors[account_name].keys()) + ) for connector_name in connectors_to_check: # Only fetch funding payments from perpetual connectors @@ -610,20 +637,23 @@ async def get_funding_payments( account_name=account_name, connector_name=connector_name, trading_pair=filter_request.trading_pair, - limit=filter_request.limit * 2 # Get more for pagination + limit=filter_request.limit * 2, # Get more for pagination ) # Add cursor-friendly identifier to each payment for payment in payments: - payment["_cursor_id"] = f"{account_name}:{connector_name}:{payment.get('timestamp', '')}:{payment.get('trading_pair', '')}" + payment["_cursor_id"] = ( + f"{account_name}:{connector_name}:{payment.get('timestamp', '')}:{payment.get('trading_pair', '')}" + ) all_funding_payments.extend(payments) except Exception as e: # Log error but continue with other connectors import logging + logger.warning(f"Failed to get funding payments for {account_name}/{connector_name}: {e}") # Sort by timestamp (most recent first) and then by cursor_id for consistency all_funding_payments.sort(key=lambda x: (x.get("timestamp", ""), x.get("_cursor_id", "")), reverse=True) - + # Apply cursor-based pagination start_index = 0 if filter_request.cursor: @@ -632,15 +662,15 @@ async def get_funding_payments( if payment.get("_cursor_id") == filter_request.cursor: start_index = i + 1 break - + # Get page of results end_index = start_index + filter_request.limit page_payments = all_funding_payments[start_index:end_index] - + # Determine next cursor and has_more has_more = end_index < len(all_funding_payments) next_cursor = page_payments[-1].get("_cursor_id") if page_payments and has_more else None - + # Clean up cursor_id from response data for payment in page_payments: payment.pop("_cursor_id", None) @@ -651,9 +681,76 @@ async def get_funding_payments( "limit": 
filter_request.limit, "has_more": has_more, "next_cursor": next_cursor, - "total_count": len(all_funding_payments) - } + "total_count": len(all_funding_payments), + }, ) except Exception as e: - raise HTTPException(status_code=500, detail=f"Error fetching funding payments: {str(e)}") \ No newline at end of file + raise HTTPException(status_code=500, detail=f"Error fetching funding payments: {str(e)}") + + +def _standardize_in_flight_order_response(order, account_name: str, connector_name: str) -> dict: + """ + Convert a Hummingbot InFlightOrder to standardized format matching the orders search response. + + Args: + order: Hummingbot InFlightOrder instance + account_name: Name of the account + connector_name: Name of the connector + + Returns: + Dictionary with standardized order format + """ + # Map OrderState to status strings + from hummingbot.core.data_type.in_flight_order import OrderState + + status_mapping = { + OrderState.PENDING_CREATE: "SUBMITTED", + OrderState.OPEN: "OPEN", + OrderState.PENDING_CANCEL: "OPEN", # Still open until cancelled + OrderState.CANCELED: "CANCELLED", + OrderState.PARTIALLY_FILLED: "PARTIALLY_FILLED", + OrderState.FILLED: "FILLED", + OrderState.FAILED: "FAILED", + OrderState.PENDING_APPROVAL: "SUBMITTED", + OrderState.APPROVED: "SUBMITTED", + OrderState.CREATED: "SUBMITTED", + OrderState.COMPLETED: "FILLED", + } + + # Get status string + status = status_mapping.get(order.current_state, "SUBMITTED") + + # Convert timestamps to ISO format + from datetime import datetime, timezone + + created_at = datetime.fromtimestamp(order.creation_timestamp, tz=timezone.utc).isoformat() + updated_at = datetime.fromtimestamp( + getattr(order, "last_update_timestamp", order.creation_timestamp), tz=timezone.utc + ).isoformat() + + return { + "order_id": order.client_order_id, + "account_name": account_name, + "connector_name": connector_name, + "trading_pair": order.trading_pair, + "trade_type": order.trade_type.name, + "order_type": order.order_type.name, + "amount": float(order.amount), + "price": float(order.price) if order.price else None, + "status": status, + "filled_amount": float(getattr(order, "executed_amount_base", 0) or 0), + "average_fill_price": ( + float(getattr(order, "last_executed_price", 0) or 0) if getattr(order, "last_executed_price", None) else None + ), + "fee_paid": ( + float(getattr(order, "cumulative_fee_paid_quote", 0) or 0) + if getattr(order, "cumulative_fee_paid_quote", None) + else None + ), + "fee_currency": None, # InFlightOrder doesn't store fee currency directly + "created_at": created_at, + "updated_at": updated_at, + "exchange_order_id": order.exchange_order_id, + "error_message": None, # InFlightOrder doesn't store error messages + } From 6c0052b43b03acdace52e667ac0a5e353510a77c Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 11 Jul 2025 01:06:08 +0300 Subject: [PATCH 232/244] (feat) add clock and loader of previous transactions --- services/accounts_service.py | 20 ++++++++++++++++++-- services/orders_recorder.py | 36 ++++++++++++++++++++++++++++++++++-- 2 files changed, 52 insertions(+), 4 deletions(-) diff --git a/services/accounts_service.py b/services/accounts_service.py index ea7dc5b9..ec3e79b6 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -2,6 +2,9 @@ import logging from datetime import datetime, timezone +from hummingbot.core.clock import Clock +from hummingbot.core.clock_mode import ClockMode + # Create module-specific logger logger = logging.getLogger(__name__) from decimal import Decimal 
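The hunks below wire a Hummingbot Clock into AccountsService: iterators registered on a REALTIME clock are ticked once per tick_size interval, which is what drives periodic connector work once connectors stop running their own loops. A compact sketch of the lifecycle, built only from calls the patch itself uses (any clock-compatible iterator works as the connector argument):

    import asyncio
    from hummingbot.core.clock import Clock
    from hummingbot.core.clock_mode import ClockMode

    async def run_with_clock(connector):
        clock = Clock(ClockMode.REALTIME, tick_size=1.0)  # wall-clock ticks, one per second
        clock.add_iterator(connector)   # the connector is ticked on every clock cycle
        with clock:
            await clock.run()           # blocks until the surrounding task is cancelled

Cancelling the task that awaits clock.run() is exactly how the stop() hunk below shuts the clock down.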
@@ -54,9 +57,11 @@ def __init__(self,
         # Database setup for account states and orders
         self.db_manager = AsyncDatabaseManager(settings.database.url)
         self._db_initialized = False
+        self.clock = Clock(ClockMode.REALTIME, tick_size=1.0)
+        self._clock_task: Optional[asyncio.Task] = None
 
         # Initialize connector manager with db_manager
-        self.connector_manager = ConnectorManager(self.secrets_manager, self.db_manager)
+        self.connector_manager = ConnectorManager(self.secrets_manager, self.clock, self.db_manager)
 
     async def ensure_db_initialized(self):
         """Ensure database is initialized before using it."""
@@ -82,7 +87,13 @@ def start(self):
         """
         # Start the update loop which will call check_all_connectors
         self._update_account_state_task = asyncio.create_task(self.update_account_state_loop())
-
+        self._clock_task = asyncio.create_task(self._run_clock())
+
+    async def _run_clock(self):
+        """Run the clock system."""
+        with self.clock as clock:
+            await clock.run()
+
     async def stop(self):
         """
         Stop all accounts service tasks and cleanup resources.
@@ -95,6 +106,11 @@ async def stop(self):
             self._update_account_state_task.cancel()
             self._update_account_state_task = None
             logger.info("Stopped account state update loop")
+
+        if self._clock_task is not None:
+            self._clock_task.cancel()
+            self._clock_task = None
+            logger.info("Stopped clock task")
 
         # Stop all connectors through the ConnectorManager
         await self.connector_manager.stop_all_connectors()
diff --git a/services/orders_recorder.py b/services/orders_recorder.py
index 0b4257ae..caa6d759 100644
--- a/services/orders_recorder.py
+++ b/services/orders_recorder.py
@@ -148,6 +148,26 @@ async def _handle_order_created(self, event: Union[BuyOrderCreatedEvent, SellOrd
         try:
             async with self.db_manager.get_session_context() as session:
                 order_repo = OrderRepository(session)
+
+                # Check if order already exists first
+                existing_order = await order_repo.get_order_by_client_id(event.order_id)
+                if existing_order:
+                    logger.info(f"OrdersRecorder: Order {event.order_id} already exists with status {existing_order.status}")
+
+                    # Update exchange_order_id if we have it now and it was missing
+                    exchange_order_id = getattr(event, 'exchange_order_id', None)
+                    if exchange_order_id and not existing_order.exchange_order_id:
+                        existing_order.exchange_order_id = exchange_order_id
+                        logger.info(f"OrdersRecorder: Updated exchange_order_id to {exchange_order_id} for order {event.order_id}")
+
+                    # Update status if it's still in PENDING_CREATE or similar early state
+                    if existing_order.status in ["PENDING_CREATE", "PENDING"]:
+                        logger.info(f"OrdersRecorder: Updating status from {existing_order.status} to SUBMITTED for order {event.order_id}")
+                        existing_order.status = "SUBMITTED"
+
+                    await session.flush()
+                    return
+
                 order_data = {
                     "client_order_id": event.order_id,
                     "account_name": self.account_name,
@@ -323,8 +343,20 @@ async def _handle_order_failed(self, event: Any):
                     "error_message": self._extract_error_message(event)
                 }
 
-                await order_repo.create_order(order_data)
-                logger.info(f"Created failed order record for {event.order_id}")
+                try:
+                    await order_repo.create_order(order_data)
+                    logger.info(f"Created failed order record for {event.order_id}")
+                except Exception as create_error:
+                    # If creation fails due to duplicate key, try to update existing order
+                    if "duplicate key" in str(create_error).lower() or "unique constraint" in str(create_error).lower():
+                        logger.info(f"Order {event.order_id} already exists, updating status to FAILED")
+                        await order_repo.update_order_status(
+
client_order_id=event.order_id, + status="FAILED", + error_message=self._extract_error_message(event) + ) + else: + raise create_error except Exception as e: logger.error(f"Error recording order failure: {e}") From 3d04c01ce60097e636cd9376fff0e24bfcd404cc Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 11 Jul 2025 01:06:23 +0300 Subject: [PATCH 233/244] (feat) formatting --- utils/connector_manager.py | 248 ++++++++++++++++++++++++++----------- 1 file changed, 176 insertions(+), 72 deletions(-) diff --git a/utils/connector_manager.py b/utils/connector_manager.py index 3a956bf2..3eec0f5b 100644 --- a/utils/connector_manager.py +++ b/utils/connector_manager.py @@ -1,7 +1,11 @@ import asyncio import logging +import time +from decimal import Decimal from typing import Dict, List, Optional +from hummingbot.core.clock import Clock, ClockMode + # Create module-specific logger logger = logging.getLogger(__name__) @@ -10,12 +14,11 @@ from hummingbot.client.config.config_helpers import ClientConfigAdapter, ReadOnlyClientConfigAdapter, get_connector_class from hummingbot.client.settings import AllConnectorSettings from hummingbot.connector.connector_base import ConnectorBase -from hummingbot.connector.exchange_py_base import ExchangePyBase -from hummingbot.core.data_type.common import PositionMode -from hummingbot.core.utils.async_utils import safe_ensure_future +from hummingbot.core.data_type.common import OrderType, PositionAction, PositionMode, TradeType +from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderState -from utils.hummingbot_api_config_adapter import HummingbotAPIConfigAdapter from utils.file_system import FileSystemUtil, fs_util +from utils.hummingbot_api_config_adapter import HummingbotAPIConfigAdapter from utils.security import BackendAPISecurity @@ -25,37 +28,39 @@ class ConnectorManager: Handles connector configuration and initialization. This is the single source of truth for all connector instances. """ - - def __init__(self, secrets_manager: ETHKeyFileSecretManger, db_manager=None): + + def __init__(self, secrets_manager: ETHKeyFileSecretManger, clock: Clock, db_manager=None): self.secrets_manager = secrets_manager self.db_manager = db_manager self._connector_cache: Dict[str, ConnectorBase] = {} self._orders_recorders: Dict[str, any] = {} self._funding_recorders: Dict[str, any] = {} - + self._status_polling_tasks: Dict[str, asyncio.Task] = {} + self.clock = clock + async def get_connector(self, account_name: str, connector_name: str): """ Get the connector object for the specified account and connector. Uses caching to avoid recreating connectors unnecessarily. Ensures proper initialization including position mode setup. - + :param account_name: The name of the account. :param connector_name: The name of the connector. :return: The connector object. """ cache_key = f"{account_name}:{connector_name}" - + if cache_key in self._connector_cache: return self._connector_cache[cache_key] - + # Create connector with full initialization connector = await self._create_and_initialize_connector(account_name, connector_name) return connector - + def _create_connector(self, account_name: str, connector_name: str): """ Create a new connector instance. - + :param account_name: The name of the account. :param connector_name: The name of the connector. :return: The connector object. 
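Everything in this manager hangs off the account:connector cache key shown above: the connector cache, the order and funding recorders, and the status polling tasks all use the same string handle. A reduced, generic sketch of the get-or-create pattern (the factory stands in for the real method, which also starts recorders and the network):

    from typing import Callable, Dict

    class ConnectorCache:
        def __init__(self, factory: Callable[[str, str], object]):
            self._factory = factory
            self._cache: Dict[str, object] = {}

        def get(self, account_name: str, connector_name: str):
            key = f"{account_name}:{connector_name}"
            if key not in self._cache:
                self._cache[key] = self._factory(account_name, connector_name)  # create once
            return self._cache[key]

        def evict(self, account_name: str, connector_name: str):
            # Dropping the entry forces recreation on the next get(), e.g. after a key rotation
            self._cache.pop(f"{account_name}:{connector_name}", None)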
@@ -64,31 +69,31 @@ def _create_connector(self, account_name: str, connector_name: str): client_config_map = ClientConfigAdapter(ClientConfigMap()) conn_setting = AllConnectorSettings.get_connector_settings()[connector_name] keys = BackendAPISecurity.api_keys(connector_name) - + # Debug logging logger.info(f"Creating connector {connector_name} for account {account_name}") logger.debug(f"API keys retrieved: {list(keys.keys()) if keys else 'None'}") - + read_only_config = ReadOnlyClientConfigAdapter.lock_config(client_config_map) - + init_params = conn_setting.conn_init_parameters( trading_pairs=[], trading_required=True, api_keys=keys, client_config_map=read_only_config, ) - + # Debug logging logger.debug(f"Init params keys: {list(init_params.keys())}") - + connector_class = get_connector_class(connector_name) connector = connector_class(**init_params) return connector - + def clear_cache(self, account_name: Optional[str] = None, connector_name: Optional[str] = None): """ Clear the connector cache. - + :param account_name: If provided, only clear cache for this account. :param connector_name: If provided with account_name, only clear this specific connector. """ @@ -103,22 +108,22 @@ def clear_cache(self, account_name: Optional[str] = None, connector_name: Option else: # Clear entire cache self._connector_cache.clear() - + @staticmethod def get_connector_config_map(connector_name: str): """ Get the connector config map for the specified connector. - + :param connector_name: The name of the connector. :return: The connector config map. """ connector_config = HummingbotAPIConfigAdapter(AllConnectorSettings.get_connector_config_keys(connector_name)) return [key for key in connector_config.hb_config.__fields__.keys() if key != "connector"] - + async def update_connector_keys(self, account_name: str, connector_name: str, keys: dict): """ Update the API keys for a connector and refresh the connector instance. - + :param account_name: The name of the account. :param connector_name: The name of the connector. :param keys: Dictionary of API keys to update. @@ -126,27 +131,27 @@ async def update_connector_keys(self, account_name: str, connector_name: str, ke """ BackendAPISecurity.login_account(account_name=account_name, secrets_manager=self.secrets_manager) connector_config = HummingbotAPIConfigAdapter(AllConnectorSettings.get_connector_config_keys(connector_name)) - + for key, value in keys.items(): setattr(connector_config, key, value) - + BackendAPISecurity.update_connector_keys(account_name, connector_config) - + # Re-decrypt all credentials to ensure the new keys are available BackendAPISecurity.decrypt_all(account_name=account_name) - + # Clear the cache for this connector to force recreation with new keys self.clear_cache(account_name, connector_name) - + # Create and return new connector instance new_connector = await self.get_connector(account_name, connector_name) - + return new_connector - + def list_account_connectors(self, account_name: str) -> List[str]: """ List all initialized connectors for a specific account. - + :param account_name: The name of the account. :return: List of connector names. """ @@ -156,11 +161,11 @@ def list_account_connectors(self, account_name: str) -> List[str]: if acc_name == account_name: connectors.append(conn_name) return connectors - + def get_all_connectors(self) -> Dict[str, Dict[str, ConnectorBase]]: """ Get all connectors organized by account. - + :return: Dictionary mapping account names to their connectors. 
""" result = {} @@ -170,31 +175,32 @@ def get_all_connectors(self) -> Dict[str, Dict[str, ConnectorBase]]: result[account_name] = {} result[account_name][connector_name] = connector return result - + def is_connector_initialized(self, account_name: str, connector_name: str) -> bool: """ Check if a connector is already initialized and cached. - + :param account_name: The name of the account. :param connector_name: The name of the connector. :return: True if the connector is initialized, False otherwise. """ cache_key = f"{account_name}:{connector_name}" return cache_key in self._connector_cache - + async def _create_and_initialize_connector(self, account_name: str, connector_name: str) -> ConnectorBase: """ Create and fully initialize a connector with all necessary setup. This includes creating the connector, starting its network, setting up order recording, and configuring position mode for perpetual connectors. - + :param account_name: The name of the account. :param connector_name: The name of the connector. :return: The initialized connector instance. """ + cache_key = f"{account_name}:{connector_name}" # Create the base connector connector = self._create_connector(account_name, connector_name) - cache_key = f"{account_name}:{connector_name}" + self.clock.add_iterator(connector) # Initialize symbol map await connector._initialize_trading_pair_symbol_map() @@ -209,61 +215,150 @@ async def _create_and_initialize_connector(self, account_name: str, connector_na await connector._update_positions() self._connector_cache[cache_key] = connector + + # Load existing orders from database before starting network + if self.db_manager: + await self._load_existing_orders_from_database(connector, account_name, connector_name) + # Start order tracking if db_manager is available if self.db_manager: if cache_key not in self._orders_recorders: # Import OrdersRecorder dynamically to avoid circular imports from services.orders_recorder import OrdersRecorder - + # Create and start orders recorder orders_recorder = OrdersRecorder(self.db_manager, account_name, connector_name) orders_recorder.start(connector) self._orders_recorders[cache_key] = orders_recorder - + # Start funding tracking for perpetual connectors if "_perpetual" in connector_name and cache_key not in self._funding_recorders: # Import FundingRecorder dynamically to avoid circular imports from services.funding_recorder import FundingRecorder - + # Create and start funding recorder funding_recorder = FundingRecorder(self.db_manager, account_name, connector_name) funding_recorder.start(connector) self._funding_recorders[cache_key] = funding_recorder - - # Start the connector's network without order book tracker - self._start_network_without_order_book(connector) + + # Network will be started automatically by the clock system (using patched start_network) logger.info(f"Initialized connector {connector_name} for account {account_name}") return connector - - def _start_network_without_order_book(self, connector: ExchangePyBase): + + + async def _load_existing_orders_from_database(self, connector: ConnectorBase, account_name: str, connector_name: str): """ - Start connector network tasks except the order book tracker. - This avoids issues when there are no trading pairs configured. + Load existing active orders from database and add them to connector's in_flight_orders. + This ensures that orders placed before an API restart can still be managed. 
+ + :param connector: The connector instance to load orders into + :param account_name: The name of the account + :param connector_name: The name of the connector """ try: - # Start only the essential polling tasks if trading is required - connector._trading_rules_polling_task = safe_ensure_future(connector._trading_rules_polling_loop()) - connector._trading_fees_polling_task = safe_ensure_future(connector._trading_fees_polling_loop()) - connector._status_polling_task = safe_ensure_future(connector._status_polling_loop()) - connector._user_stream_tracker_task = connector._create_user_stream_tracker_task() - connector._user_stream_event_listener_task = safe_ensure_future(connector._user_stream_event_listener()) - connector._lost_orders_update_task = safe_ensure_future(connector._lost_orders_update_polling_loop()) - - logger.debug(f"Started connector network without order book tracker") - + # Import OrderRepository dynamically to avoid circular imports + from database import OrderRepository + + async with self.db_manager.get_session_context() as session: + order_repo = OrderRepository(session) + + # Get active orders from database for this account/connector + active_orders = await order_repo.get_active_orders(account_name=account_name, connector_name=connector_name) + + logger.info(f"Loading {len(active_orders)} existing active orders for {account_name}/{connector_name}") + + for order_record in active_orders: + try: + # Convert database order to InFlightOrder + in_flight_order = self._convert_db_order_to_in_flight_order(order_record) + + # Add to connector's in_flight_orders + connector.in_flight_orders[in_flight_order.client_order_id] = in_flight_order + + logger.debug(f"Loaded order {in_flight_order.client_order_id} from database into connector") + + except Exception as e: + logger.error(f"Error converting database order {order_record.client_order_id} to InFlightOrder: {e}") + continue + + logger.info( + f"Successfully loaded {len(connector.in_flight_orders)} in-flight orders for {account_name}/{connector_name}" + ) + except Exception as e: - logger.error(f"Error starting connector network without order book: {e}") - + logger.error(f"Error loading existing orders from database for {account_name}/{connector_name}: {e}") + + def _convert_db_order_to_in_flight_order(self, order_record) -> InFlightOrder: + """ + Convert a database Order record to a Hummingbot InFlightOrder object. 
+ + :param order_record: Database Order model instance + :return: InFlightOrder instance + """ + # Map database status to OrderState + status_mapping = { + "SUBMITTED": OrderState.PENDING_CREATE, + "OPEN": OrderState.OPEN, + "PARTIALLY_FILLED": OrderState.PARTIALLY_FILLED, + "FILLED": OrderState.FILLED, + "CANCELLED": OrderState.CANCELED, + "FAILED": OrderState.FAILED, + } + + # Get the appropriate OrderState + order_state = status_mapping.get(order_record.status, OrderState.PENDING_CREATE) + + # Convert string enums to proper enum instances + try: + order_type = OrderType[order_record.order_type] + except (KeyError, ValueError): + logger.warning(f"Unknown order type '{order_record.order_type}', defaulting to LIMIT") + order_type = OrderType.LIMIT + + try: + trade_type = TradeType[order_record.trade_type] + except (KeyError, ValueError): + logger.warning(f"Unknown trade type '{order_record.trade_type}', defaulting to BUY") + trade_type = TradeType.BUY + + # Convert creation timestamp - use order creation time or current time as fallback + creation_timestamp = order_record.created_at.timestamp() if order_record.created_at else time.time() + + # Create InFlightOrder instance + in_flight_order = InFlightOrder( + client_order_id=order_record.client_order_id, + trading_pair=order_record.trading_pair, + order_type=order_type, + trade_type=trade_type, + amount=Decimal(str(order_record.amount)), + creation_timestamp=creation_timestamp, + price=Decimal(str(order_record.price)) if order_record.price else None, + exchange_order_id=order_record.exchange_order_id, + initial_state=order_state, + leverage=1, # Default leverage + position=PositionAction.NIL, # Default position action + ) + + # Update current state and filled amount if order has progressed + in_flight_order.current_state = order_state + if order_record.filled_amount: + in_flight_order.executed_amount_base = Decimal(str(order_record.filled_amount)) + if order_record.average_fill_price: + in_flight_order.last_executed_quantity = Decimal(str(order_record.filled_amount or 0)) + in_flight_order.last_executed_price = Decimal(str(order_record.average_fill_price)) + + return in_flight_order + async def stop_connector(self, account_name: str, connector_name: str): """ Stop a connector and its associated services. - + :param account_name: The name of the account. :param connector_name: The name of the connector. 
""" cache_key = f"{account_name}:{connector_name}" - + # Stop order recorder if exists if cache_key in self._orders_recorders: try: @@ -272,7 +367,7 @@ async def stop_connector(self, account_name: str, connector_name: str): logger.info(f"Stopped order recorder for {account_name}/{connector_name}") except Exception as e: logger.error(f"Error stopping order recorder for {account_name}/{connector_name}: {e}") - + # Stop funding recorder if exists if cache_key in self._funding_recorders: try: @@ -281,8 +376,17 @@ async def stop_connector(self, account_name: str, connector_name: str): logger.info(f"Stopped funding recorder for {account_name}/{connector_name}") except Exception as e: logger.error(f"Error stopping funding recorder for {account_name}/{connector_name}: {e}") - - # Stop connector network if exists + + # Stop manual status polling task if exists + if cache_key in self._status_polling_tasks: + try: + self._status_polling_tasks[cache_key].cancel() + del self._status_polling_tasks[cache_key] + logger.info(f"Stopped manual status polling for {account_name}/{connector_name}") + except Exception as e: + logger.error(f"Error stopping manual status polling for {account_name}/{connector_name}: {e}") + + # Stop connector netwowrk if exists if cache_key in self._connector_cache: try: connector = self._connector_cache[cache_key] @@ -290,27 +394,27 @@ async def stop_connector(self, account_name: str, connector_name: str): logger.info(f"Stopped connector network for {account_name}/{connector_name}") except Exception as e: logger.error(f"Error stopping connector network for {account_name}/{connector_name}: {e}") - + async def stop_all_connectors(self): """ Stop all connectors and their associated services. """ # Get all account/connector pairs pairs = [(k.split(":", 1)[0], k.split(":", 1)[1]) for k in self._connector_cache.keys()] - + # Stop each connector for account_name, connector_name in pairs: await self.stop_connector(account_name, connector_name) - + def list_available_credentials(self, account_name: str) -> List[str]: """ List all available connector credentials for an account. - + :param account_name: The name of the account. :return: List of connector names that have credentials. """ try: - files = fs_util.list_files(f'credentials/{account_name}/connectors') - return [file.replace('.yml', '') for file in files if file.endswith('.yml')] + files = fs_util.list_files(f"credentials/{account_name}/connectors") + return [file.replace(".yml", "") for file in files if file.endswith(".yml")] except FileNotFoundError: - return [] \ No newline at end of file + return [] From 0c8cc4ee722b9fc0c7010aa0fd63c044ec85b8ef Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 11 Jul 2025 01:06:30 +0300 Subject: [PATCH 234/244] (feat) add patch to start network --- main.py | 45 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) diff --git a/main.py b/main.py index dc46c3e5..d0ba9ae8 100644 --- a/main.py +++ b/main.py @@ -21,6 +21,51 @@ def patched_save_to_yml(yml_path, cm): from hummingbot.client.config import config_helpers config_helpers.save_to_yml = patched_save_to_yml +# Monkey patch start_network to conditionally start order book tracker +async def patched_start_network(self): + """ + Patched version of start_network that conditionally starts the order book tracker. + Only starts order book tracker when trading pairs are configured to avoid issues. 
+ """ + import logging + from hummingbot.core.utils.async_utils import safe_ensure_future + + logger = logging.getLogger(__name__) + logger.debug(f"Starting network for {self.__class__.__name__} (patched)") + + # Stop any existing network first + self._stop_network() + + # Check if we have trading pairs configured + has_trading_pairs = hasattr(self, '_trading_pairs') and len(self._trading_pairs) > 0 + + # Start order book tracker only if we have trading pairs + if has_trading_pairs: + logger.debug(f"Starting order book tracker for {self.__class__.__name__} with {len(self._trading_pairs)} trading pairs") + self.order_book_tracker.start() + else: + logger.debug(f"Skipping order book tracker for {self.__class__.__name__} - no trading pairs configured") + + # Start the essential polling tasks if trading is required + if self.is_trading_required: + try: + self._trading_rules_polling_task = safe_ensure_future(self._trading_rules_polling_loop()) + self._trading_fees_polling_task = safe_ensure_future(self._trading_fees_polling_loop()) + self._status_polling_task = safe_ensure_future(self._status_polling_loop()) + self._user_stream_tracker_task = self._create_user_stream_tracker_task() + self._user_stream_event_listener_task = safe_ensure_future(self._user_stream_event_listener()) + self._lost_orders_update_task = safe_ensure_future(self._lost_orders_update_polling_loop()) + + logger.debug(f"Started network tasks for {self.__class__.__name__}") + except Exception as e: + logger.error(f"Error starting network for {self.__class__.__name__}: {e}") + else: + logger.debug(f"Trading not required for {self.__class__.__name__}, skipping network start") + +# Apply the start_network patch - this will be applied to ExchangePyBase after import +from hummingbot.connector.exchange_py_base import ExchangePyBase +ExchangePyBase.start_network = patched_start_network + from hummingbot.core.rate_oracle.rate_oracle import RateOracle from fastapi import Depends, FastAPI, HTTPException, status From f0d4dc145ba0e093e11dac064dec60f7311b4d2b Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 11 Jul 2025 02:06:14 +0300 Subject: [PATCH 235/244] (feat) improve performance router --- routers/archived_bots.py | 1 + utils/hummingbot_database_reader.py | 6 +++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/routers/archived_bots.py b/routers/archived_bots.py index 6ff6c6ef..38b42d3b 100644 --- a/routers/archived_bots.py +++ b/routers/archived_bots.py @@ -110,6 +110,7 @@ async def get_database_performance(db_path: str): "final_realized_pnl_quote": float(final_row.get('realized_trade_pnl_quote', 0)), "final_unrealized_pnl_quote": float(final_row.get('unrealized_trade_pnl_quote', 0)), "total_fees_quote": float(performance_data['fees_quote'].sum()), + "total_volume_quote": float(performance_data['cum_volume_quote'].iloc[-1] if len(performance_data) > 0 else 0), "final_net_position": float(final_row.get('net_position', 0)), "trading_pairs": performance_data['trading_pair'].unique().tolist(), "connector_names": performance_data['connector_name'].unique().tolist() diff --git a/utils/hummingbot_database_reader.py b/utils/hummingbot_database_reader.py index ea7b526f..dca87c1b 100644 --- a/utils/hummingbot_database_reader.py +++ b/utils/hummingbot_database_reader.py @@ -184,13 +184,17 @@ def calculate_trade_based_performance(self) -> pd.DataFrame: trades['fees_quote'] ) + # Calculate cumulative volume in quote currency + trades['volume_quote'] = trades['price'] * trades['amount'] + trades['cum_volume_quote'] = 
+
         # Select and return relevant columns
         result_columns = [
             'timestamp', 'price', 'amount', 'trade_type', 'trading_pair', 'connector_name',
             'buy_avg_price', 'buy_volume', 'sell_avg_price', 'sell_volume',
             'net_position', 'realized_trade_pnl_pct', 'realized_trade_pnl_quote',
             'unrealized_trade_pnl_pct', 'unrealized_trade_pnl_quote',
-            'fees_quote', 'net_pnl_quote'
+            'fees_quote', 'net_pnl_quote', 'volume_quote', 'cum_volume_quote'
         ]
 
         return trades[result_columns].sort_values('timestamp')

From 58cfe2097c975665fef7bd71b71c1ae549119641 Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Fri, 11 Jul 2025 02:33:52 +0300
Subject: [PATCH 236/244] (feat) fix position in quote

---
 utils/hummingbot_database_reader.py | 20 ++++++++++++++++++--
 1 file changed, 18 insertions(+), 2 deletions(-)

diff --git a/utils/hummingbot_database_reader.py b/utils/hummingbot_database_reader.py
index dca87c1b..6a57d260 100644
--- a/utils/hummingbot_database_reader.py
+++ b/utils/hummingbot_database_reader.py
@@ -150,7 +150,7 @@ def calculate_trade_based_performance(self) -> pd.DataFrame:
 
         # Matched volume for realized PnL (minimum of buy and sell volumes)
         trades['matched_volume'] = pd.concat([trades['buy_volume'], trades['sell_volume']], axis=1).min(axis=1)
-        trades['realized_trade_pnl_quote'] = trades['realized_trade_pnl_pct'] * trades['matched_volume']
+        trades['realized_trade_pnl_quote'] = trades['realized_trade_pnl_pct'] * trades['matched_volume'] * trades['buy_avg_price']
 
         # Calculate unrealized PnL based on position direction
         # For long positions (net_position > 0): use current price vs buy_avg_price
@@ -172,7 +172,23 @@ def calculate_trade_based_performance(self) -> pd.DataFrame:
         ).fillna(0)
 
         # Calculate unrealized PnL in quote currency
-        trades['unrealized_trade_pnl_quote'] = trades['unrealized_trade_pnl_pct'] * trades['net_position'].abs()
+        trades['unrealized_trade_pnl_quote'] = 0.0
+
+        # Long positions: use buy_avg_price as reference
+        long_mask = trades['net_position'] > 0
+        trades.loc[long_mask, 'unrealized_trade_pnl_quote'] = (
+            trades.loc[long_mask, 'unrealized_trade_pnl_pct'] *
+            trades.loc[long_mask, 'net_position'].abs() *
+            trades.loc[long_mask, 'buy_avg_price']
+        )
+
+        # Short positions: use sell_avg_price as reference
+        short_mask = trades['net_position'] < 0
+        trades.loc[short_mask, 'unrealized_trade_pnl_quote'] = (
+            trades.loc[short_mask, 'unrealized_trade_pnl_pct'] *
+            trades.loc[short_mask, 'net_position'].abs() *
+            trades.loc[short_mask, 'sell_avg_price']
+        )
 
         # Fees are already in trade_fee_in_quote column
         trades['fees_quote'] = trades['trade_fee_in_quote']
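The corrected formulas above convert percentage PnL into quote units by multiplying by the leg's reference price (buy_avg_price for longs, sell_avg_price for shorts); previously the percentage was multiplied by a base-unit volume alone, understating quote PnL by a factor of the price. A worked example, assuming volumes are in base units and that realized_trade_pnl_pct equals sell_avg_price / buy_avg_price - 1 (that formula is computed outside this hunk, so treat it as an assumption):

    buy_volume, buy_avg_price = 2.0, 100.0    # bought 2 units at an average price of 100
    sell_volume, sell_avg_price = 1.0, 110.0  # sold 1 unit at 110
    current_price = 105.0

    matched_volume = min(buy_volume, sell_volume)                   # 1.0 unit closed
    realized_pct = sell_avg_price / buy_avg_price - 1               # 0.10
    realized_quote = realized_pct * matched_volume * buy_avg_price  # 10.0 quote units

    net_position = buy_volume - sell_volume                         # +1.0, i.e. long
    unrealized_pct = current_price / buy_avg_price - 1              # 0.05 on the long leg
    unrealized_quote = unrealized_pct * abs(net_position) * buy_avg_price  # 5.0 quote units

Buying one matched unit at 100 and selling it at 110 indeed nets 10 quote units, and holding one unit bought at 100 while the market trades at 105 carries 5 quote units of unrealized gain, so the quote-denominated numbers line up with intuition.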
""" - def __init__(self, secrets_manager: ETHKeyFileSecretManger, clock: Clock, db_manager=None): + def __init__(self, secrets_manager: ETHKeyFileSecretManger, db_manager=None): self.secrets_manager = secrets_manager self.db_manager = db_manager self._connector_cache: Dict[str, ConnectorBase] = {} self._orders_recorders: Dict[str, any] = {} self._funding_recorders: Dict[str, any] = {} self._status_polling_tasks: Dict[str, asyncio.Task] = {} - self.clock = clock async def get_connector(self, account_name: str, connector_name: str): """ @@ -200,7 +197,6 @@ async def _create_and_initialize_connector(self, account_name: str, connector_na cache_key = f"{account_name}:{connector_name}" # Create the base connector connector = self._create_connector(account_name, connector_name) - self.clock.add_iterator(connector) # Initialize symbol map await connector._initialize_trading_pair_symbol_map() From 7b02071faf4de02dc9abfba1e56bd043f3392ad8 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 11 Jul 2025 16:35:57 +0300 Subject: [PATCH 238/244] (feat) adapt cock and add state update after adding credentials --- services/accounts_service.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/services/accounts_service.py b/services/accounts_service.py index ec3e79b6..7bfcd7e3 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -61,7 +61,7 @@ def __init__(self, self._clock_task: Optional[asyncio.Task] = None # Initialize connector manager with db_manager - self.connector_manager = ConnectorManager(self.secrets_manager, self.clock, self.db_manager) + self.connector_manager = ConnectorManager(self.secrets_manager, self.db_manager) async def ensure_db_initialized(self): """Ensure database is initialized before using it.""" @@ -209,8 +209,6 @@ async def _ensure_account_connectors_initialized(self, account_name: str): except Exception as e: logger.error(f"Error initializing connector {connector_name} for account {account_name}: {e}") - - async def update_account_state(self): """Update account state for all connectors.""" all_connectors = self.connector_manager.get_all_connectors() @@ -300,7 +298,9 @@ async def add_credentials(self, account_name: str, connector_name: str, credenti """ try: # Update the connector keys (this saves the credentials to file and validates them) - await self.connector_manager.update_connector_keys(account_name, connector_name, credentials) + connector = await self.connector_manager.update_connector_keys(account_name, connector_name, credentials) + self.clock.add_iterator(connector) + await self.update_account_state() except Exception as e: logger.error(f"Error adding connector credentials for account {account_name}: {e}") await self.delete_credentials(account_name, connector_name) From 0bbf6e4d72908d418f826d3ed836bd2b9ee76421 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 11 Jul 2025 18:13:39 +0300 Subject: [PATCH 239/244] (feat) reorder market data init --- main.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/main.py b/main.py index d0ba9ae8..6dc39b13 100644 --- a/main.py +++ b/main.py @@ -129,6 +129,17 @@ async def lifespan(app: FastAPI): BackendAPISecurity.store_password_verification(secrets_manager) logging.info("Created password verification file for master_account") + # Initialize MarketDataProvider with empty connectors (will use non-trading connectors) + market_data_provider = MarketDataProvider(connectors={}) + + # Initialize MarketDataFeedManager with lifecycle 
management + market_data_feed_manager = MarketDataFeedManager( + market_data_provider=market_data_provider, + rate_oracle=RateOracle.get_instance(), + cleanup_interval=settings.market_data.cleanup_interval, + feed_timeout=settings.market_data.feed_timeout + ) + # Initialize services bots_orchestrator = BotsOrchestrator( broker_host=settings.broker.host, @@ -138,7 +149,8 @@ async def lifespan(app: FastAPI): ) accounts_service = AccountsService( - account_update_interval=settings.app.account_update_interval + account_update_interval=settings.app.account_update_interval, + market_data_feed_manager=market_data_feed_manager ) docker_service = DockerService() bot_archiver = BotArchiver( @@ -150,17 +162,6 @@ async def lifespan(app: FastAPI): # Initialize database await accounts_service.ensure_db_initialized() - # Initialize MarketDataProvider with empty connectors (will use non-trading connectors) - market_data_provider = MarketDataProvider(connectors={}) - - # Initialize MarketDataFeedManager with lifecycle management - market_data_feed_manager = MarketDataFeedManager( - market_data_provider=market_data_provider, - rate_oracle=RateOracle.get_instance(), - cleanup_interval=settings.market_data.cleanup_interval, - feed_timeout=settings.market_data.feed_timeout - ) - # Store services in app state app.state.bots_orchestrator = bots_orchestrator app.state.accounts_service = accounts_service From a577e538625e79df590fdd5d0d8b87eb63fd95e7 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 11 Jul 2025 19:06:05 +0300 Subject: [PATCH 240/244] (feat) remove patch --- main.py | 86 +++++++++++------------ services/accounts_service.py | 128 +++++++++++++++++++++++++++-------- 2 files changed, 141 insertions(+), 73 deletions(-) diff --git a/main.py b/main.py index 6dc39b13..5ed4b379 100644 --- a/main.py +++ b/main.py @@ -22,49 +22,49 @@ def patched_save_to_yml(yml_path, cm): config_helpers.save_to_yml = patched_save_to_yml # Monkey patch start_network to conditionally start order book tracker -async def patched_start_network(self): - """ - Patched version of start_network that conditionally starts the order book tracker. - Only starts order book tracker when trading pairs are configured to avoid issues. 
- """ - import logging - from hummingbot.core.utils.async_utils import safe_ensure_future - - logger = logging.getLogger(__name__) - logger.debug(f"Starting network for {self.__class__.__name__} (patched)") - - # Stop any existing network first - self._stop_network() - - # Check if we have trading pairs configured - has_trading_pairs = hasattr(self, '_trading_pairs') and len(self._trading_pairs) > 0 - - # Start order book tracker only if we have trading pairs - if has_trading_pairs: - logger.debug(f"Starting order book tracker for {self.__class__.__name__} with {len(self._trading_pairs)} trading pairs") - self.order_book_tracker.start() - else: - logger.debug(f"Skipping order book tracker for {self.__class__.__name__} - no trading pairs configured") - - # Start the essential polling tasks if trading is required - if self.is_trading_required: - try: - self._trading_rules_polling_task = safe_ensure_future(self._trading_rules_polling_loop()) - self._trading_fees_polling_task = safe_ensure_future(self._trading_fees_polling_loop()) - self._status_polling_task = safe_ensure_future(self._status_polling_loop()) - self._user_stream_tracker_task = self._create_user_stream_tracker_task() - self._user_stream_event_listener_task = safe_ensure_future(self._user_stream_event_listener()) - self._lost_orders_update_task = safe_ensure_future(self._lost_orders_update_polling_loop()) - - logger.debug(f"Started network tasks for {self.__class__.__name__}") - except Exception as e: - logger.error(f"Error starting network for {self.__class__.__name__}: {e}") - else: - logger.debug(f"Trading not required for {self.__class__.__name__}, skipping network start") - -# Apply the start_network patch - this will be applied to ExchangePyBase after import -from hummingbot.connector.exchange_py_base import ExchangePyBase -ExchangePyBase.start_network = patched_start_network +# async def patched_start_network(self): +# """ +# Patched version of start_network that conditionally starts the order book tracker. +# Only starts order book tracker when trading pairs are configured to avoid issues. 
+# """ +# import logging +# from hummingbot.core.utils.async_utils import safe_ensure_future +# +# logger = logging.getLogger(__name__) +# logger.debug(f"Starting network for {self.__class__.__name__} (patched)") +# +# # Stop any existing network first +# self._stop_network() +# +# # Check if we have trading pairs configured +# has_trading_pairs = hasattr(self, '_trading_pairs') and len(self._trading_pairs) > 0 +# +# # Start order book tracker only if we have trading pairs +# if has_trading_pairs: +# logger.debug(f"Starting order book tracker for {self.__class__.__name__} with {len(self._trading_pairs)} trading pairs") +# self.order_book_tracker.start() +# else: +# logger.debug(f"Skipping order book tracker for {self.__class__.__name__} - no trading pairs configured") +# +# # Start the essential polling tasks if trading is required +# if self.is_trading_required: +# try: +# self._trading_rules_polling_task = safe_ensure_future(self._trading_rules_polling_loop()) +# self._trading_fees_polling_task = safe_ensure_future(self._trading_fees_polling_loop()) +# self._status_polling_task = safe_ensure_future(self._status_polling_loop()) +# self._user_stream_tracker_task = self._create_user_stream_tracker_task() +# self._user_stream_event_listener_task = safe_ensure_future(self._user_stream_event_listener()) +# self._lost_orders_update_task = safe_ensure_future(self._lost_orders_update_polling_loop()) +# +# logger.debug(f"Started network tasks for {self.__class__.__name__}") +# except Exception as e: +# logger.error(f"Error starting network for {self.__class__.__name__}: {e}") +# else: +# logger.debug(f"Trading not required for {self.__class__.__name__}, skipping network start") +# +# # Apply the start_network patch - this will be applied to ExchangePyBase after import +# from hummingbot.connector.exchange_py_base import ExchangePyBase +# ExchangePyBase.start_network = patched_start_network from hummingbot.core.rate_oracle.rate_oracle import RateOracle diff --git a/services/accounts_service.py b/services/accounts_service.py index 7bfcd7e3..b847d442 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -1,26 +1,23 @@ import asyncio import logging from datetime import datetime, timezone - -from hummingbot.core.clock import Clock -from hummingbot.core.clock_mode import ClockMode - -# Create module-specific logger -logger = logging.getLogger(__name__) from decimal import Decimal from typing import Dict, List, Optional from fastapi import HTTPException from hummingbot.client.config.config_crypt import ETHKeyFileSecretManger from hummingbot.core.data_type.common import OrderType, TradeType, PositionAction, PositionMode +from hummingbot.strategy_v2.executors.data_types import ConnectorPair from config import settings from database import AsyncDatabaseManager, AccountRepository, OrderRepository, TradeRepository, FundingRepository from services.market_data_feed_manager import MarketDataFeedManager from utils.connector_manager import ConnectorManager - from utils.file_system import fs_util +# Create module-specific logger +logger = logging.getLogger(__name__) + class AccountsService: """ @@ -35,30 +32,32 @@ class AccountsService: "kraken": "USD", } - # Cache for storing last successful prices by trading pair + # Cache for storing last successful prices by trading pair with timestamps _last_known_prices = {} + _price_update_interval = 60 # Update prices every 60 seconds def __init__(self, account_update_interval: int = 5, - default_quote: str = "USDT"): + default_quote: str = "USDT", 
+ market_data_feed_manager: Optional[MarketDataFeedManager] = None): """ Initialize the AccountsService. Args: account_update_interval: How often to update account states in minutes (default: 5) default_quote: Default quote currency for trading pairs (default: "USDT") + market_data_feed_manager: Market data feed manager for price caching (optional) """ self.secrets_manager = ETHKeyFileSecretManger(settings.security.config_password) self.accounts_state = {} self.update_account_state_interval = account_update_interval * 60 self.default_quote = default_quote + self.market_data_feed_manager = market_data_feed_manager self._update_account_state_task: Optional[asyncio.Task] = None # Database setup for account states and orders self.db_manager = AsyncDatabaseManager(settings.database.url) self._db_initialized = False - self.clock = Clock(ClockMode.REALTIME, tick_size=1.0) - self._clock_task: Optional[asyncio.Task] = None # Initialize connector manager with db_manager self.connector_manager = ConnectorManager(self.secrets_manager, self.db_manager) @@ -82,17 +81,11 @@ def get_default_market(self, token: str, connector_name: str) -> str: def start(self): """ Start the loop that updates the account state at a fixed interval. - Note: Balance updates are now handled automatically by connector.start_network() + Note: Balance updates are now handled by manual connector state updates. :return: """ # Start the update loop which will call check_all_connectors self._update_account_state_task = asyncio.create_task(self.update_account_state_loop()) - self._clock_task = asyncio.create_task(self._run_clock()) - - async def _run_clock(self): - """Run the clock system.""" - with self.clock as clock: - await clock.run() async def stop(self): """ @@ -106,11 +99,6 @@ async def stop(self): self._update_account_state_task.cancel() self._update_account_state_task = None logger.info("Stopped account state update loop") - - if self._clock_task is not None: - self._clock_task.cancel() - self._clock_task = None - logger.info("Stopped clock task") # Stop all connectors through the ConnectorManager await self.connector_manager.stop_all_connectors() @@ -120,12 +108,14 @@ async def stop(self): async def update_account_state_loop(self): """ The loop that updates the account state at a fixed interval. - Note: Balance updates are now handled automatically by connector.start_network() + This now includes manual connector state updates. :return: """ while True: try: await self.check_all_connectors() + # Update all connector states (balances, orders, positions, trading rules) + await self.connector_manager.update_all_connector_states() await self.update_account_state() await self.dump_account_state() except Exception as e: @@ -209,6 +199,50 @@ async def _ensure_account_connectors_initialized(self, account_name: str): except Exception as e: logger.error(f"Error initializing connector {connector_name} for account {account_name}: {e}") + def _initialize_rate_sources_for_pairs(self, connector_name: str, trading_pairs: List[str]): + """ + Helper method to initialize rate sources for trading pairs. + + :param connector_name: The name of the connector. + :param trading_pairs: List of trading pairs to initialize. 
+ """ + if not trading_pairs or not self.market_data_feed_manager: + return + + try: + connector_pairs = [ConnectorPair(connector_name=connector_name, trading_pair=trading_pair) + for trading_pair in trading_pairs] + self.market_data_feed_manager.market_data_provider.initialize_rate_sources(connector_pairs) + logger.info(f"Initialized rate sources for {len(trading_pairs)} trading pairs in {connector_name}") + except Exception as e: + logger.error(f"Error initializing rate sources for {connector_name}: {e}") + + async def _initialize_price_tracking(self, account_name: str, connector_name: str, connector): + """ + Initialize price tracking for a connector's tokens using MarketDataProvider. + + :param account_name: The name of the account. + :param connector_name: The name of the connector. + :param connector: The connector instance. + """ + try: + # Get current balances to determine which tokens need price tracking + balances = connector.get_all_balances() + unique_tokens = [token for token, value in balances.items() if + value != Decimal("0") and token not in settings.banned_tokens and "USD" not in token] + + if unique_tokens: + # Create trading pairs for price tracking + trading_pairs = [self.get_default_market(token, connector_name) for token in unique_tokens] + + # Initialize rate sources using helper method + self._initialize_rate_sources_for_pairs(connector_name, trading_pairs) + + logger.info(f"Initialized price tracking for {len(trading_pairs)} trading pairs in {connector_name} (Account: {account_name})") + + except Exception as e: + logger.error(f"Error initializing price tracking for {connector_name} in account {account_name}: {e}") + async def update_account_state(self): """Update account state for all connectors.""" all_connectors = self.connector_manager.get_all_connectors() @@ -218,19 +252,48 @@ async def update_account_state(self): self.accounts_state[account_name] = {} for connector_name, connector in connectors.items(): try: - tokens_info = await self._get_connector_tokens_info(connector, connector_name) + tokens_info = await self._get_connector_tokens_info(connector, connector_name, self.market_data_feed_manager) self.accounts_state[account_name][connector_name] = tokens_info except Exception as e: logger.error(f"Error updating balances for connector {connector_name} in account {account_name}: {e}") self.accounts_state[account_name][connector_name] = [] - async def _get_connector_tokens_info(self, connector, connector_name: str) -> List[Dict]: - """Get token info from a connector instance.""" + async def _get_connector_tokens_info(self, connector, connector_name: str, market_data_manager: Optional[MarketDataFeedManager] = None) -> List[Dict]: + """Get token info from a connector instance using cached prices when available.""" balances = [{"token": key, "units": value} for key, value in connector.get_all_balances().items() if value != Decimal("0") and key not in settings.banned_tokens] unique_tokens = [balance["token"] for balance in balances] trading_pairs = [self.get_default_market(token, connector_name) for token in unique_tokens if "USD" not in token] - last_traded_prices = await self._safe_get_last_traded_prices(connector, trading_pairs) + + # Try to get cached prices first, fallback to live prices if needed + prices_from_cache = {} + trading_pairs_need_update = [] + + if market_data_manager: + for trading_pair in trading_pairs: + try: + cached_price = market_data_manager.market_data_provider.get_rate(trading_pair) + if cached_price > 0: + 
prices_from_cache[trading_pair] = cached_price + else: + trading_pairs_need_update.append(trading_pair) + except Exception: + trading_pairs_need_update.append(trading_pair) + else: + trading_pairs_need_update = trading_pairs + + # Add new trading pairs to market data provider if they need updates + if trading_pairs_need_update: + self._initialize_rate_sources_for_pairs(connector_name, trading_pairs_need_update) + logger.info(f"Added {len(trading_pairs_need_update)} new trading pairs to market data provider: {trading_pairs_need_update}") + + # Get fresh prices for pairs not in cache or with stale/zero prices + fresh_prices = {} + if trading_pairs_need_update: + fresh_prices = await self._safe_get_last_traded_prices(connector, trading_pairs_need_update) + + # Combine cached and fresh prices + all_prices = {**prices_from_cache, **fresh_prices} tokens_info = [] for balance in balances: @@ -239,7 +302,8 @@ async def _get_connector_tokens_info(self, connector, connector_name: str) -> Li price = Decimal("1") else: market = self.get_default_market(balance["token"], connector_name) - price = Decimal(last_traded_prices.get(market, 0)) + price = Decimal(str(all_prices.get(market, 0))) + tokens_info.append({ "token": balance["token"], "units": float(balance["units"]), @@ -299,7 +363,11 @@ async def add_credentials(self, account_name: str, connector_name: str, credenti try: # Update the connector keys (this saves the credentials to file and validates them) connector = await self.connector_manager.update_connector_keys(account_name, connector_name, credentials) - self.clock.add_iterator(connector) + + # Initialize price tracking for this connector's tokens if market data manager is available + if self.market_data_feed_manager: + await self._initialize_price_tracking(account_name, connector_name, connector) + await self.update_account_state() except Exception as e: logger.error(f"Error adding connector credentials for account {account_name}: {e}") From e34a05810f5298e7c36ee3daf52269216eb6b338 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 11 Jul 2025 19:07:37 +0300 Subject: [PATCH 241/244] (feat) improve connectors state management --- utils/connector_manager.py | 120 +++++++++++++++++++++++++++++++++++-- 1 file changed, 116 insertions(+), 4 deletions(-) diff --git a/utils/connector_manager.py b/utils/connector_manager.py index 5f024a65..6ae0bad6 100644 --- a/utils/connector_manager.py +++ b/utils/connector_manager.py @@ -14,8 +14,9 @@ from hummingbot.connector.connector_base import ConnectorBase from hummingbot.core.data_type.common import OrderType, PositionAction, PositionMode, TradeType from hummingbot.core.data_type.in_flight_order import InFlightOrder, OrderState +from hummingbot.core.utils.async_utils import safe_ensure_future -from utils.file_system import FileSystemUtil, fs_util +from utils.file_system import fs_util from utils.hummingbot_api_config_adapter import HummingbotAPIConfigAdapter from utils.security import BackendAPISecurity @@ -201,6 +202,9 @@ async def _create_and_initialize_connector(self, account_name: str, connector_na # Initialize symbol map await connector._initialize_trading_pair_symbol_map() + # Update trading rules + await connector._update_trading_rules() + # Update initial balances await connector._update_balances() @@ -237,11 +241,119 @@ async def _create_and_initialize_connector(self, account_name: str, connector_na funding_recorder.start(connector) self._funding_recorders[cache_key] = funding_recorder - # Network will be started automatically by the clock system 
(using patched start_network) + # Start network manually without clock system + await self._start_connector_network(connector) + + # Perform initial update of connector state + await self._update_connector_state(connector, connector_name) logger.info(f"Initialized connector {connector_name} for account {account_name}") return connector + async def _start_connector_network(self, connector: ConnectorBase): + """ + Start connector network tasks manually without clock system. + Based on the original start_network method but without order book tracker. + """ + try: + # Stop any existing network tasks + await self._stop_connector_network(connector) + + # Start trading rules polling + connector._trading_rules_polling_task = safe_ensure_future(connector._trading_rules_polling_loop()) + + # Start trading fees polling + connector._trading_fees_polling_task = safe_ensure_future(connector._trading_fees_polling_loop()) + + # Start user stream tracker (websocket connection) + connector._user_stream_tracker_task = connector._create_user_stream_tracker_task() + + # Start user stream event listener + connector._user_stream_event_listener_task = safe_ensure_future(connector._user_stream_event_listener()) + + # Start lost orders update task + connector._lost_orders_update_task = safe_ensure_future(connector._lost_orders_update_polling_loop()) + + logger.info(f"Started connector network tasks for {connector}") + + except Exception as e: + logger.error(f"Error starting connector network: {e}") + raise + + async def _stop_connector_network(self, connector: ConnectorBase): + """ + Stop connector network tasks. + """ + try: + # Stop trading rules polling + if connector._trading_rules_polling_task: + connector._trading_rules_polling_task.cancel() + connector._trading_rules_polling_task = None + + # Stop trading fees polling + if connector._trading_fees_polling_task: + connector._trading_fees_polling_task.cancel() + connector._trading_fees_polling_task = None + + # Stop status polling + if connector._status_polling_task: + connector._status_polling_task.cancel() + connector._status_polling_task = None + + # Stop user stream tracker + if connector._user_stream_tracker_task: + connector._user_stream_tracker_task.cancel() + connector._user_stream_tracker_task = None + + # Stop user stream event listener + if connector._user_stream_event_listener_task: + connector._user_stream_event_listener_task.cancel() + connector._user_stream_event_listener_task = None + + # Stop lost orders update task + if connector._lost_orders_update_task: + connector._lost_orders_update_task.cancel() + connector._lost_orders_update_task = None + + except Exception as e: + logger.error(f"Error stopping connector network: {e}") + + async def _update_connector_state(self, connector: ConnectorBase, connector_name: str): + """ + Update connector state including balances, orders, positions, and trading rules. + This function can be called both during initialization and periodically. 
+ """ + try: + # Update balances + await connector._update_balances() + + # Update trading rules + await connector._update_trading_rules() + + # Update positions for perpetual connectors + if "_perpetual" in connector_name: + await connector._update_positions() + + # Update order status for in-flight orders + if hasattr(connector, '_update_order_status') and connector.in_flight_orders: + await connector._update_order_status() + + logger.debug(f"Updated connector state for {connector_name}") + + except Exception as e: + logger.error(f"Error updating connector state for {connector_name}: {e}") + + async def update_all_connector_states(self): + """ + Update state for all cached connectors. + This can be called periodically to refresh connector data. + """ + for cache_key, connector in self._connector_cache.items(): + account_name, connector_name = cache_key.split(":", 1) + try: + await self._update_connector_state(connector, connector_name) + except Exception as e: + logger.error(f"Error updating state for {account_name}/{connector_name}: {e}") async def _load_existing_orders_from_database(self, connector: ConnectorBase, account_name: str, connector_name: str): """ @@ -382,11 +494,11 @@ async def stop_connector(self, account_name: str, connector_name: str): except Exception as e: logger.error(f"Error stopping manual status polling for {account_name}/{connector_name}: {e}") - # Stop connector netwowrk if exists + # Stop connector network if exists if cache_key in self._connector_cache: try: connector = self._connector_cache[cache_key] - await connector.stop_network() + await self._stop_connector_network(connector) logger.info(f"Stopped connector network for {account_name}/{connector_name}") except Exception as e: logger.error(f"Error stopping connector network for {account_name}/{connector_name}: {e}") From 8978cdab9b5f05b28285e46aca15f25750d7befb Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 11 Jul 2025 19:08:57 +0300 Subject: [PATCH 242/244] (feat) update accounts with price before sending --- routers/portfolio.py | 1 + 1 file changed, 1 insertion(+) diff --git a/routers/portfolio.py b/routers/portfolio.py index 9164d39e..4a5b69bd 100644 --- a/routers/portfolio.py +++ b/routers/portfolio.py @@ -30,6 +30,7 @@ async def get_portfolio_state( Returns: Dict containing account states with connector balances and token information """ + await accounts_service.update_account_state() all_states = accounts_service.get_accounts_state() # Apply account name filter first From 5c2431a11d659fb0fb6b2357360ca4d9f6127b50 Mon Sep 17 00:00:00 2001 From: cardosofede Date: Fri, 11 Jul 2025 19:18:01 +0300 Subject: [PATCH 243/244] (feat) manage potential failures --- routers/trading.py | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/routers/trading.py b/routers/trading.py index 37e8d114..adf8e700 100644 --- a/routers/trading.py +++ b/routers/trading.py @@ -1,4 +1,6 @@ import logging +import math + from typing import Dict, List, Optional from fastapi import APIRouter, Depends, HTTPException @@ -736,18 +738,12 @@ def _standardize_in_flight_order_response(order, account_name: str, connector_na "trading_pair": order.trading_pair, "trade_type": order.trade_type.name, "order_type": order.order_type.name, - "amount": float(order.amount), - "price": float(order.price) if order.price else None, + "amount": float(order.amount) if order.amount and not math.isnan(float(order.amount)) else 0, + "price": float(order.price) if order.price and not math.isnan(float(order.price)) else 
None,
         "status": status,
-        "filled_amount": float(getattr(order, "executed_amount_base", 0) or 0),
-        "average_fill_price": (
-            float(getattr(order, "last_executed_price", 0) or 0) if getattr(order, "last_executed_price", None) else None
-        ),
-        "fee_paid": (
-            float(getattr(order, "cumulative_fee_paid_quote", 0) or 0)
-            if getattr(order, "cumulative_fee_paid_quote", None)
-            else None
-        ),
+        "filled_amount": float(getattr(order, "executed_amount_base", 0) or 0) if not math.isnan(float(getattr(order, "executed_amount_base", 0) or 0)) else 0,
+        "average_fill_price": float(getattr(order, "last_executed_price", 0)) if getattr(order, "last_executed_price", None) and not math.isnan(float(getattr(order, "last_executed_price", 0))) else None,
+        "fee_paid": float(getattr(order, "cumulative_fee_paid_quote", 0)) if getattr(order, "cumulative_fee_paid_quote", None) and not math.isnan(float(getattr(order, "cumulative_fee_paid_quote", 0))) else None,
         "fee_currency": None,  # InFlightOrder doesn't store fee currency directly
         "created_at": created_at,
         "updated_at": updated_at,

From e5b3b25de59e614e62147ec55d168ea7f6c318ac Mon Sep 17 00:00:00 2001
From: cardosofede
Date: Sat, 12 Jul 2025 01:23:45 +0300
Subject: [PATCH 244/244] (feat) handle position-side order failures

---
 bots/scripts/v2_with_controllers.py | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/bots/scripts/v2_with_controllers.py b/bots/scripts/v2_with_controllers.py
index d345f0bf..0dd0d8ce 100644
--- a/bots/scripts/v2_with_controllers.py
+++ b/bots/scripts/v2_with_controllers.py
@@ -4,6 +4,8 @@
 from hummingbot.client.hummingbot_application import HummingbotApplication
 from hummingbot.connector.connector_base import ConnectorBase
+
+from hummingbot.core.event.events import MarketOrderFailureEvent
 from hummingbot.data_feed.candles_feed.data_types import CandlesConfig
 from hummingbot.strategy.strategy_v2_base import StrategyV2Base, StrategyV2ConfigBase
 from hummingbot.strategy_v2.models.base import RunnableStatus
@@ -145,3 +147,18 @@ def apply_initial_setting(self):
                         trading_pair=config_dict["trading_pair"])
             for connector_name, position_mode in connectors_position_mode.items():
                 self.connectors[connector_name].set_position_mode(position_mode)
+
+    def did_fail_order(self, order_failed_event: MarketOrderFailureEvent):
+        """
+        Handle position-side order failures by re-applying the configured position mode on perpetual connectors.
+        """
+        if "position side" in order_failed_event.error_message.lower():
+            connectors_position_mode = {}
+            for controller_id, controller in self.controllers.items():
+                config_dict = controller.config.model_dump()
+                if "connector_name" in config_dict:
+                    if self.is_perpetual(config_dict["connector_name"]):
+                        if "position_mode" in config_dict:
+                            connectors_position_mode[config_dict["connector_name"]] = config_dict["position_mode"]
+            for connector_name, position_mode in connectors_position_mode.items():
+                self.connectors[connector_name].set_position_mode(position_mode)
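
The NaN guards that PATCH 243 adds to _standardize_in_flight_order_response repeat the same float()/math.isnan() ternary for every numeric field. A minimal sketch of how that pattern could be factored out; the safe_float helper below is hypothetical and not part of this patch series:

import math
from typing import Any, Optional


def safe_float(value: Any, default: Optional[float] = None) -> Optional[float]:
    # Coerce a value to float, mapping None, non-numeric, and NaN inputs to the default.
    if value is None:
        return default
    try:
        result = float(value)
    except (TypeError, ValueError):
        return default
    return default if math.isnan(result) else result


# Usage sketch mirroring the patched response builder (semantics approximate;
# the patch maps falsy prices/fees to None, which safe_float callers can do via `or None`):
#   "amount": safe_float(order.amount, default=0),
#   "price": safe_float(order.price),  # None when missing or NaN
#   "fee_paid": safe_float(getattr(order, "cumulative_fee_paid_quote", None) or None),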