diff --git a/.gitignore b/.gitignore index 68bc17f9..7856a1df 100644 --- a/.gitignore +++ b/.gitignore @@ -158,3 +158,6 @@ cython_debug/ # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. #.idea/ + +# Hummingbot Gateway files +gateway-files/ diff --git a/README.md b/README.md index 5e5bd8dd..b1072c92 100644 --- a/README.md +++ b/README.md @@ -26,6 +26,8 @@ Stores all trading data including: - Positions and funding payments - Performance metrics +**Note:** The database is automatically initialized using environment variables (`POSTGRES_USER`, `POSTGRES_DB`, `POSTGRES_PASSWORD`). The included `init-db.sql` script serves as a safety net for edge cases where automatic initialization doesn't complete properly. + ### 2. EMQX Message Broker Enables real-time communication with trading bots: - Receives live updates from running bots @@ -261,11 +263,92 @@ All API endpoints require HTTP Basic Authentication. Include your configured cre curl -u username:password http://localhost:8000/endpoint ``` +## Troubleshooting + +### Database Connection Issues + +If you encounter PostgreSQL database connection errors (such as "role 'hbot' does not exist" or "database 'hummingbot_api' does not exist"), use the automated fix script: + +```bash +chmod +x fix-database.sh +./fix-database.sh +``` + +This script will: +1. Check if PostgreSQL is running +2. Verify that the `hbot` user and `hummingbot_api` database exist +3. Automatically fix any missing configuration +4. 
Test the connection to ensure everything works + +#### Manual Database Verification + +If you prefer to check manually: + +```bash +# Check if containers are running +docker ps | grep -E "hummingbot-postgres|hummingbot-broker" + +# Check PostgreSQL logs +docker logs hummingbot-postgres + +# Verify database connection +docker exec -it hummingbot-postgres psql -U hbot -d hummingbot_api + +# If connection fails, run the initialization script +docker exec -i hummingbot-postgres psql -U postgres < init-db.sql +``` + +#### Complete Database Reset + +If you need to start fresh (⚠️ this will delete all data): + +```bash +# Stop all containers and remove volumes +docker compose down -v + +# Restart setup +./setup.sh +``` + +### EMQX Broker Issues + +If bots can't connect to the broker: + +```bash +# Check EMQX status +docker logs hummingbot-broker + +# Restart EMQX +docker compose restart emqx + +# Access EMQX dashboard (if needed) +# http://localhost:18083 +# Default credentials: admin/public +``` + +### Common Issues + +**Issue**: API won't start - "Database connection failed" +- **Solution**: Run `./fix-database.sh` to repair the database configuration + +**Issue**: Bot containers won't start +- **Solution**: Check Docker daemon is running and you have sufficient resources + +**Issue**: Can't access API at localhost:8000 +- **Solution**: Verify the API container is running: `docker ps | grep hummingbot-api` + +**Issue**: Authentication fails +- **Solution**: Check your USERNAME and PASSWORD in the `.env` file + +**Issue**: Old bot data causing conflicts +- **Solution**: Clean up old volumes: `docker compose down -v` (⚠️ deletes data) + ## Support & Documentation - **API Documentation**: Available at `http://localhost:8000/docs` when running - **Detailed Examples**: Check the `CLAUDE.md` file for comprehensive API usage examples - **Issues**: Report bugs and feature requests through the project's issue tracker +- **Database Troubleshooting**: Use `./fix-database.sh` for 
automated fixes --- Ready to start trading? Deploy your first account and start exploring the powerful capabilities of the Hummingbot API! \ No newline at end of file diff --git a/bots/controllers/directional_trading/bollingrid.py b/bots/controllers/directional_trading/bollingrid.py new file mode 100644 index 00000000..374d3676 --- /dev/null +++ b/bots/controllers/directional_trading/bollingrid.py @@ -0,0 +1,160 @@ +from decimal import Decimal +from typing import List + +import pandas_ta as ta # noqa: F401 +from pydantic import Field, field_validator +from pydantic_core.core_schema import ValidationInfo + +from hummingbot.core.data_type.common import TradeType +from hummingbot.data_feed.candles_feed.data_types import CandlesConfig +from hummingbot.strategy_v2.controllers.directional_trading_controller_base import ( + DirectionalTradingControllerBase, + DirectionalTradingControllerConfigBase, +) +from hummingbot.strategy_v2.executors.grid_executor.data_types import GridExecutorConfig + + +class BollinGridControllerConfig(DirectionalTradingControllerConfigBase): + controller_name: str = "bollingrid" + candles_config: List[CandlesConfig] = [] + candles_connector: str = Field( + default=None, + json_schema_extra={ + "prompt": "Enter the connector for the candles data, leave empty to use the same exchange as the connector: ", + "prompt_on_new": True}) + candles_trading_pair: str = Field( + default=None, + json_schema_extra={ + "prompt": "Enter the trading pair for the candles data, leave empty to use the same trading pair as the connector: ", + "prompt_on_new": True}) + interval: str = Field( + default="3m", + json_schema_extra={ + "prompt": "Enter the candle interval (e.g., 1m, 5m, 1h, 1d): ", + "prompt_on_new": True}) + bb_length: int = Field( + default=100, + json_schema_extra={"prompt": "Enter the Bollinger Bands length: ", "prompt_on_new": True}) + bb_std: float = Field(default=2.0) + bb_long_threshold: float = Field(default=0.0) + bb_short_threshold: float = 
Field(default=1.0) + + # Grid-specific parameters + grid_start_price_coefficient: float = Field( + default=0.25, + json_schema_extra={"prompt": "Grid start price coefficient (multiplier of BB width): ", "prompt_on_new": True}) + grid_end_price_coefficient: float = Field( + default=0.75, + json_schema_extra={"prompt": "Grid end price coefficient (multiplier of BB width): ", "prompt_on_new": True}) + grid_limit_price_coefficient: float = Field( + default=0.35, + json_schema_extra={"prompt": "Grid limit price coefficient (multiplier of BB width): ", "prompt_on_new": True}) + min_spread_between_orders: Decimal = Field( + default=Decimal("0.005"), + json_schema_extra={"prompt": "Minimum spread between grid orders (e.g., 0.005 for 0.5%): ", "prompt_on_new": True}) + order_frequency: int = Field( + default=2, + json_schema_extra={"prompt": "Order frequency (seconds between grid orders): ", "prompt_on_new": True}) + max_orders_per_batch: int = Field( + default=1, + json_schema_extra={"prompt": "Maximum orders per batch: ", "prompt_on_new": True}) + min_order_amount_quote: Decimal = Field( + default=Decimal("6"), + json_schema_extra={"prompt": "Minimum order amount in quote currency: ", "prompt_on_new": True}) + max_open_orders: int = Field( + default=5, + json_schema_extra={"prompt": "Maximum number of open orders: ", "prompt_on_new": True}) + + @field_validator("candles_connector", mode="before") + @classmethod + def set_candles_connector(cls, v, validation_info: ValidationInfo): + if v is None or v == "": + return validation_info.data.get("connector_name") + return v + + @field_validator("candles_trading_pair", mode="before") + @classmethod + def set_candles_trading_pair(cls, v, validation_info: ValidationInfo): + if v is None or v == "": + return validation_info.data.get("trading_pair") + return v + + +class BollinGridController(DirectionalTradingControllerBase): + def __init__(self, config: BollinGridControllerConfig, *args, **kwargs): + self.config = config + 
self.max_records = self.config.bb_length + if len(self.config.candles_config) == 0: + self.config.candles_config = [CandlesConfig( + connector=config.candles_connector, + trading_pair=config.candles_trading_pair, + interval=config.interval, + max_records=self.max_records + )] + super().__init__(config, *args, **kwargs) + + async def update_processed_data(self): + df = self.market_data_provider.get_candles_df(connector_name=self.config.candles_connector, + trading_pair=self.config.candles_trading_pair, + interval=self.config.interval, + max_records=self.max_records) + # Add indicators + df.ta.bbands(length=self.config.bb_length, std=self.config.bb_std, append=True) + bbp = df[f"BBP_{self.config.bb_length}_{self.config.bb_std}"] + bb_width = df[f"BBB_{self.config.bb_length}_{self.config.bb_std}"] + + # Generate signal + long_condition = bbp < self.config.bb_long_threshold + short_condition = bbp > self.config.bb_short_threshold + + # Generate signal + df["signal"] = 0 + df.loc[long_condition, "signal"] = 1 + df.loc[short_condition, "signal"] = -1 + signal = df["signal"].iloc[-1] + close = df["close"].iloc[-1] + current_bb_width = bb_width.iloc[-1] / 100 + if signal == -1: + end_price = close * (1 + current_bb_width * self.config.grid_start_price_coefficient) + start_price = close * (1 - current_bb_width * self.config.grid_end_price_coefficient) + limit_price = close * (1 + current_bb_width * self.config.grid_limit_price_coefficient) + elif signal == 1: + start_price = close * (1 - current_bb_width * self.config.grid_start_price_coefficient) + end_price = close * (1 + current_bb_width * self.config.grid_end_price_coefficient) + limit_price = close * (1 - current_bb_width * self.config.grid_limit_price_coefficient) + else: + start_price = None + end_price = None + limit_price = None + + # Update processed data + self.processed_data["signal"] = df["signal"].iloc[-1] + self.processed_data["features"] = df + self.processed_data["grid_params"] = { + "start_price": 
start_price, + "end_price": end_price, + "limit_price": limit_price + } + + def get_executor_config(self, trade_type: TradeType, price: Decimal, amount: Decimal): + """ + Get the grid executor config based on the trade_type, price and amount. + Uses configurable grid parameters from the controller config. + """ + return GridExecutorConfig( + timestamp=self.market_data_provider.time(), + connector_name=self.config.connector_name, + trading_pair=self.config.trading_pair, + start_price=self.processed_data["grid_params"]["start_price"], + end_price=self.processed_data["grid_params"]["end_price"], + limit_price=self.processed_data["grid_params"]["limit_price"], + side=trade_type, + triple_barrier_config=self.config.triple_barrier_config, + leverage=self.config.leverage, + min_spread_between_orders=self.config.min_spread_between_orders, + total_amount_quote=amount * price, + order_frequency=self.config.order_frequency, + max_orders_per_batch=self.config.max_orders_per_batch, + min_order_amount_quote=self.config.min_order_amount_quote, + max_open_orders=self.config.max_open_orders, + ) diff --git a/bots/controllers/generic/arbitrage_controller.py b/bots/controllers/generic/arbitrage_controller.py index 825a8663..01837036 100644 --- a/bots/controllers/generic/arbitrage_controller.py +++ b/bots/controllers/generic/arbitrage_controller.py @@ -1,10 +1,11 @@ from decimal import Decimal -from typing import List +from typing import List, Optional import pandas as pd from hummingbot.client.ui.interface_utils import format_df_for_printout from hummingbot.core.data_type.common import MarketDict +from hummingbot.core.gateway.gateway_http_client import GatewayHttpClient from hummingbot.data_feed.candles_feed.data_types import CandlesConfig from hummingbot.strategy_v2.controllers.controller_base import ControllerBase, ControllerConfigBase from hummingbot.strategy_v2.executors.arbitrage_executor.data_types import ArbitrageExecutorConfig @@ -16,8 +17,8 @@ class 
ArbitrageControllerConfig(ControllerConfigBase): controller_name: str = "arbitrage_controller" candles_config: List[CandlesConfig] = [] - exchange_pair_1: ConnectorPair = ConnectorPair(connector_name="binance", trading_pair="PENGU-USDT") - exchange_pair_2: ConnectorPair = ConnectorPair(connector_name="solana_jupiter_mainnet-beta", trading_pair="PENGU-USDC") + exchange_pair_1: ConnectorPair = ConnectorPair(connector_name="binance", trading_pair="SOL-USDT") + exchange_pair_2: ConnectorPair = ConnectorPair(connector_name="jupiter/router", trading_pair="SOL-USDC") min_profitability: Decimal = Decimal("0.01") delay_between_executors: int = 10 # in seconds max_executors_imbalance: int = 1 @@ -29,15 +30,6 @@ def update_markets(self, markets: MarketDict) -> MarketDict: class ArbitrageController(ControllerBase): - gas_token_by_network = { - "ethereum": "ETH", - "solana": "SOL", - "binance-smart-chain": "BNB", - "polygon": "POL", - "avalanche": "AVAX", - "dexalot": "AVAX" - } - def __init__(self, config: ArbitrageControllerConfig, *args, **kwargs): self.config = config super().__init__(config, *args, **kwargs) @@ -47,16 +39,19 @@ def __init__(self, config: ArbitrageControllerConfig, *args, **kwargs): self._len_active_buy_arbitrages = 0 self._len_active_sell_arbitrages = 0 self.base_asset = self.config.exchange_pair_1.trading_pair.split("-")[0] + self._gas_token_cache = {} # Cache for gas tokens by connector + self._initialize_gas_tokens() # Fetch gas tokens during init self.initialize_rate_sources() def initialize_rate_sources(self): rates_required = [] for connector_pair in [self.config.exchange_pair_1, self.config.exchange_pair_2]: base, quote = connector_pair.trading_pair.split("-") - # Add rate source for gas token + + # Add rate source for gas token if it's an AMM connector if connector_pair.is_amm_connector(): gas_token = self.get_gas_token(connector_pair.connector_name) - if gas_token != quote: + if gas_token and gas_token != quote: 
rates_required.append(ConnectorPair(connector_name=self.config.rate_connector, trading_pair=f"{gas_token}-{quote}")) @@ -71,9 +66,48 @@ def initialize_rate_sources(self): if len(rates_required) > 0: self.market_data_provider.initialize_rate_sources(rates_required) - def get_gas_token(self, connector_name: str) -> str: - _, chain, _ = connector_name.split("_") - return self.gas_token_by_network[chain] + def _initialize_gas_tokens(self): + """Initialize gas tokens for AMM connectors during controller initialization.""" + import asyncio + + async def fetch_gas_tokens(): + for connector_pair in [self.config.exchange_pair_1, self.config.exchange_pair_2]: + if connector_pair.is_amm_connector(): + connector_name = connector_pair.connector_name + if connector_name not in self._gas_token_cache: + try: + gateway_client = GatewayHttpClient.get_instance() + + # Get chain and network for the connector + chain, network, error = await gateway_client.get_connector_chain_network( + connector_name + ) + + if error: + self.logger().warning(f"Failed to get chain info for {connector_name}: {error}") + continue + + # Get native currency symbol + native_currency = await gateway_client.get_native_currency_symbol(chain, network) + + if native_currency: + self._gas_token_cache[connector_name] = native_currency + self.logger().info(f"Gas token for {connector_name}: {native_currency}") + else: + self.logger().warning(f"Failed to get native currency for {connector_name}") + except Exception as e: + self.logger().error(f"Error getting gas token for {connector_name}: {e}") + + # Run the async function to fetch gas tokens + loop = asyncio.get_event_loop() + if loop.is_running(): + asyncio.create_task(fetch_gas_tokens()) + else: + loop.run_until_complete(fetch_gas_tokens()) + + def get_gas_token(self, connector_name: str) -> Optional[str]: + """Get the cached gas token for a connector.""" + return self._gas_token_cache.get(connector_name) async def update_processed_data(self): pass @@ -92,22 
+126,33 @@ def determine_executor_actions(self) -> List[ExecutorAction]: if self._len_active_sell_arbitrages == 0: executor_actions.append(self.create_arbitrage_executor_action(self.config.exchange_pair_2, self.config.exchange_pair_1)) - return executor_actions + return [action for action in executor_actions if action is not None] def create_arbitrage_executor_action(self, buying_exchange_pair: ConnectorPair, selling_exchange_pair: ConnectorPair): try: if buying_exchange_pair.is_amm_connector(): gas_token = self.get_gas_token(buying_exchange_pair.connector_name) - pair = buying_exchange_pair.trading_pair.split("-")[0] + "-" + gas_token - gas_conversion_price = self.market_data_provider.get_rate(pair) + if gas_token: + pair = buying_exchange_pair.trading_pair.split("-")[0] + "-" + gas_token + gas_conversion_price = self.market_data_provider.get_rate(pair) + else: + gas_conversion_price = None elif selling_exchange_pair.is_amm_connector(): gas_token = self.get_gas_token(selling_exchange_pair.connector_name) - pair = selling_exchange_pair.trading_pair.split("-")[0] + "-" + gas_token - gas_conversion_price = self.market_data_provider.get_rate(pair) + if gas_token: + pair = selling_exchange_pair.trading_pair.split("-")[0] + "-" + gas_token + gas_conversion_price = self.market_data_provider.get_rate(pair) + else: + gas_conversion_price = None else: gas_conversion_price = None rate = self.market_data_provider.get_rate(self.base_asset + "-" + self.config.quote_conversion_asset) + if not rate: + self.logger().warning( + f"Cannot get conversion rate for {self.base_asset}-{self.config.quote_conversion_asset}. 
" + f"Skipping executor creation.") + return None amount_quantized = self.market_data_provider.quantize_order_amount( buying_exchange_pair.connector_name, buying_exchange_pair.trading_pair, self.config.total_amount_quote / rate) diff --git a/bots/controllers/generic/pmm_mister.py b/bots/controllers/generic/pmm_mister.py new file mode 100644 index 00000000..84931b64 --- /dev/null +++ b/bots/controllers/generic/pmm_mister.py @@ -0,0 +1,518 @@ +from decimal import Decimal +from typing import Dict, List, Optional, Set, Tuple, Union + +from pydantic import Field, field_validator +from pydantic_core.core_schema import ValidationInfo + +from hummingbot.core.data_type.common import MarketDict, OrderType, PositionMode, PriceType, TradeType +from hummingbot.data_feed.candles_feed.data_types import CandlesConfig +from hummingbot.strategy_v2.controllers.controller_base import ControllerBase, ControllerConfigBase +from hummingbot.strategy_v2.executors.data_types import ConnectorPair +from hummingbot.strategy_v2.executors.position_executor.data_types import PositionExecutorConfig, TripleBarrierConfig +from hummingbot.strategy_v2.models.executor_actions import CreateExecutorAction, ExecutorAction, StopExecutorAction + + +class PMMisterConfig(ControllerConfigBase): + """ + Advanced PMM (Pure Market Making) controller with sophisticated position management. + Features hanging executors, price distance requirements, and breakeven awareness. 
+ """ + controller_type: str = "generic" + controller_name: str = "pmm_mister" + candles_config: List[CandlesConfig] = [] + connector_name: str = Field(default="binance") + trading_pair: str = Field(default="BTC-FDUSD") + portfolio_allocation: Decimal = Field(default=Decimal("0.05"), json_schema_extra={"is_updatable": True}) + target_base_pct: Decimal = Field(default=Decimal("0.2"), json_schema_extra={"is_updatable": True}) + min_base_pct: Decimal = Field(default=Decimal("0.1"), json_schema_extra={"is_updatable": True}) + max_base_pct: Decimal = Field(default=Decimal("0.4"), json_schema_extra={"is_updatable": True}) + buy_spreads: List[float] = Field(default="0.01,0.02", json_schema_extra={"is_updatable": True}) + sell_spreads: List[float] = Field(default="0.01,0.02", json_schema_extra={"is_updatable": True}) + buy_amounts_pct: Union[List[Decimal], None] = Field(default="1,2", json_schema_extra={"is_updatable": True}) + sell_amounts_pct: Union[List[Decimal], None] = Field(default="1,2", json_schema_extra={"is_updatable": True}) + executor_refresh_time: int = Field(default=30, json_schema_extra={"is_updatable": True}) + + # Enhanced timing parameters + buy_cooldown_time: int = Field(default=15, json_schema_extra={"is_updatable": True}) + sell_cooldown_time: int = Field(default=15, json_schema_extra={"is_updatable": True}) + buy_position_effectivization_time: int = Field(default=60, json_schema_extra={"is_updatable": True}) + sell_position_effectivization_time: int = Field(default=60, json_schema_extra={"is_updatable": True}) + + # Price distance requirements + min_buy_price_distance_pct: Decimal = Field(default=Decimal("0.003"), json_schema_extra={"is_updatable": True}) + min_sell_price_distance_pct: Decimal = Field(default=Decimal("0.003"), json_schema_extra={"is_updatable": True}) + + leverage: int = Field(default=20, json_schema_extra={"is_updatable": True}) + position_mode: PositionMode = Field(default="HEDGE") + take_profit: Optional[Decimal] = 
Field(default=Decimal("0.0001"), gt=0, json_schema_extra={"is_updatable": True}) + take_profit_order_type: Optional[OrderType] = Field(default="LIMIT_MAKER", json_schema_extra={"is_updatable": True}) + max_active_executors_by_level: Optional[int] = Field(default=4, json_schema_extra={"is_updatable": True}) + tick_mode: bool = Field(default=False, json_schema_extra={"is_updatable": True}) + + @field_validator("take_profit", mode="before") + @classmethod + def validate_target(cls, v): + if isinstance(v, str): + if v == "": + return None + return Decimal(v) + return v + + @field_validator('take_profit_order_type', mode="before") + @classmethod + def validate_order_type(cls, v) -> OrderType: + if isinstance(v, OrderType): + return v + elif v is None: + return OrderType.MARKET + elif isinstance(v, str): + if v.upper() in OrderType.__members__: + return OrderType[v.upper()] + elif isinstance(v, int): + try: + return OrderType(v) + except ValueError: + pass + raise ValueError(f"Invalid order type: {v}. 
Valid options are: {', '.join(OrderType.__members__)}") + + @field_validator('buy_spreads', 'sell_spreads', mode="before") + @classmethod + def parse_spreads(cls, v): + if v is None: + return [] + if isinstance(v, str): + if v == "": + return [] + return [float(x.strip()) for x in v.split(',')] + return v + + @field_validator('buy_amounts_pct', 'sell_amounts_pct', mode="before") + @classmethod + def parse_and_validate_amounts(cls, v, validation_info: ValidationInfo): + field_name = validation_info.field_name + if v is None or v == "": + spread_field = field_name.replace('amounts_pct', 'spreads') + return [1 for _ in validation_info.data[spread_field]] + if isinstance(v, str): + return [float(x.strip()) for x in v.split(',')] + elif isinstance(v, list) and len(v) != len(validation_info.data[field_name.replace('amounts_pct', 'spreads')]): + raise ValueError( + f"The number of {field_name} must match the number of {field_name.replace('amounts_pct', 'spreads')}.") + return v + + @field_validator('position_mode', mode="before") + @classmethod + def validate_position_mode(cls, v) -> PositionMode: + if isinstance(v, str): + if v.upper() in PositionMode.__members__: + return PositionMode[v.upper()] + raise ValueError(f"Invalid position mode: {v}. 
Valid options are: {', '.join(PositionMode.__members__)}") + return v + + @property + def triple_barrier_config(self) -> TripleBarrierConfig: + return TripleBarrierConfig( + take_profit=self.take_profit, + trailing_stop=None, + open_order_type=OrderType.LIMIT_MAKER, + take_profit_order_type=self.take_profit_order_type, + stop_loss_order_type=OrderType.MARKET, + time_limit_order_type=OrderType.MARKET + ) + + def get_cooldown_time(self, trade_type: TradeType) -> int: + """Get cooldown time for specific trade type""" + return self.buy_cooldown_time if trade_type == TradeType.BUY else self.sell_cooldown_time + + def get_position_effectivization_time(self, trade_type: TradeType) -> int: + """Get position effectivization time for specific trade type""" + return self.buy_position_effectivization_time if trade_type == TradeType.BUY else self.sell_position_effectivization_time + + def update_parameters(self, trade_type: TradeType, new_spreads: Union[List[float], str], + new_amounts_pct: Optional[Union[List[int], str]] = None): + spreads_field = 'buy_spreads' if trade_type == TradeType.BUY else 'sell_spreads' + amounts_pct_field = 'buy_amounts_pct' if trade_type == TradeType.BUY else 'sell_amounts_pct' + + setattr(self, spreads_field, self.parse_spreads(new_spreads)) + if new_amounts_pct is not None: + setattr(self, amounts_pct_field, + self.parse_and_validate_amounts(new_amounts_pct, self.__dict__, self.__fields__[amounts_pct_field])) + else: + setattr(self, amounts_pct_field, [1 for _ in getattr(self, spreads_field)]) + + def get_spreads_and_amounts_in_quote(self, trade_type: TradeType) -> Tuple[List[float], List[float]]: + buy_amounts_pct = getattr(self, 'buy_amounts_pct') + sell_amounts_pct = getattr(self, 'sell_amounts_pct') + + total_pct = sum(buy_amounts_pct) + sum(sell_amounts_pct) + + if trade_type == TradeType.BUY: + normalized_amounts_pct = [amt_pct / total_pct for amt_pct in buy_amounts_pct] + else: + normalized_amounts_pct = [amt_pct / total_pct for amt_pct in 
sell_amounts_pct] + + spreads = getattr(self, f'{trade_type.name.lower()}_spreads') + return spreads, [amt_pct * self.total_amount_quote * self.portfolio_allocation for amt_pct in normalized_amounts_pct] + + def update_markets(self, markets: MarketDict) -> MarketDict: + return markets.add_or_update(self.connector_name, self.trading_pair) + + +class PMMister(ControllerBase): + """ + Advanced PMM (Pure Market Making) controller with sophisticated position management. + Features: + - Hanging executors system for better position control + - Price distance requirements to prevent over-accumulation + - Breakeven awareness for dynamic parameter adjustment + - Separate buy/sell cooldown and effectivization times + """ + + def __init__(self, config: PMMisterConfig, *args, **kwargs): + super().__init__(config, *args, **kwargs) + self.config = config + self.market_data_provider.initialize_rate_sources( + [ConnectorPair(connector_name=config.connector_name, trading_pair=config.trading_pair)] + ) + + def determine_executor_actions(self) -> List[ExecutorAction]: + """ + Determine actions based on the current state with advanced position management. 
+ """ + actions = [] + + # Create new executors + actions.extend(self.create_actions_proposal()) + + # Stop executors (refresh and early stop) + actions.extend(self.stop_actions_proposal()) + + return actions + + def should_effectivize_executor(self, executor_info, current_time: int) -> bool: + """Check if a hanging executor should be effectivized""" + level_id = executor_info.custom_info.get("level_id", "") + fill_time = executor_info.custom_info["open_order_last_update"] + if not level_id or not fill_time: + return False + + trade_type = self.get_trade_type_from_level_id(level_id) + effectivization_time = self.config.get_position_effectivization_time(trade_type) + + return current_time - fill_time >= effectivization_time + + def create_actions_proposal(self) -> List[ExecutorAction]: + """ + Create actions proposal with advanced position management logic. + """ + create_actions = [] + + # Get levels to execute with advanced logic + levels_to_execute = self.get_levels_to_execute() + + # Pre-calculate spreads and amounts + buy_spreads, buy_amounts_quote = self.config.get_spreads_and_amounts_in_quote(TradeType.BUY) + sell_spreads, sell_amounts_quote = self.config.get_spreads_and_amounts_in_quote(TradeType.SELL) + reference_price = Decimal(self.processed_data["reference_price"]) + + # Create executors for each level + for level_id in levels_to_execute: + trade_type = self.get_trade_type_from_level_id(level_id) + level = self.get_level_from_level_id(level_id) + + if trade_type == TradeType.BUY: + spread_in_pct = Decimal(buy_spreads[level]) * Decimal(self.processed_data["spread_multiplier"]) + amount_quote = Decimal(buy_amounts_quote[level]) + else: + spread_in_pct = Decimal(sell_spreads[level]) * Decimal(self.processed_data["spread_multiplier"]) + amount_quote = Decimal(sell_amounts_quote[level]) + + # Calculate price and amount + side_multiplier = Decimal("-1") if trade_type == TradeType.BUY else Decimal("1") + price = reference_price * (Decimal("1") + side_multiplier 
* spread_in_pct) + amount = self.market_data_provider.quantize_order_amount( + self.config.connector_name, + self.config.trading_pair, + (amount_quote / price) + ) + + if amount == Decimal("0"): + self.logger().warning(f"The amount of the level {level_id} is 0. Skipping.") + continue + + executor_config = self.get_executor_config(level_id, price, amount) + if executor_config is not None: + create_actions.append(CreateExecutorAction( + controller_id=self.config.id, + executor_config=executor_config + )) + + return create_actions + + def get_levels_to_execute(self) -> List[str]: + """ + Get levels to execute with advanced hanging executor logic using the analyzer. + """ + current_time = self.market_data_provider.time() + + # Analyze all levels to understand executor states + all_levels_analysis = self.analyze_all_levels() + + # Get working levels (active or hanging with cooldown) + working_levels_ids = [] + + for analysis in all_levels_analysis: + level_id = analysis["level_id"] + trade_type = self.get_trade_type_from_level_id(level_id) + is_buy = level_id.startswith("buy") + current_price = Decimal(self.processed_data["reference_price"]) + # Level is working if: + # - it has active executors not trading + # - it has too many active executors for the level + # - it has a cooldown that is still active + # - not satisfied price distance requirements + if (analysis["active_executors_not_trading"] or + analysis["total_active_executors"] >= self.config.max_active_executors_by_level or + (analysis["open_order_last_update"] and current_time - analysis["open_order_last_update"] < self.config.get_cooldown_time(trade_type)) or + (is_buy and analysis["min_price"] and analysis["min_price"] * (Decimal("1") - self.config.min_buy_price_distance_pct) < current_price) or + (not is_buy and analysis["max_price"] and analysis["max_price"] * (Decimal("1") + self.config.min_sell_price_distance_pct) > current_price)): + working_levels_ids.append(level_id) + continue + return 
self.get_not_active_levels_ids(working_levels_ids) + + def stop_actions_proposal(self) -> List[ExecutorAction]: + """ + Create stop actions with enhanced refresh logic. + """ + stop_actions = [] + stop_actions.extend(self.executors_to_refresh()) + stop_actions.extend(self.process_hanging_executors()) + return stop_actions + + def executors_to_refresh(self) -> List[ExecutorAction]: + """Refresh executors that have been active too long""" + executors_to_refresh = self.filter_executors( + executors=self.executors_info, + filter_func=lambda x: ( + not x.is_trading and x.is_active and + self.market_data_provider.time() - x.timestamp > self.config.executor_refresh_time + ) + ) + return [StopExecutorAction( + controller_id=self.config.id, + keep_position=True, + executor_id=executor.id + ) for executor in executors_to_refresh] + + def process_hanging_executors(self) -> List[ExecutorAction]: + """Process hanging executors and effectivize them when appropriate""" + current_time = self.market_data_provider.time() + + # Find hanging executors that should be effectivized + executors_to_effectivize = self.filter_executors( + executors=self.executors_info, + filter_func=lambda x: ( + x.is_trading and + self.should_effectivize_executor(x, current_time) + ) + ) + return [StopExecutorAction( + controller_id=self.config.id, + keep_position=True, + executor_id=executor.id + ) for executor in executors_to_effectivize] + + async def update_processed_data(self): + """ + Update processed data with enhanced breakeven tracking. 
+ """ + reference_price = self.market_data_provider.get_price_by_type( + self.config.connector_name, self.config.trading_pair, PriceType.MidPrice + ) + + position_held = next((position for position in self.positions_held if + (position.trading_pair == self.config.trading_pair) & + (position.connector_name == self.config.connector_name)), None) + + target_position = self.config.total_amount_quote * self.config.target_base_pct + + if position_held is not None: + position_amount = position_held.amount + current_base_pct = position_held.amount_quote / self.config.total_amount_quote + deviation = (target_position - position_held.amount_quote) / target_position + unrealized_pnl_pct = position_held.unrealized_pnl_quote / position_held.amount_quote if position_held.amount_quote != 0 else Decimal( + "0") + breakeven_price = position_held.breakeven_price + else: + position_amount = 0 + current_base_pct = 0 + deviation = 1 + unrealized_pnl_pct = 0 + breakeven_price = None + + if self.config.tick_mode: + spread_multiplier = (self.market_data_provider.get_trading_rules(self.config.connector_name, + self.config.trading_pair).min_price_increment / reference_price) + else: + spread_multiplier = Decimal("1") + + self.processed_data = { + "reference_price": Decimal(reference_price), + "spread_multiplier": spread_multiplier, + "deviation": deviation, + "current_base_pct": current_base_pct, + "unrealized_pnl_pct": unrealized_pnl_pct, + "position_amount": position_amount, + "breakeven_price": breakeven_price + } + + def get_executor_config(self, level_id: str, price: Decimal, amount: Decimal): + """Get executor config for a given level""" + trade_type = self.get_trade_type_from_level_id(level_id) + return PositionExecutorConfig( + timestamp=self.market_data_provider.time(), + level_id=level_id, + connector_name=self.config.connector_name, + trading_pair=self.config.trading_pair, + entry_price=price, + amount=amount, + triple_barrier_config=self.config.triple_barrier_config, + 
leverage=self.config.leverage, + side=trade_type, + ) + + def get_level_id_from_side(self, trade_type: TradeType, level: int) -> str: + """Get level ID based on trade type and level""" + return f"{trade_type.name.lower()}_{level}" + + def get_trade_type_from_level_id(self, level_id: str) -> TradeType: + return TradeType.BUY if level_id.startswith("buy") else TradeType.SELL + + def get_level_from_level_id(self, level_id: str) -> int: + return int(level_id.split('_')[1]) + + def get_not_active_levels_ids(self, active_levels_ids: List[str]) -> List[str]: + """Get levels that should be executed based on position constraints""" + buy_ids_missing = [ + self.get_level_id_from_side(TradeType.BUY, level) + for level in range(len(self.config.buy_spreads)) + if self.get_level_id_from_side(TradeType.BUY, level) not in active_levels_ids + ] + sell_ids_missing = [ + self.get_level_id_from_side(TradeType.SELL, level) + for level in range(len(self.config.sell_spreads)) + if self.get_level_id_from_side(TradeType.SELL, level) not in active_levels_ids + ] + + current_pct = self.processed_data["current_base_pct"] + + if current_pct < self.config.min_base_pct: + return buy_ids_missing + elif current_pct > self.config.max_base_pct: + return sell_ids_missing + return buy_ids_missing + sell_ids_missing + + def analyze_all_levels(self) -> List[Dict]: + """Analyze executors for all levels.""" + level_ids: Set[str] = {e.custom_info.get("level_id") for e in self.executors_info if "level_id" in e.custom_info} + return [self._analyze_by_level_id(level_id) for level_id in level_ids] + + def _analyze_by_level_id(self, level_id: str) -> Dict: + """Analyze executors for a specific level ID.""" + filtered_executors = [e for e in self.executors_info if e.custom_info.get("level_id") == level_id and e.is_active] + + active_not_trading = [e for e in filtered_executors if e.is_active and not e.is_trading] + active_trading = [e for e in filtered_executors if e.is_active and e.is_trading] + + 
open_order_last_updates = [ + e.custom_info.get("open_order_last_update") for e in filtered_executors + if "open_order_last_update" in e.custom_info and e.custom_info["open_order_last_update"] is not None + ] + latest_open_order_update = max(open_order_last_updates) if open_order_last_updates else None + + prices = [e.config.entry_price for e in filtered_executors if hasattr(e.config, 'entry_price')] + + return { + "level_id": level_id, + "active_executors_not_trading": active_not_trading, + "active_executors_trading": active_trading, + "total_active_executors": len(active_not_trading) + len(active_trading), + "open_order_last_update": latest_open_order_update, + "min_price": min(prices) if prices else None, + "max_price": max(prices) if prices else None, + } + + def to_format_status(self) -> List[str]: + """ + Simplified status display showing executors by level_id and trade type. + """ + from decimal import Decimal + + status = [] + + # Get all required data + base_pct = self.processed_data.get('current_base_pct', Decimal("0")) + pnl = self.processed_data.get('unrealized_pnl_pct', Decimal('0')) + breakeven = self.processed_data.get('breakeven_price') + breakeven_str = f"{breakeven:.2f}" if breakeven is not None else "N/A" + current_price = self.processed_data['reference_price'] + + # Layout dimensions + outer_width = 100 + inner_width = outer_width - 4 + + # Header + status.append("╒" + "═" * inner_width + "╕") + pnl_sign = "+" if pnl >= 0 else "" + status.append( + f"│ {self.config.connector_name}:{self.config.trading_pair} | Price: {current_price:.2f} | Position: {base_pct:.1%} ({self.config.min_base_pct:.1%}-{self.config.max_base_pct:.1%}) | PnL: {pnl_sign}{pnl:.2%} | Breakeven: {breakeven_str}{' ' * (inner_width - 80)} │") + + # Executors by Level + status.append(f"├{'─' * inner_width}┤") + status.append(f"│ {'Level':<12} │ {'Type':<6} │ {'State':<10} │ {'Price':<12} │ {'Amount':<12} │ {'Distance':<12} │ {'Age':<10} │") + status.append(f"├{'─' * 12}┼{'─' * 
6}┼{'─' * 10}┼{'─' * 12}┼{'─' * 12}┼{'─' * 12}┼{'─' * 10}┤") + + # Analyze all levels and display each executor + all_levels = self.analyze_all_levels() + current_time = self.market_data_provider.time() + + for level_analysis in sorted(all_levels, key=lambda x: (not x["level_id"].startswith("buy"), x["level_id"])): + level_id = level_analysis["level_id"] + trade_type = "BUY" if level_id.startswith("buy") else "SELL" + + # Get all executors for this level + level_executors = [e for e in self.executors_info if e.custom_info.get("level_id") == level_id and e.is_active] + + if not level_executors: + continue + + for executor in level_executors: + # Determine state + if executor.is_trading: + state = "HANGING" + elif executor.is_active and not executor.is_trading: + state = "ACTIVE" + else: + state = "UNKNOWN" + + # Get price and amount + price = executor.config.entry_price if hasattr(executor.config, 'entry_price') else Decimal("0") + amount = executor.config.amount if hasattr(executor.config, 'amount') else Decimal("0") + + # Calculate distance from current price + if price > 0: + distance_pct = ((price - current_price) / current_price) * 100 + distance_str = f"{distance_pct:+.2f}%" + else: + distance_str = "N/A" + + # Calculate age + age = current_time - executor.timestamp + age_str = f"{int(age)}s" + + status.append(f"│ {level_id:<12} │ {trade_type:<6} │ {state:<10} │ {price:<12.2f} │ {amount:<12.4f} │ {distance_str:<12} │ {age_str:<10} │") + + # Bottom border + status.append(f"╘{'═' * inner_width}╛") + + return status diff --git a/bots/controllers/generic/quantum_grid_allocator.py b/bots/controllers/generic/quantum_grid_allocator.py index 19b7a47c..09744065 100644 --- a/bots/controllers/generic/quantum_grid_allocator.py +++ b/bots/controllers/generic/quantum_grid_allocator.py @@ -66,7 +66,7 @@ class QGAConfig(ControllerConfigBase): activation_bounds: Decimal = Field( default=Decimal("0.0002"), # Activation bounds for orders json_schema_extra={"is_updatable": True}) 
- bb_lenght: int = 100 + bb_length: int = 100 bb_std_dev: float = 2.0 interval: str = "1s" dynamic_grid_range: bool = Field(default=False, json_schema_extra={"is_updatable": True}) @@ -113,7 +113,7 @@ def __init__(self, config: QGAConfig, *args, **kwargs): connector=config.connector_name, trading_pair=trading_pair + "-" + config.quote_asset, interval=config.interval, - max_records=config.bb_lenght + 100 + max_records=config.bb_length + 100 ) for trading_pair in config.portfolio_allocation.keys()] super().__init__(config, *args, **kwargs) self.initialize_rate_sources() @@ -130,13 +130,13 @@ async def update_processed_data(self): connector_name=self.config.connector_name, trading_pair=trading_pair, interval=self.config.interval, - max_records=self.config.bb_lenght + 100 + max_records=self.config.bb_length + 100 ) if len(candles) == 0: bb_width = self.config.grid_range else: - bb = ta.bbands(candles["close"], length=self.config.bb_lenght, std=self.config.bb_std_dev) - bb_width = bb[f"BBB_{self.config.bb_lenght}_{self.config.bb_std_dev}"].iloc[-1] / 100 + bb = ta.bbands(candles["close"], length=self.config.bb_length, std=self.config.bb_std_dev) + bb_width = bb[f"BBB_{self.config.bb_length}_{self.config.bb_std_dev}"].iloc[-1] / 100 self.processed_data[trading_pair] = { "bb_width": bb_width } diff --git a/bots/controllers/params_docs/controller_config_template_base.md b/bots/controllers/params_docs/controller_config_template_base.md index e314a1c5..be051824 100644 --- a/bots/controllers/params_docs/controller_config_template_base.md +++ b/bots/controllers/params_docs/controller_config_template_base.md @@ -1,138 +1,66 @@ # Controller Configuration Documentation Template -## General Description +## Overview -This section should provide a comprehensive overview of the controller's trading strategy and operational characteristics. 
Include: - -- **Strategy Type**: Clearly identify the trading approach (market making, directional trading, arbitrage, cross-exchange market making, etc.) -- **Core Logic**: Explain how the controller analyzes market data and makes trading decisions -- **Market Conditions**: - - **Optimal Conditions**: Describe when this strategy performs best (e.g., high volatility, stable trends, specific liquidity conditions) - - **Challenging Conditions**: Identify scenarios where the strategy may underperform (e.g., low liquidity, extreme volatility spikes, trending markets for mean-reversion strategies) -- **Risk Profile**: Outline the primary risks and how the controller manages them -- **Expected Outcomes**: Provide realistic expectations for performance under various market conditions +2-3 sentence description covering: strategy type, core logic, optimal market conditions, and main risks. ## Parameters Each parameter should be documented with the following structure: -### `parameter_name` -- **Type**: `data_type` (e.g., `Decimal`, `int`, `str`, `List[float]`, `OrderType`) -- **Default**: `default_value` -- **Range**: `[min_value, max_value]` or constraints -- **Description**: Clear explanation of what this parameter controls - -#### Value Impact Analysis: -- **Low Values** (`example_range`): Explain the behavior and implications -- **Medium Values** (`example_range`): Typical use case and expected behavior -- **High Values** (`example_range`): Effects and potential risks -- **Edge Cases**: What happens at extremes (0, negative, very large values) +### `parameter_name`: **Type** = `type` | **Default** = `value` | **Range** = `[min, max]` | Brief description -#### Interaction Effects: -- List other parameters this interacts with -- Describe how combinations affect overall behavior +#### Impact: +- **Low** (`range`): behavior and implications +- **High** (`range`): behavior and implications -#### Example Configurations: -```yaml -# Conservative setting -parameter_name: 
value_1 - -# Moderate setting -parameter_name: value_2 - -# Aggressive setting -parameter_name: value_3 -``` +#### Interactions: +Related params and how they affect behavior together (optional, only if significant interactions exist) ## Common Configurations -This section presents complete, ready-to-use configurations for typical trading scenarios. Each configuration should include: - -### Configuration Name -**Use Case**: Brief description of when to use this configuration - -**Key Characteristics**: -- Risk level -- Capital requirements -- Market conditions suited for -- Expected behavior - -**Template**: +### Conservative +Low risk, suitable for beginners or stable markets ```yaml -# Configuration description and notes -controller_name: controller_type -controller_type: category -connector_name: PLACEHOLDER_EXCHANGE -trading_pair: PLACEHOLDER_TRADING_PAIR -portfolio_allocation: 0.XX - -# Core parameters with explanations -parameter_1: value # Why this value -parameter_2: value # Impact on strategy -parameter_3: value # Risk consideration - -# Advanced parameters -parameter_4: value -parameter_5: value +controller_name: example_name +connector_name: EXCHANGE # e.g., binance +trading_pair: PAIR # e.g., BTC-USDT +param1: value # inline comment explaining why +param2: value ``` -**Placeholders**: -- `PLACEHOLDER_EXCHANGE`: Replace with your exchange (e.g., binance, coinbase) -- `PLACEHOLDER_TRADING_PAIR`: Replace with your trading pair (e.g., BTC-USDT, ETH-USD) -- Adjust numerical values based on your risk tolerance and capital - -### Quick Start Configurations - -#### 1. Conservative Configuration -Suitable for beginners or low-risk tolerance -```yaml -# Full configuration here -``` - -#### 2. Balanced Configuration -Standard setup for most market conditions +### Balanced +Standard setup for most conditions ```yaml -# Full configuration here +controller_name: example_name +connector_name: EXCHANGE +trading_pair: PAIR +param1: value +param2: value ``` -#### 3. 
Aggressive Configuration +### Aggressive Higher risk/reward for experienced traders ```yaml -# Full configuration here +controller_name: example_name +connector_name: EXCHANGE +trading_pair: PAIR +param1: value +param2: value ``` -## Performance Tuning Guide - -### Key Parameters for Optimization -1. **Parameter Group 1** - Impact on execution speed -2. **Parameter Group 2** - Risk management controls -3. **Parameter Group 3** - Profit targets and stops - -### Common Adjustments by Market Condition -- **High Volatility**: Adjust parameters X, Y, Z -- **Low Liquidity**: Modify parameters A, B, C -- **Trending Markets**: Update parameters D, E, F - -## Troubleshooting +## Tuning by Market Condition -### Common Issues and Solutions -- **Issue**: Orders not filling - - **Solution**: Adjust spread parameters or check minimum order sizes - -- **Issue**: Excessive losses - - **Solution**: Review stop loss settings and position sizing +- **High Volatility**: Adjust params X, Y +- **Low Liquidity**: Adjust params A, B +- **Trending**: Adjust params C, D ## Best Practices -1. **Start Conservative**: Begin with smaller position sizes and wider spreads -2. **Monitor Performance**: Track key metrics before increasing exposure -3. **Regular Review**: Periodically assess and adjust parameters based on performance -4. **Risk Management**: Always set appropriate stop losses and position limits -5. 
**Testing**: Use paper trading or small amounts when trying new configurations +- Start with conservative settings and small position sizes +- Monitor performance before scaling up +- Set appropriate stop losses and position limits ## Additional Notes -- Version compatibility information -- Exchange-specific considerations -- Regulatory compliance notes (if applicable) -- Links to related documentation or resources \ No newline at end of file +Optional: version info, exchange-specific notes, or related docs \ No newline at end of file diff --git a/bots/controllers/params_docs/generic_pmm.md b/bots/controllers/params_docs/generic_pmm.md index 33a57748..22f3ca60 100644 --- a/bots/controllers/params_docs/generic_pmm.md +++ b/bots/controllers/params_docs/generic_pmm.md @@ -38,9 +38,11 @@ The PMM (Pure Market Making) controller implements a sophisticated market making - **Description**: The trading pair to make markets on #### Value Impact Analysis: -- Major pairs (BTC-USDT, ETH-USDT) typically have tighter spreads and higher competition +- **FDUSD pairs on Binance** (BTC-FDUSD, ETH-FDUSD): **RECOMMENDED** - Zero maker/taker fees, ideal for market making +- Major pairs (BTC-USDT, ETH-USDT) typically have tighter spreads and higher competition but incur trading fees - Altcoin pairs may offer wider spreads but higher volatility risk - Stablecoin pairs (USDC-USDT) have minimal directional risk but tiny spreads +- **Fee Consideration**: On Binance, FDUSD pairs have 0% fees, making them significantly more profitable for high-frequency market making ### `portfolio_allocation` - **Type**: `Decimal` @@ -111,7 +113,7 @@ The PMM (Pure Market Making) controller implements a sophisticated market making #### Example Configurations: ```yaml -# Liquid market (BTC-USDT) +# Liquid market (BTC-FDUSD on Binance - zero fees) buy_spreads: [0.0001, 0.0002, 0.0005, 0.0007] sell_spreads: [0.0002, 0.0004, 0.0006, 0.0008] @@ -261,7 +263,7 @@ buy_amounts_pct: [1, 1, 2, 3] controller_name: pmm 
controller_type: generic connector_name: binance -trading_pair: BTC-USDT +trading_pair: BTC-FDUSD # Zero fees on Binance portfolio_allocation: 0.025 # Only 2.5% allocation total_amount_quote: 1000 @@ -294,8 +296,8 @@ global_stop_loss: 0.03 ```yaml controller_name: pmm controller_type: generic -connector_name: binance_perpetual -trading_pair: ETH-USDT +connector_name: binance +trading_pair: ETH-FDUSD # Zero fees on Binance spot portfolio_allocation: 0.05 total_amount_quote: 5000 @@ -468,23 +470,25 @@ global_stop_loss: 0.15 ## Best Practices -1. **Start Small**: Begin with 1-2% portfolio allocation and low/no leverage -2. **Paper Trade First**: Test configurations without real capital +1. **Start Small**: Begin with 1-2% portfolio allocation and low/no leverage on real funds +2. **Use FDUSD Pairs on Binance**: Take advantage of zero-fee trading for BTC-FDUSD, ETH-FDUSD, and other FDUSD pairs 3. **Monitor Actively**: Watch performance for first 24-48 hours of new config 4. **Gradual Scaling**: Increase allocation/leverage gradually as confidence builds 5. **Risk Limits**: Always set global stop loss and take profit levels 6. **Market Research**: Understand the specific dynamics of your chosen trading pair 7. **Regular Reviews**: Analyze performance weekly and adjust parameters 8. **Diversification**: Consider running multiple instances on different pairs -9. **Fee Awareness**: Account for trading fees in spread calculations +9. **Fee Optimization**: Use zero-fee pairs (FDUSD on Binance) or exchanges with maker rebates 10. 
**Backup Plans**: Have exit strategy if market conditions change dramatically ## Additional Notes +- **FDUSD Advantage**: Binance offers 0% maker and taker fees on FDUSD pairs, making them ideal for PMM strategies - PMM works best in liquid markets with consistent two-way flow - Avoid during major news events unless specifically configured for volatility - Consider time-of-day effects (Asian/European/US sessions) -- Some exchanges have special maker fee rebates that improve profitability +- Some exchanges have special maker fee rebates that improve profitability (but FDUSD pairs are already free on Binance) - Always ensure sufficient balance for potential position accumulation - The controller automatically handles position sizing based on available balance -- Monitor the skew visualization in status to understand rebalancing behavior \ No newline at end of file +- Monitor the skew visualization in status to understand rebalancing behavior +- Start with small amounts of real funds - real market dynamics differ from simulations \ No newline at end of file diff --git a/config.py b/config.py index 97abdd45..5e65c173 100644 --- a/config.py +++ b/config.py @@ -56,7 +56,7 @@ class SecuritySettings(BaseSettings): class AWSSettings(BaseSettings): """AWS configuration for S3 archiving.""" - + api_key: str = Field(default="", description="AWS API key") secret_key: str = Field(default="", description="AWS secret key") s3_default_bucket_name: str = Field(default="", description="Default S3 bucket for archiving") @@ -64,6 +64,17 @@ class AWSSettings(BaseSettings): model_config = SettingsConfigDict(env_prefix="AWS_", extra="ignore") +class GatewaySettings(BaseSettings): + """Gateway service configuration.""" + + url: str = Field( + default="http://localhost:15888", + description="Gateway service URL (use 'http://gateway:15888' when running in Docker)" + ) + + model_config = SettingsConfigDict(env_prefix="GATEWAY_", extra="ignore") + + class AppSettings(BaseSettings): """Main 
application settings.""" @@ -94,12 +105,13 @@ class AppSettings(BaseSettings): class Settings(BaseSettings): """Combined application settings.""" - + broker: BrokerSettings = Field(default_factory=BrokerSettings) database: DatabaseSettings = Field(default_factory=DatabaseSettings) market_data: MarketDataSettings = Field(default_factory=MarketDataSettings) security: SecuritySettings = Field(default_factory=SecuritySettings) aws: AWSSettings = Field(default_factory=AWSSettings) + gateway: GatewaySettings = Field(default_factory=GatewaySettings) app: AppSettings = Field(default_factory=AppSettings) # Direct banned_tokens field to handle env parsing diff --git a/database/__init__.py b/database/__init__.py index 0690b994..b0380ac7 100644 --- a/database/__init__.py +++ b/database/__init__.py @@ -1,8 +1,19 @@ -from .models import AccountState, TokenState, Order, Trade, PositionSnapshot, FundingPayment, BotRun, Base +from .models import ( + AccountState, TokenState, Order, Trade, PositionSnapshot, FundingPayment, BotRun, + GatewaySwap, GatewayCLMMPosition, GatewayCLMMEvent, + Base +) from .connection import AsyncDatabaseManager -from .repositories import AccountRepository, BotRunRepository -from .repositories.order_repository import OrderRepository -from .repositories.trade_repository import TradeRepository -from .repositories.funding_repository import FundingRepository +from .repositories import ( + AccountRepository, BotRunRepository, + OrderRepository, TradeRepository, FundingRepository, + GatewaySwapRepository, GatewayCLMMRepository +) -__all__ = ["AccountState", "TokenState", "Order", "Trade", "PositionSnapshot", "FundingPayment", "BotRun", "Base", "AsyncDatabaseManager", "AccountRepository", "BotRunRepository", "OrderRepository", "TradeRepository", "FundingRepository"] \ No newline at end of file +__all__ = [ + "AccountState", "TokenState", "Order", "Trade", "PositionSnapshot", "FundingPayment", "BotRun", + "GatewaySwap", "GatewayCLMMPosition", "GatewayCLMMEvent", + 
"Base", "AsyncDatabaseManager", + "AccountRepository", "BotRunRepository", "OrderRepository", "TradeRepository", "FundingRepository", + "GatewaySwapRepository", "GatewayCLMMRepository" +] \ No newline at end of file diff --git a/database/models.py b/database/models.py index e95b07b0..a16af95e 100644 --- a/database/models.py +++ b/database/models.py @@ -177,35 +177,175 @@ class FundingPayment(Base): class BotRun(Base): __tablename__ = "bot_runs" - + id = Column(Integer, primary_key=True, index=True) - + # Bot identification bot_name = Column(String, nullable=False, index=True) instance_name = Column(String, nullable=False, index=True) - + # Deployment info deployed_at = Column(TIMESTAMP(timezone=True), server_default=func.now(), nullable=False, index=True) strategy_type = Column(String, nullable=False, index=True) # 'script' or 'controller' strategy_name = Column(String, nullable=False, index=True) config_name = Column(String, nullable=True, index=True) - + # Runtime tracking stopped_at = Column(TIMESTAMP(timezone=True), nullable=True, index=True) - + # Status tracking deployment_status = Column(String, nullable=False, default="DEPLOYED", index=True) # DEPLOYED, FAILED, ARCHIVED run_status = Column(String, nullable=False, default="CREATED", index=True) # CREATED, RUNNING, STOPPED, ERROR - + # Configuration and final state deployment_config = Column(Text, nullable=True) # JSON of full deployment config final_status = Column(Text, nullable=True) # JSON of final bot state, performance, etc. 
- + # Account info account_name = Column(String, nullable=False, index=True) - + # Metadata image_version = Column(String, nullable=True, index=True) error_message = Column(Text, nullable=True) +class GatewaySwap(Base): + __tablename__ = "gateway_swaps" + + id = Column(Integer, primary_key=True, index=True) + + # Transaction identification + transaction_hash = Column(String, nullable=False, unique=True, index=True) + + # Timestamps + timestamp = Column(TIMESTAMP(timezone=True), server_default=func.now(), nullable=False, index=True) + + # Network and connector info (unified format) + network = Column(String, nullable=False, index=True) # chain-network format: solana-mainnet-beta, ethereum-mainnet + connector = Column(String, nullable=False, index=True) # jupiter, 0x, etc. + wallet_address = Column(String, nullable=False, index=True) + + # Swap details + trading_pair = Column(String, nullable=False, index=True) + base_token = Column(String, nullable=False, index=True) + quote_token = Column(String, nullable=False, index=True) + side = Column(String, nullable=False) # BUY, SELL + + # Amounts + input_amount = Column(Numeric(precision=30, scale=18), nullable=False) + output_amount = Column(Numeric(precision=30, scale=18), nullable=False) + price = Column(Numeric(precision=30, scale=18), nullable=False) + + # Slippage and fees + slippage_pct = Column(Numeric(precision=10, scale=6), nullable=True) + gas_fee = Column(Numeric(precision=30, scale=18), nullable=True) + gas_token = Column(String, nullable=True) # SOL, ETH, etc. 
+ + # Status + status = Column(String, nullable=False, default="SUBMITTED", index=True) # SUBMITTED, CONFIRMED, FAILED + + # Pool information (optional) + pool_address = Column(String, nullable=True, index=True) + + # Additional metadata + quote_id = Column(String, nullable=True) # If swap was from a quote + error_message = Column(Text, nullable=True) + + +class GatewayCLMMPosition(Base): + __tablename__ = "gateway_clmm_positions" + + id = Column(Integer, primary_key=True, index=True) + + # Position identification + position_address = Column(String, nullable=False, unique=True, index=True) # CLMM position NFT address + pool_address = Column(String, nullable=False, index=True) + + # Network and connector info (unified format) + network = Column(String, nullable=False, index=True) # chain-network format: solana-mainnet-beta, ethereum-mainnet + connector = Column(String, nullable=False, index=True) # meteora, raydium, uniswap + wallet_address = Column(String, nullable=False, index=True) + + # Position pair + trading_pair = Column(String, nullable=False, index=True) + base_token = Column(String, nullable=False, index=True) + quote_token = Column(String, nullable=False, index=True) + + # Timestamps + created_at = Column(TIMESTAMP(timezone=True), server_default=func.now(), nullable=False, index=True) + closed_at = Column(TIMESTAMP(timezone=True), nullable=True, index=True) + + # Status + status = Column(String, nullable=False, default="OPEN", index=True) # OPEN, CLOSED + + # Price range (CLMM) + lower_price = Column(Numeric(precision=30, scale=18), nullable=False) + upper_price = Column(Numeric(precision=30, scale=18), nullable=False) + lower_bin_id = Column(Integer, nullable=True) # For bin-based CLMM (Meteora) + upper_bin_id = Column(Integer, nullable=True) + + # Initial deposit amounts (for PnL calculation) + initial_base_token_amount = Column(Numeric(precision=30, scale=18), nullable=True) + initial_quote_token_amount = Column(Numeric(precision=30, scale=18), 
nullable=True) + + # Position rent (SOL locked for position NFT, returned on close) + position_rent = Column(Numeric(precision=30, scale=18), nullable=True) + + # Current liquidity amounts + base_token_amount = Column(Numeric(precision=30, scale=18), nullable=False, default=0) + quote_token_amount = Column(Numeric(precision=30, scale=18), nullable=False, default=0) + + # In range status + in_range = Column(String, nullable=False, default="UNKNOWN") # IN_RANGE, OUT_OF_RANGE, UNKNOWN + + # Price range percentage: (upper_price - lower_price) / lower_price + percentage = Column(Numeric(precision=10, scale=6), nullable=True) + + # Accumulated fees (CLMM) + base_fee_collected = Column(Numeric(precision=30, scale=18), nullable=False, default=0) + quote_fee_collected = Column(Numeric(precision=30, scale=18), nullable=False, default=0) + base_fee_pending = Column(Numeric(precision=30, scale=18), nullable=False, default=0) + quote_fee_pending = Column(Numeric(precision=30, scale=18), nullable=False, default=0) + + # Last update timestamp + last_updated = Column(TIMESTAMP(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False) + + # Relationships + events = relationship("GatewayCLMMEvent", back_populates="position", cascade="all, delete-orphan") + + +class GatewayCLMMEvent(Base): + __tablename__ = "gateway_clmm_events" + + id = Column(Integer, primary_key=True, index=True) + position_id = Column(Integer, ForeignKey("gateway_clmm_positions.id"), nullable=False) + + # Event identification + transaction_hash = Column(String, nullable=False, index=True) + + # Timestamps + timestamp = Column(TIMESTAMP(timezone=True), server_default=func.now(), nullable=False, index=True) + + # Event type + event_type = Column(String, nullable=False, index=True) # OPEN, ADD_LIQUIDITY, REMOVE_LIQUIDITY, COLLECT_FEES, CLOSE + + # Event amounts + base_token_amount = Column(Numeric(precision=30, scale=18), nullable=True) + quote_token_amount = Column(Numeric(precision=30, 
scale=18), nullable=True) + + # For fee collection + base_fee_collected = Column(Numeric(precision=30, scale=18), nullable=True) + quote_fee_collected = Column(Numeric(precision=30, scale=18), nullable=True) + + # Gas fee + gas_fee = Column(Numeric(precision=30, scale=18), nullable=True) + gas_token = Column(String, nullable=True) + + # Status + status = Column(String, nullable=False, default="SUBMITTED", index=True) # SUBMITTED, CONFIRMED, FAILED + error_message = Column(Text, nullable=True) + + # Relationship + position = relationship("GatewayCLMMPosition", back_populates="events") + + diff --git a/database/repositories/__init__.py b/database/repositories/__init__.py index 362ea052..fa49c47e 100644 --- a/database/repositories/__init__.py +++ b/database/repositories/__init__.py @@ -1,4 +1,17 @@ from .account_repository import AccountRepository from .bot_run_repository import BotRunRepository +from .funding_repository import FundingRepository +from .order_repository import OrderRepository +from .trade_repository import TradeRepository +from .gateway_swap_repository import GatewaySwapRepository +from .gateway_clmm_repository import GatewayCLMMRepository -__all__ = ["AccountRepository", "BotRunRepository"] \ No newline at end of file +__all__ = [ + "AccountRepository", + "BotRunRepository", + "FundingRepository", + "OrderRepository", + "TradeRepository", + "GatewaySwapRepository", + "GatewayCLMMRepository", +] \ No newline at end of file diff --git a/database/repositories/gateway_clmm_repository.py b/database/repositories/gateway_clmm_repository.py new file mode 100644 index 00000000..405480bf --- /dev/null +++ b/database/repositories/gateway_clmm_repository.py @@ -0,0 +1,289 @@ +from datetime import datetime +from typing import Dict, List, Optional +from decimal import Decimal + +from sqlalchemy import desc, select +from sqlalchemy.ext.asyncio import AsyncSession + +from database.models import GatewayCLMMPosition, GatewayCLMMEvent + + +class GatewayCLMMRepository: + 
def __init__(self, session: AsyncSession): + self.session = session + + # ============================================ + # Position Management + # ============================================ + + async def create_position(self, position_data: Dict) -> GatewayCLMMPosition: + """Create a new CLMM position record.""" + position = GatewayCLMMPosition(**position_data) + self.session.add(position) + await self.session.flush() + return position + + async def get_position_by_address(self, position_address: str) -> Optional[GatewayCLMMPosition]: + """Get a position by its address.""" + result = await self.session.execute( + select(GatewayCLMMPosition).where(GatewayCLMMPosition.position_address == position_address) + ) + return result.scalar_one_or_none() + + async def update_position_liquidity( + self, + position_address: str, + base_token_amount: Decimal, + quote_token_amount: Decimal, + in_range: Optional[str] = None + ) -> Optional[GatewayCLMMPosition]: + """Update position liquidity amounts.""" + result = await self.session.execute( + select(GatewayCLMMPosition).where(GatewayCLMMPosition.position_address == position_address) + ) + position = result.scalar_one_or_none() + if position: + position.base_token_amount = float(base_token_amount) + position.quote_token_amount = float(quote_token_amount) + if in_range is not None: + position.in_range = in_range + await self.session.flush() + return position + + async def update_position_fees( + self, + position_address: str, + base_fee_pending: Optional[Decimal] = None, + quote_fee_pending: Optional[Decimal] = None, + base_fee_collected: Optional[Decimal] = None, + quote_fee_collected: Optional[Decimal] = None + ) -> Optional[GatewayCLMMPosition]: + """Update position fee amounts.""" + result = await self.session.execute( + select(GatewayCLMMPosition).where(GatewayCLMMPosition.position_address == position_address) + ) + position = result.scalar_one_or_none() + if position: + if base_fee_pending is not None: + 
position.base_fee_pending = float(base_fee_pending) + if quote_fee_pending is not None: + position.quote_fee_pending = float(quote_fee_pending) + if base_fee_collected is not None: + position.base_fee_collected = float(base_fee_collected) + if quote_fee_collected is not None: + position.quote_fee_collected = float(quote_fee_collected) + await self.session.flush() + return position + + async def close_position(self, position_address: str) -> Optional[GatewayCLMMPosition]: + """Mark position as closed.""" + result = await self.session.execute( + select(GatewayCLMMPosition).where(GatewayCLMMPosition.position_address == position_address) + ) + position = result.scalar_one_or_none() + if position: + position.status = "CLOSED" + position.closed_at = datetime.utcnow() + await self.session.flush() + return position + + async def get_positions( + self, + network: Optional[str] = None, + connector: Optional[str] = None, + wallet_address: Optional[str] = None, + trading_pair: Optional[str] = None, + status: Optional[str] = None, + position_addresses: Optional[List[str]] = None, + limit: int = 100, + offset: int = 0 + ) -> List[GatewayCLMMPosition]: + """Get positions with filtering and pagination.""" + query = select(GatewayCLMMPosition) + + # Apply filters + if network: + query = query.where(GatewayCLMMPosition.network == network) + if connector: + query = query.where(GatewayCLMMPosition.connector == connector) + if wallet_address: + query = query.where(GatewayCLMMPosition.wallet_address == wallet_address) + if trading_pair: + query = query.where(GatewayCLMMPosition.trading_pair == trading_pair) + if status: + query = query.where(GatewayCLMMPosition.status == status) + if position_addresses: + query = query.where(GatewayCLMMPosition.position_address.in_(position_addresses)) + + # Apply ordering and pagination + query = query.order_by(GatewayCLMMPosition.created_at.desc()) + query = query.limit(limit).offset(offset) + + result = await self.session.execute(query) + return 
result.scalars().all() + + async def get_open_positions( + self, + network: Optional[str] = None, + wallet_address: Optional[str] = None + ) -> List[GatewayCLMMPosition]: + """Get all open positions.""" + return await self.get_positions( + network=network, + wallet_address=wallet_address, + status="OPEN", + limit=1000 + ) + + # ============================================ + # Event Management + # ============================================ + + async def create_event(self, event_data: Dict) -> GatewayCLMMEvent: + """Create a new CLMM event record.""" + event = GatewayCLMMEvent(**event_data) + self.session.add(event) + await self.session.flush() + return event + + async def get_event_by_tx_hash( + self, + transaction_hash: str, + event_type: Optional[str] = None + ) -> Optional[GatewayCLMMEvent]: + """Get an event by transaction hash.""" + query = select(GatewayCLMMEvent).where(GatewayCLMMEvent.transaction_hash == transaction_hash) + if event_type: + query = query.where(GatewayCLMMEvent.event_type == event_type) + + result = await self.session.execute(query) + return result.scalar_one_or_none() + + async def update_event_status( + self, + transaction_hash: str, + status: str, + error_message: Optional[str] = None, + gas_fee: Optional[Decimal] = None, + gas_token: Optional[str] = None + ) -> Optional[GatewayCLMMEvent]: + """Update event status after transaction confirmation.""" + result = await self.session.execute( + select(GatewayCLMMEvent).where(GatewayCLMMEvent.transaction_hash == transaction_hash) + ) + event = result.scalar_one_or_none() + if event: + event.status = status + if error_message: + event.error_message = error_message + if gas_fee is not None: + event.gas_fee = float(gas_fee) + if gas_token: + event.gas_token = gas_token + await self.session.flush() + return event + + async def get_position_events( + self, + position_address: str, + event_type: Optional[str] = None, + limit: int = 100 + ) -> List[GatewayCLMMEvent]: + """Get all events for a 
position.""" + # First get the position + position = await self.get_position_by_address(position_address) + if not position: + return [] + + # Then get its events + query = select(GatewayCLMMEvent).where(GatewayCLMMEvent.position_id == position.id) + + if event_type: + query = query.where(GatewayCLMMEvent.event_type == event_type) + + query = query.order_by(GatewayCLMMEvent.timestamp.desc()).limit(limit) + + result = await self.session.execute(query) + return result.scalars().all() + + async def get_pending_events(self, limit: int = 100) -> List[GatewayCLMMEvent]: + """Get events that are still pending confirmation.""" + query = select(GatewayCLMMEvent).where( + GatewayCLMMEvent.status == "SUBMITTED" + ).order_by(GatewayCLMMEvent.timestamp.desc()).limit(limit) + + result = await self.session.execute(query) + return result.scalars().all() + + # ============================================ + # Utilities + # ============================================ + + def position_to_dict(self, position: GatewayCLMMPosition) -> Dict: + """Convert GatewayCLMMPosition model to dictionary format with PnL calculation.""" + # Calculate PnL if initial amounts are available + pnl_summary = None + if position.initial_base_token_amount is not None and position.initial_quote_token_amount is not None: + # Current total value = current liquidity + fees collected + current_base_total = float(position.base_token_amount) + float(position.base_fee_collected) + float(position.base_fee_pending) + current_quote_total = float(position.quote_token_amount) + float(position.quote_fee_collected) + float(position.quote_fee_pending) + + # PnL = current - initial + base_pnl = current_base_total - float(position.initial_base_token_amount) + quote_pnl = current_quote_total - float(position.initial_quote_token_amount) + + pnl_summary = { + "initial_base": float(position.initial_base_token_amount), + "initial_quote": float(position.initial_quote_token_amount), + "current_base_total": current_base_total, + 
"current_quote_total": current_quote_total, + "base_pnl": base_pnl, + "quote_pnl": quote_pnl + } + + return { + "position_address": position.position_address, + "pool_address": position.pool_address, + "network": position.network, + "connector": position.connector, + "wallet_address": position.wallet_address, + "trading_pair": position.trading_pair, + "base_token": position.base_token, + "quote_token": position.quote_token, + "created_at": position.created_at.isoformat(), + "closed_at": position.closed_at.isoformat() if position.closed_at else None, + "status": position.status, + "lower_price": float(position.lower_price), + "upper_price": float(position.upper_price), + "lower_bin_id": position.lower_bin_id, + "upper_bin_id": position.upper_bin_id, + "percentage": float(position.percentage) if position.percentage is not None else None, + "initial_base_token_amount": float(position.initial_base_token_amount) if position.initial_base_token_amount is not None else None, + "initial_quote_token_amount": float(position.initial_quote_token_amount) if position.initial_quote_token_amount is not None else None, + "position_rent": float(position.position_rent) if position.position_rent is not None else None, + "base_token_amount": float(position.base_token_amount), + "quote_token_amount": float(position.quote_token_amount), + "in_range": position.in_range, + "base_fee_collected": float(position.base_fee_collected), + "quote_fee_collected": float(position.quote_fee_collected), + "base_fee_pending": float(position.base_fee_pending), + "quote_fee_pending": float(position.quote_fee_pending), + "pnl_summary": pnl_summary, + "last_updated": position.last_updated.isoformat(), + } + + def event_to_dict(self, event: GatewayCLMMEvent) -> Dict: + """Convert GatewayCLMMEvent model to dictionary format.""" + return { + "transaction_hash": event.transaction_hash, + "timestamp": event.timestamp.isoformat(), + "event_type": event.event_type, + "base_token_amount": 
float(event.base_token_amount) if event.base_token_amount else None, + "quote_token_amount": float(event.quote_token_amount) if event.quote_token_amount else None, + "base_fee_collected": float(event.base_fee_collected) if event.base_fee_collected else None, + "quote_fee_collected": float(event.quote_fee_collected) if event.quote_fee_collected else None, + "gas_fee": float(event.gas_fee) if event.gas_fee else None, + "gas_token": event.gas_token, + "status": event.status, + "error_message": event.error_message, + } diff --git a/database/repositories/gateway_swap_repository.py b/database/repositories/gateway_swap_repository.py new file mode 100644 index 00000000..57871fb8 --- /dev/null +++ b/database/repositories/gateway_swap_repository.py @@ -0,0 +1,167 @@ +from datetime import datetime +from typing import Dict, List, Optional +from decimal import Decimal + +from sqlalchemy import desc, select +from sqlalchemy.ext.asyncio import AsyncSession + +from database.models import GatewaySwap + + +class GatewaySwapRepository: + def __init__(self, session: AsyncSession): + self.session = session + + async def create_swap(self, swap_data: Dict) -> GatewaySwap: + """Create a new swap record.""" + swap = GatewaySwap(**swap_data) + self.session.add(swap) + await self.session.flush() + return swap + + async def get_swap_by_tx_hash(self, transaction_hash: str) -> Optional[GatewaySwap]: + """Get a swap by its transaction hash.""" + result = await self.session.execute( + select(GatewaySwap).where(GatewaySwap.transaction_hash == transaction_hash) + ) + return result.scalar_one_or_none() + + async def update_swap_status( + self, + transaction_hash: str, + status: str, + error_message: Optional[str] = None, + gas_fee: Optional[Decimal] = None, + gas_token: Optional[str] = None + ) -> Optional[GatewaySwap]: + """Update swap status and optional metadata after transaction confirmation.""" + result = await self.session.execute( + select(GatewaySwap).where(GatewaySwap.transaction_hash == 
transaction_hash) + ) + swap = result.scalar_one_or_none() + if swap: + swap.status = status + if error_message: + swap.error_message = error_message + if gas_fee is not None: + swap.gas_fee = float(gas_fee) + if gas_token: + swap.gas_token = gas_token + await self.session.flush() + return swap + + async def get_swaps( + self, + network: Optional[str] = None, + connector: Optional[str] = None, + wallet_address: Optional[str] = None, + trading_pair: Optional[str] = None, + status: Optional[str] = None, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: int = 100, + offset: int = 0 + ) -> List[GatewaySwap]: + """Get swaps with filtering and pagination.""" + query = select(GatewaySwap) + + # Apply filters + if network: + query = query.where(GatewaySwap.network == network) + if connector: + query = query.where(GatewaySwap.connector == connector) + if wallet_address: + query = query.where(GatewaySwap.wallet_address == wallet_address) + if trading_pair: + query = query.where(GatewaySwap.trading_pair == trading_pair) + if status: + query = query.where(GatewaySwap.status == status) + if start_time: + start_dt = datetime.fromtimestamp(start_time) + query = query.where(GatewaySwap.timestamp >= start_dt) + if end_time: + end_dt = datetime.fromtimestamp(end_time) + query = query.where(GatewaySwap.timestamp <= end_dt) + + # Apply ordering and pagination + query = query.order_by(GatewaySwap.timestamp.desc()) + query = query.limit(limit).offset(offset) + + result = await self.session.execute(query) + return result.scalars().all() + + async def get_pending_swaps(self, limit: int = 100) -> List[GatewaySwap]: + """Get swaps that are still pending confirmation.""" + query = select(GatewaySwap).where( + GatewaySwap.status == "SUBMITTED" + ).order_by(GatewaySwap.timestamp.desc()).limit(limit) + + result = await self.session.execute(query) + return result.scalars().all() + + async def get_swaps_summary( + self, + network: Optional[str] = None, + 
wallet_address: Optional[str] = None, + start_time: Optional[int] = None, + end_time: Optional[int] = None + ) -> Dict: + """Get swap summary statistics.""" + swaps = await self.get_swaps( + network=network, + wallet_address=wallet_address, + start_time=start_time, + end_time=end_time, + limit=10000 # Get all for summary + ) + + total_swaps = len(swaps) + confirmed_swaps = sum(1 for s in swaps if s.status == "CONFIRMED") + failed_swaps = sum(1 for s in swaps if s.status == "FAILED") + pending_swaps = sum(1 for s in swaps if s.status == "SUBMITTED") + + # Calculate total volume (in quote token) + total_volume = sum( + float(s.output_amount if s.side == "BUY" else s.input_amount) + for s in swaps if s.status == "CONFIRMED" + ) + + # Calculate total gas fees + total_gas_fees = sum( + float(s.gas_fee) for s in swaps + if s.gas_fee is not None and s.status == "CONFIRMED" + ) + + return { + "total_swaps": total_swaps, + "confirmed_swaps": confirmed_swaps, + "failed_swaps": failed_swaps, + "pending_swaps": pending_swaps, + "success_rate": confirmed_swaps / total_swaps if total_swaps > 0 else 0, + "total_volume": total_volume, + "total_gas_fees": total_gas_fees, + } + + def to_dict(self, swap: GatewaySwap) -> Dict: + """Convert GatewaySwap model to dictionary format.""" + return { + "transaction_hash": swap.transaction_hash, + "timestamp": swap.timestamp.isoformat(), + "network": swap.network, + "connector": swap.connector, + "wallet_address": swap.wallet_address, + "trading_pair": swap.trading_pair, + "base_token": swap.base_token, + "quote_token": swap.quote_token, + "side": swap.side, + "input_amount": float(swap.input_amount), + "output_amount": float(swap.output_amount), + "price": float(swap.price), + "slippage_pct": float(swap.slippage_pct) if swap.slippage_pct else None, + "gas_fee": float(swap.gas_fee) if swap.gas_fee else None, + "gas_token": swap.gas_token, + "status": swap.status, + "pool_address": swap.pool_address, + "quote_id": swap.quote_id, + 
"error_message": swap.error_message, + } diff --git a/deps.py b/deps.py index accf6f40..3c1fa8c1 100644 --- a/deps.py +++ b/deps.py @@ -2,6 +2,7 @@ from services.bots_orchestrator import BotsOrchestrator from services.accounts_service import AccountsService from services.docker_service import DockerService +from services.gateway_service import GatewayService from services.market_data_feed_manager import MarketDataFeedManager from utils.bot_archiver import BotArchiver from database import AsyncDatabaseManager @@ -22,6 +23,11 @@ def get_docker_service(request: Request) -> DockerService: return request.app.state.docker_service +def get_gateway_service(request: Request) -> GatewayService: + """Get GatewayService from app state.""" + return request.app.state.gateway_service + + def get_market_data_feed_manager(request: Request) -> MarketDataFeedManager: """Get MarketDataFeedManager from app state.""" return request.app.state.market_data_feed_manager diff --git a/docker-compose.yml b/docker-compose.yml index c6167528..c33f7ebb 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -13,6 +13,12 @@ services: # Override specific values for Docker networking - BROKER_HOST=emqx - DATABASE_URL=postgresql+asyncpg://hbot:hummingbot-api@postgres:5432/hummingbot_api + - GATEWAY_URL=http://host.docker.internal:15888 + extra_hosts: + # Map host.docker.internal to host gateway for Linux compatibility + # On macOS/Windows, Docker Desktop provides this automatically + # On Linux, this maps to the docker bridge gateway IP + - "host.docker.internal:host-gateway" networks: - emqx-bridge depends_on: @@ -50,14 +56,19 @@ services: retries: 5 postgres: container_name: hummingbot-postgres - image: postgres:15 + image: postgres:16 restart: unless-stopped environment: + # These variables automatically create the user and database on first initialization - POSTGRES_DB=hummingbot_api - POSTGRES_USER=hbot - POSTGRES_PASSWORD=hummingbot-api + # Additional init parameters for better 
compatibility + - POSTGRES_INITDB_ARGS=--encoding=UTF8 volumes: - postgres-data:/var/lib/postgresql/data + # Init script as safety net - only runs on first initialization + - ./init-db.sql:/docker-entrypoint-initdb.d/init-db.sql:ro ports: - "5432:5432" networks: diff --git a/fix-database.sh b/fix-database.sh new file mode 100755 index 00000000..50a06cf1 --- /dev/null +++ b/fix-database.sh @@ -0,0 +1,147 @@ +#!/bin/bash + +# Database Troubleshooting Script +# This script helps diagnose and fix PostgreSQL database initialization issues + +set -e + +# Colors for better output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +echo "🔧 PostgreSQL Database Troubleshooting Tool" +echo "" + +# Check if PostgreSQL container is running +echo -e "${YELLOW}🔍 Checking PostgreSQL container status...${NC}" +if ! docker ps | grep -q hummingbot-postgres; then + echo -e "${RED}❌ PostgreSQL container is not running!${NC}" + echo "" + echo -e "${YELLOW}Starting PostgreSQL container...${NC}" + docker compose up postgres -d + sleep 5 +fi + +# Wait for PostgreSQL to be ready +echo -e "${YELLOW}⏳ Waiting for PostgreSQL to be ready...${NC}" +MAX_RETRIES=30 +RETRY_COUNT=0 +DB_READY=false + +while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do + if docker exec hummingbot-postgres pg_isready -U postgres > /dev/null 2>&1; then + DB_READY=true + break + fi + RETRY_COUNT=$((RETRY_COUNT + 1)) + echo -ne "\r${YELLOW}⏳ Waiting... ($RETRY_COUNT/$MAX_RETRIES)${NC}" + sleep 2 +done +echo "" + +if [ "$DB_READY" = false ]; then + echo -e "${RED}❌ PostgreSQL is not responding. 
Check logs:${NC}" + echo "docker logs hummingbot-postgres" + exit 1 +fi + +echo -e "${GREEN}✅ PostgreSQL is running!${NC}" +echo "" + +# Check current database state +echo -e "${YELLOW}🔍 Checking database configuration...${NC}" + +# Check if hbot user exists +USER_EXISTS=$(docker exec hummingbot-postgres psql -U postgres -tAc "SELECT 1 FROM pg_roles WHERE rolname='hbot'" 2>/dev/null) + +# Check if database exists +DB_EXISTS=$(docker exec hummingbot-postgres psql -U postgres -tAc "SELECT 1 FROM pg_database WHERE datname='hummingbot_api'" 2>/dev/null) + +echo "" +echo -e "${BLUE}Current Status:${NC}" +if [ "$USER_EXISTS" = "1" ]; then + echo -e " User 'hbot': ${GREEN}✓ EXISTS${NC}" +else + echo -e " User 'hbot': ${RED}✗ MISSING${NC}" +fi + +if [ "$DB_EXISTS" = "1" ]; then + echo -e " Database 'hummingbot_api': ${GREEN}✓ EXISTS${NC}" +else + echo -e " Database 'hummingbot_api': ${RED}✗ MISSING${NC}" +fi +echo "" + +# Fix if needed +if [ "$USER_EXISTS" != "1" ] || [ "$DB_EXISTS" != "1" ]; then + echo -e "${YELLOW}🔧 Fixing database configuration...${NC}" + echo "" + + # Check if init-db.sql exists + if [ ! -f "init-db.sql" ]; then + echo -e "${RED}❌ init-db.sql file not found!${NC}" + echo "Please ensure you're running this script from the hummingbot-api directory." + exit 1 + fi + + # Run initialization script + echo -e "${YELLOW}Running database initialization...${NC}" + docker exec -i hummingbot-postgres psql -U postgres < init-db.sql + + if [ $? 
-eq 0 ]; then + echo "" + echo -e "${GREEN}✅ Database initialized successfully!${NC}" + else + echo "" + echo -e "${RED}❌ Failed to initialize database${NC}" + exit 1 + fi +else + echo -e "${GREEN}✅ Database configuration is correct!${NC}" +fi + +# Test connection with hbot user +echo "" +echo -e "${YELLOW}🧪 Testing connection with hbot user...${NC}" +if docker exec hummingbot-postgres psql -U hbot -d hummingbot_api -c "SELECT version();" > /dev/null 2>&1; then + echo -e "${GREEN}✅ Connection successful!${NC}" +else + echo -e "${RED}❌ Connection failed${NC}" + echo "" + echo -e "${YELLOW}Trying to fix permissions...${NC}" + + docker exec -i hummingbot-postgres psql -U postgres << 'EOF' +\c hummingbot_api +GRANT ALL ON SCHEMA public TO hbot; +GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO hbot; +GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO hbot; +ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO hbot; +ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON SEQUENCES TO hbot; +EOF + + if docker exec hummingbot-postgres psql -U hbot -d hummingbot_api -c "SELECT version();" > /dev/null 2>&1; then + echo -e "${GREEN}✅ Permissions fixed! Connection successful!${NC}" + else + echo -e "${RED}❌ Still unable to connect. 
Manual intervention required.${NC}" + exit 1 + fi +fi + +echo "" +echo -e "${GREEN}🎉 Database is ready to use!${NC}" +echo "" +echo -e "${BLUE}Connection Details:${NC}" +echo " Host: localhost" +echo " Port: 5432" +echo " Database: hummingbot_api" +echo " User: hbot" +echo " Password: hummingbot-api" +echo "" +echo -e "${YELLOW}You can now start the API with:${NC}" +echo " make run" +echo " or" +echo " docker compose up -d" +echo "" \ No newline at end of file diff --git a/init-db.sql b/init-db.sql new file mode 100644 index 00000000..10d5e352 --- /dev/null +++ b/init-db.sql @@ -0,0 +1,43 @@ +-- Database Initialization Script +-- +-- IMPORTANT: This script serves as a SAFETY NET for edge cases where PostgreSQL's +-- automatic initialization (via POSTGRES_USER/POSTGRES_DB env vars) doesn't complete. +-- +-- In most cases, PostgreSQL will automatically create the user and database from the +-- environment variables. However, this script ensures proper initialization when: +-- - Volume data persists from incomplete initialization +-- - Container restarts interrupt the init process +-- - Manual database operations left the system in an inconsistent state +-- +-- This script is safe to run multiple times (idempotent) + +-- Create the hbot user if it doesn't exist +DO $$ +BEGIN + IF NOT EXISTS (SELECT FROM pg_user WHERE usename = 'hbot') THEN + CREATE ROLE hbot WITH LOGIN PASSWORD 'hummingbot-api'; + RAISE NOTICE 'User hbot created successfully'; + ELSE + RAISE NOTICE 'User hbot already exists'; + END IF; +END +$$; + +-- Create the database if it doesn't exist +SELECT 'CREATE DATABASE hummingbot_api OWNER hbot' +WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'hummingbot_api')\gexec + +-- Grant all privileges on the database +GRANT ALL PRIVILEGES ON DATABASE hummingbot_api TO hbot; + +-- Connect to the database and grant schema privileges +\c hummingbot_api + +-- Grant privileges on the public schema +GRANT ALL ON SCHEMA public TO hbot; +GRANT ALL PRIVILEGES ON 
ALL TABLES IN SCHEMA public TO hbot; +GRANT ALL PRIVILEGES ON ALL SEQUENCES IN SCHEMA public TO hbot; + +-- Set default privileges for future objects +ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO hbot; +ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON SEQUENCES TO hbot; \ No newline at end of file diff --git a/main.py b/main.py index de85a0be..cef9329b 100644 --- a/main.py +++ b/main.py @@ -35,6 +35,7 @@ def patched_save_to_yml(yml_path, cm): from services.bots_orchestrator import BotsOrchestrator from services.accounts_service import AccountsService from services.docker_service import DockerService +from services.gateway_service import GatewayService from services.market_data_feed_manager import MarketDataFeedManager from utils.bot_archiver import BotArchiver from routers import ( @@ -45,6 +46,9 @@ def patched_save_to_yml(yml_path, cm): connectors, controllers, docker, + gateway, + gateway_swap, + gateway_clmm, market_data, portfolio, scripts, @@ -107,9 +111,11 @@ async def lifespan(app: FastAPI): accounts_service = AccountsService( account_update_interval=settings.app.account_update_interval, - market_data_feed_manager=market_data_feed_manager + market_data_feed_manager=market_data_feed_manager, + gateway_url=settings.gateway.url ) docker_service = DockerService() + gateway_service = GatewayService() bot_archiver = BotArchiver( settings.aws.api_key, settings.aws.secret_key, @@ -123,6 +129,7 @@ async def lifespan(app: FastAPI): app.state.bots_orchestrator = bots_orchestrator app.state.accounts_service = accounts_service app.state.docker_service = docker_service + app.state.gateway_service = gateway_service app.state.bot_archiver = bot_archiver app.state.market_data_feed_manager = market_data_feed_manager @@ -191,10 +198,13 @@ def auth_user( # Include all routers with authentication app.include_router(docker.router, dependencies=[Depends(auth_user)]) +app.include_router(gateway.router, dependencies=[Depends(auth_user)]) 
app.include_router(accounts.router, dependencies=[Depends(auth_user)]) app.include_router(connectors.router, dependencies=[Depends(auth_user)]) app.include_router(portfolio.router, dependencies=[Depends(auth_user)]) app.include_router(trading.router, dependencies=[Depends(auth_user)]) +app.include_router(gateway_swap.router, dependencies=[Depends(auth_user)]) +app.include_router(gateway_clmm.router, dependencies=[Depends(auth_user)]) app.include_router(bot_orchestration.router, dependencies=[Depends(auth_user)]) app.include_router(controllers.router, dependencies=[Depends(auth_user)]) app.include_router(scripts.router, dependencies=[Depends(auth_user)]) diff --git a/models/__init__.py b/models/__init__.py index 04734461..2c4694ab 100644 --- a/models/__init__.py +++ b/models/__init__.py @@ -98,9 +98,20 @@ ) -# Docker models +# Docker models from .docker import DockerImage +# Gateway models (consolidated) +from .gateway import ( + GatewayConfig, + GatewayStatus, + GatewayWalletCredential, + GatewayWalletInfo, + GatewayBalanceRequest, + AddPoolRequest, + AddTokenRequest, +) + # Backtesting models from .backtesting import BacktestingConfig @@ -117,6 +128,36 @@ ConnectorListResponse, ) +# Gateway Trading models (Swap + CLMM only, AMM removed) +from .gateway_trading import ( + # Swap models + SwapQuoteRequest, + SwapQuoteResponse, + SwapExecuteRequest, + SwapExecuteResponse, + # CLMM models + CLMMOpenPositionRequest, + CLMMOpenPositionResponse, + CLMMAddLiquidityRequest, + CLMMRemoveLiquidityRequest, + CLMMClosePositionRequest, + CLMMCollectFeesRequest, + CLMMCollectFeesResponse, + CLMMPositionsOwnedRequest, + CLMMPositionInfo, + CLMMGetPositionInfoRequest, + CLMMPoolInfoRequest, + CLMMPoolBin, + CLMMPoolInfoResponse, + # Pool info models + GetPoolInfoRequest, + PoolInfo, + # Pool listing models + TimeBasedMetrics, + CLMMPoolListItem, + CLMMPoolListResponse, +) + # Portfolio models from .portfolio import ( TokenBalance, @@ -223,6 +264,14 @@ "CredentialRequest", # Docker 
models "DockerImage", + # Gateway models + "GatewayConfig", + "GatewayStatus", + "GatewayWalletCredential", + "GatewayWalletInfo", + "GatewayBalanceRequest", + "AddPoolRequest", + "AddTokenRequest", # Backtesting models "BacktestingConfig", # Pagination models @@ -236,6 +285,29 @@ "ConnectorTradingRulesResponse", "ConnectorOrderTypesResponse", "ConnectorListResponse", + # Gateway Trading models + "SwapQuoteRequest", + "SwapQuoteResponse", + "SwapExecuteRequest", + "SwapExecuteResponse", + "CLMMOpenPositionRequest", + "CLMMOpenPositionResponse", + "CLMMAddLiquidityRequest", + "CLMMRemoveLiquidityRequest", + "CLMMClosePositionRequest", + "CLMMCollectFeesRequest", + "CLMMCollectFeesResponse", + "CLMMPositionsOwnedRequest", + "CLMMPositionInfo", + "CLMMGetPositionInfoRequest", + "CLMMPoolInfoRequest", + "CLMMPoolBin", + "CLMMPoolInfoResponse", + "GetPoolInfoRequest", + "PoolInfo", + "TimeBasedMetrics", + "CLMMPoolListItem", + "CLMMPoolListResponse", # Portfolio models "TokenBalance", "ConnectorBalances", diff --git a/models/gateway.py b/models/gateway.py new file mode 100644 index 00000000..9ad85bfd --- /dev/null +++ b/models/gateway.py @@ -0,0 +1,74 @@ +from pydantic import BaseModel, Field +from typing import Optional, List + + +# ============================================ +# Container Management Models +# ============================================ + +class GatewayConfig(BaseModel): + """Configuration for Gateway container deployment""" + passphrase: str = Field(description="Gateway passphrase for configuration encryption") + image: str = Field(default="hummingbot/gateway:latest", description="Docker image for Gateway") + port: int = Field(default=15888, description="Port for Gateway API") + dev_mode: bool = Field(default=True, description="Enable development mode") + + +class GatewayStatus(BaseModel): + """Status information for Gateway instance""" + running: bool = Field(description="Whether Gateway container is running") + container_id: Optional[str] = 
# ============================================
# Wallet Management Models
# ============================================

class GatewayWalletCredential(BaseModel):
    """Credentials for connecting a Gateway wallet"""
    chain: str = Field(description="Blockchain chain (e.g., 'solana', 'ethereum')")
    private_key: str = Field(description="Wallet private key")
    network: Optional[str] = Field(default=None, description="Network to use (defaults to chain's default)")


class GatewayWalletInfo(BaseModel):
    """Information about a connected Gateway wallet"""
    chain: str = Field(description="Blockchain chain")
    address: str = Field(description="Wallet address")
    network: str = Field(description="Network the wallet is configured for")


# ============================================
# Pool and Token Management Models
# ============================================

class AddPoolRequest(BaseModel):
    """Request to add a liquidity pool"""
    connector_name: str = Field(description="DEX connector name (e.g., 'raydium', 'meteora')")
    type: str = Field(description="Pool type ('clmm' for concentrated liquidity)")
    network: str = Field(description="Network ID in 'chain-network' format (e.g., 'solana-mainnet-beta', 'ethereum-mainnet')")
    base: str = Field(description="Base token symbol")
    quote: str = Field(description="Quote token symbol")
    address: str = Field(description="Pool contract address")


class AddTokenRequest(BaseModel):
    """Request to add a custom token to Gateway"""
    address: str = Field(description="Token contract address")
    symbol: str = Field(description="Token symbol")
    name: Optional[str] = Field(default=None, description="Token name (defaults to symbol)")
    decimals: int = Field(description="Number of decimals for the token")


# ============================================
# Balance Query Models
# ============================================

class GatewayBalanceRequest(BaseModel):
    """Request for Gateway wallet balances"""
    account_name: str = Field(description="Account name")
    chain: str = Field(description="Blockchain chain")
    tokens: Optional[List[str]] = Field(default=None, description="List of token symbols to query (optional)")


# --- models/gateway_trading.py (new file in this patch) ---
"""
Models for Gateway DEX trading operations.
Supports swaps via routers (Jupiter, 0x) and CLMM liquidity positions (Meteora, Raydium, Uniswap V3).

Note: AMM support has been removed. Use Router for simple swaps, CLMM for liquidity provision.
"""
from typing import Optional, List, Dict, Any
from pydantic import BaseModel, Field
from decimal import Decimal


# ============================================
# Swap Models (Router: Jupiter, 0x)
# ============================================

class SwapQuoteRequest(BaseModel):
    """Request for swap price quote"""
    connector: str = Field(description="DEX router connector (e.g., 'jupiter', '0x')")
    network: str = Field(description="Network ID in 'chain-network' format (e.g., 'solana-mainnet-beta', 'ethereum-mainnet')")
    trading_pair: str = Field(description="Trading pair in BASE-QUOTE format (e.g., 'SOL-USDC')")
    side: str = Field(description="Trade side: 'BUY' or 'SELL'")
    amount: Decimal = Field(description="Amount to swap (in base token for SELL, quote token for BUY)")
    # FIX: default was the float 1.0 on a Decimal-typed field. Pydantic does not
    # validate/coerce defaults unless validate_default is enabled, so the defaulted
    # value would be a float at runtime. Use a Decimal literal instead.
    slippage_pct: Optional[Decimal] = Field(default=Decimal("1.0"), description="Maximum slippage percentage (default: 1.0)")


class SwapQuoteResponse(BaseModel):
    """Response with swap quote details"""
    base: str = Field(description="Base token symbol")
    quote: str = Field(description="Quote token symbol")
    price: Decimal = Field(description="Quoted price (base/quote)")
    amount: Decimal = Field(description="Amount specified in request (BUY: base amount to receive, SELL: base amount to sell)")
    amount_in: Optional[Decimal] = Field(default=None, description="Actual input amount (BUY: quote to spend, SELL: base to sell)")
    amount_out: Optional[Decimal] = Field(default=None, description="Actual output amount (BUY: base to receive, SELL: quote to receive)")
    expected_amount: Optional[Decimal] = Field(default=None, description="Deprecated: use amount_out instead")
    slippage_pct: Decimal = Field(description="Applied slippage percentage")
    gas_estimate: Optional[Decimal] = Field(default=None, description="Estimated gas cost")


class SwapExecuteRequest(BaseModel):
    """Request to execute a swap"""
    connector: str = Field(description="DEX router connector (e.g., 'jupiter', '0x')")
    network: str = Field(description="Network ID in 'chain-network' format (e.g., 'solana-mainnet-beta')")
    trading_pair: str = Field(description="Trading pair (e.g., 'SOL-USDC')")
    side: str = Field(description="Trade side: 'BUY' or 'SELL'")
    amount: Decimal = Field(description="Amount to swap")
    # FIX: Decimal default instead of float (same rationale as SwapQuoteRequest).
    slippage_pct: Optional[Decimal] = Field(default=Decimal("1.0"), description="Maximum slippage percentage (default: 1.0)")
    wallet_address: Optional[str] = Field(default=None, description="Wallet address (optional, uses default if not provided)")


class SwapExecuteResponse(BaseModel):
    """Response after executing swap"""
    transaction_hash: str = Field(description="Transaction hash")
    trading_pair: str = Field(description="Trading pair")
    side: str = Field(description="Trade side")
    amount: Decimal = Field(description="Amount swapped")
    status: str = Field(default="submitted", description="Transaction status")
# ============================================
# CLMM Liquidity Models (Meteora, Raydium, Uniswap V3)
# ============================================

class CLMMOpenPositionRequest(BaseModel):
    """Request to open a new CLMM position with initial liquidity"""
    connector: str = Field(description="CLMM connector (e.g., 'meteora', 'raydium', 'uniswap')")
    network: str = Field(description="Network ID in 'chain-network' format (e.g., 'solana-mainnet-beta')")
    pool_address: str = Field(description="Pool contract address")

    # Position range
    lower_price: Decimal = Field(description="Lower price for position range")
    upper_price: Decimal = Field(description="Upper price for position range")

    # Initial liquidity
    base_token_amount: Optional[Decimal] = Field(default=None, description="Amount of base token to add")
    quote_token_amount: Optional[Decimal] = Field(default=None, description="Amount of quote token to add")
    # FIX: default was the float 1.0 on a Decimal-typed field; pydantic does not
    # validate defaults unless validate_default is enabled, so the defaulted value
    # leaked through as a float. Use a Decimal literal.
    slippage_pct: Optional[Decimal] = Field(default=Decimal("1.0"), description="Maximum slippage percentage (default: 1.0)")
    wallet_address: Optional[str] = Field(default=None, description="Wallet address (optional, uses default if not provided)")

    # Connector-specific parameters (e.g., strategyType for Meteora)
    extra_params: Optional[Dict[str, Any]] = Field(default=None, description="Additional connector-specific parameters")


class CLMMOpenPositionResponse(BaseModel):
    """Response after opening a new CLMM position"""
    transaction_hash: str = Field(description="Transaction hash")
    position_address: str = Field(description="Address of the newly created position")
    trading_pair: str = Field(description="Trading pair")
    pool_address: str = Field(description="Pool address")
    lower_price: Decimal = Field(description="Lower price bound")
    upper_price: Decimal = Field(description="Upper price bound")
    status: str = Field(default="submitted", description="Transaction status")


class CLMMAddLiquidityRequest(BaseModel):
    """Request to add MORE liquidity to an EXISTING CLMM position"""
    connector: str = Field(description="CLMM connector (e.g., 'meteora', 'raydium', 'uniswap')")
    network: str = Field(description="Network ID in 'chain-network' format (e.g., 'solana-mainnet-beta')")
    position_address: str = Field(description="Existing position address to add liquidity to")
    base_token_amount: Optional[Decimal] = Field(default=None, description="Amount of base token to add")
    quote_token_amount: Optional[Decimal] = Field(default=None, description="Amount of quote token to add")
    # FIX: Decimal default instead of float (same rationale as CLMMOpenPositionRequest).
    slippage_pct: Optional[Decimal] = Field(default=Decimal("1.0"), description="Maximum slippage percentage (default: 1.0)")
    wallet_address: Optional[str] = Field(default=None, description="Wallet address (optional, uses default if not provided)")


class CLMMRemoveLiquidityRequest(BaseModel):
    """Request to remove SOME liquidity from a CLMM position (partial removal)"""
    connector: str = Field(description="CLMM connector (e.g., 'meteora', 'raydium', 'uniswap')")
    network: str = Field(description="Network ID in 'chain-network' format (e.g., 'solana-mainnet-beta')")
    position_address: str = Field(description="Position address to remove liquidity from")
    percentage: Decimal = Field(description="Percentage of liquidity to remove (0-100)")
    wallet_address: Optional[str] = Field(default=None, description="Wallet address (optional, uses default if not provided)")


class CLMMClosePositionRequest(BaseModel):
    """Request to CLOSE a CLMM position completely (removes all liquidity and closes position)"""
    connector: str = Field(description="CLMM connector (e.g., 'meteora', 'raydium', 'uniswap')")
    network: str = Field(description="Network ID in 'chain-network' format (e.g., 'solana-mainnet-beta')")
    position_address: str = Field(description="Position address to close")
    wallet_address: Optional[str] = Field(default=None, description="Wallet address (optional, uses default if not provided)")


class CLMMCollectFeesRequest(BaseModel):
    """Request to collect fees from a CLMM position"""
    connector: str = Field(description="CLMM connector (e.g., 'meteora', 'raydium', 'uniswap')")
    network: str = Field(description="Network ID in 'chain-network' format (e.g., 'solana-mainnet-beta')")
    position_address: str = Field(description="Position address to collect fees from")
    wallet_address: Optional[str] = Field(default=None, description="Wallet address (optional, uses default if not provided)")


class CLMMCollectFeesResponse(BaseModel):
    """Response after collecting fees"""
    transaction_hash: str = Field(description="Transaction hash")
    position_address: str = Field(description="Position address")
    base_fee_collected: Optional[Decimal] = Field(default=None, description="Base token fees collected")
    quote_fee_collected: Optional[Decimal] = Field(default=None, description="Quote token fees collected")
    status: str = Field(default="submitted", description="Transaction status")


class CLMMPositionsOwnedRequest(BaseModel):
    """Request to get all CLMM positions owned by a wallet for a specific pool"""
    connector: str = Field(description="CLMM connector (e.g., 'meteora', 'raydium', 'uniswap')")
    network: str = Field(description="Network ID in 'chain-network' format (e.g., 'solana-mainnet-beta')")
    pool_address: str = Field(description="Pool contract address to filter positions")
    wallet_address: Optional[str] = Field(default=None, description="Wallet address (optional, uses default if not provided)")


class CLMMPositionInfo(BaseModel):
    """Information about a CLMM liquidity position"""
    position_address: str = Field(description="Position address")
    pool_address: str = Field(description="Pool address")
    trading_pair: str = Field(description="Trading pair")
    base_token: str = Field(description="Base token symbol")
    quote_token: str = Field(description="Quote token symbol")
    base_token_amount: Decimal = Field(description="Base token amount in position")
    quote_token_amount: Decimal = Field(description="Quote token amount in position")
    current_price: Decimal = Field(description="Current pool price")
    lower_price: Decimal = Field(description="Lower price bound")
    upper_price: Decimal = Field(description="Upper price bound")
    base_fee_amount: Optional[Decimal] = Field(default=None, description="Base token uncollected fees")
    quote_fee_amount: Optional[Decimal] = Field(default=None, description="Quote token uncollected fees")
    lower_bin_id: Optional[int] = Field(default=None, description="Lower bin ID (Meteora)")
    upper_bin_id: Optional[int] = Field(default=None, description="Upper bin ID (Meteora)")
    in_range: bool = Field(description="Whether position is currently in range")


class CLMMGetPositionInfoRequest(BaseModel):
    """Request to get detailed info about a specific CLMM position"""
    connector: str = Field(description="CLMM connector (e.g., 'meteora', 'raydium', 'uniswap')")
    network: str = Field(description="Network ID in 'chain-network' format (e.g., 'solana-mainnet-beta')")
    position_address: str = Field(description="Position address to query")


class CLMMPoolInfoRequest(BaseModel):
    """Request to get CLMM pool information by pool address"""
    connector: str = Field(description="CLMM connector (e.g., 'meteora', 'raydium')")
    network: str = Field(description="Network ID in 'chain-network' format (e.g., 'solana-mainnet-beta')")
    pool_address: str = Field(description="Pool contract address")


class CLMMPoolBin(BaseModel):
    """Individual bin in a CLMM pool (e.g., Meteora)"""
    bin_id: int = Field(alias="binId", description="Bin identifier")
    price: Decimal = Field(description="Price at this bin")
    base_token_amount: Decimal = Field(alias="baseTokenAmount", description="Base token amount in bin")
    quote_token_amount: Decimal = Field(alias="quoteTokenAmount", description="Quote token amount in bin")

    model_config = {
        "populate_by_name": True,
        "json_schema_extra": {
            "example": {
                "bin_id": -374,
                "price": 0.47366592950616504,
                "base_token_amount": 19656.740028,
                "quote_token_amount": 18197.718539
            }
        }
    }


class CLMMPoolInfoResponse(BaseModel):
    """Response with detailed CLMM pool information"""
    address: str = Field(description="Pool address")
    base_token_address: str = Field(alias="baseTokenAddress", description="Base token contract address")
    quote_token_address: str = Field(alias="quoteTokenAddress", description="Quote token contract address")
    bin_step: int = Field(alias="binStep", description="Bin step (price difference between bins)")
    fee_pct: Decimal = Field(alias="feePct", description="Pool fee percentage")
    price: Decimal = Field(description="Current pool price")
    base_token_amount: Decimal = Field(alias="baseTokenAmount", description="Total base token liquidity")
    quote_token_amount: Decimal = Field(alias="quoteTokenAmount", description="Total quote token liquidity")
    active_bin_id: int = Field(alias="activeBinId", description="Currently active bin ID")
    dynamic_fee_pct: Optional[Decimal] = Field(None, alias="dynamicFeePct", description="Dynamic fee percentage")
    min_bin_id: Optional[int] = Field(None, alias="minBinId", description="Minimum bin ID (Meteora-specific)")
    max_bin_id: Optional[int] = Field(None, alias="maxBinId", description="Maximum bin ID (Meteora-specific)")
    bins: List[CLMMPoolBin] = Field(default_factory=list, description="List of bins with liquidity")

    model_config = {
        "populate_by_name": True,
        "json_schema_extra": {
            "example": {
                "address": "5hbf9JP8k5zdrZp9pokPypFQoBse5mGCmW6nqodurGcd",
                "base_token_address": "METvsvVRapdj9cFLzq4Tr43xK4tAjQfwX76z3n6mWQL",
                "quote_token_address": "EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v",
                "bin_step": 20,
                "fee_pct": 0.2,
                "price": 0.47366592950616504,
                "base_token_amount": 8645709.142366,
                "quote_token_amount": 1095942.335132,
                "active_bin_id": -374,
                "dynamic_fee_pct": 0.2,
                "min_bin_id": -21835,
                "max_bin_id": 21835,
                "bins": []
            }
        }
    }
# ============================================
# Pool Information Models
# ============================================

class GetPoolInfoRequest(BaseModel):
    """Parameters identifying a pool whose information should be fetched."""
    connector: str = Field(description="DEX connector (e.g., 'meteora', 'raydium', 'jupiter')")
    network: str = Field(description="Network ID in 'chain-network' format (e.g., 'solana-mainnet-beta')")
    trading_pair: str = Field(description="Trading pair (e.g., 'SOL-USDC')")


class PoolInfo(BaseModel):
    """Snapshot of a liquidity pool's state; CLMM-only fields stay None for routers."""
    type: str = Field(description="Pool type: 'clmm' or 'router'")
    address: str = Field(description="Pool address")
    trading_pair: str = Field(description="Trading pair")
    base_token: str = Field(description="Base token symbol")
    quote_token: str = Field(description="Quote token symbol")
    current_price: Decimal = Field(description="Current pool price")
    base_token_amount: Decimal = Field(description="Base token liquidity in pool")
    quote_token_amount: Decimal = Field(description="Quote token liquidity in pool")
    fee_pct: Decimal = Field(description="Pool fee percentage")

    # Only populated for concentrated-liquidity pools
    bin_step: Optional[int] = Field(None, description="Bin step (CLMM)")
    active_bin_id: Optional[int] = Field(None, description="Active bin ID (CLMM)")


# ============================================
# CLMM Pool Listing Models
# ============================================

class TimeBasedMetrics(BaseModel):
    """One metric (volume, fees, or fee-to-TVL ratio) bucketed by look-back window."""
    min_30: Optional[Decimal] = Field(None, description="30 minute metric")
    hour_1: Optional[Decimal] = Field(None, description="1 hour metric")
    hour_2: Optional[Decimal] = Field(None, description="2 hour metric")
    hour_4: Optional[Decimal] = Field(None, description="4 hour metric")
    hour_12: Optional[Decimal] = Field(None, description="12 hour metric")
    hour_24: Optional[Decimal] = Field(None, description="24 hour metric")


class CLMMPoolListItem(BaseModel):
    """One entry of a CLMM pool listing; most analytics fields are optional."""
    address: str = Field(description="Pool address")
    name: str = Field(description="Pool name (e.g., 'SOL-USDC')")
    trading_pair: str = Field(description="Trading pair derived from tokens")
    mint_x: str = Field(description="Base token mint address")
    mint_y: str = Field(description="Quote token mint address")
    bin_step: int = Field(description="Bin step size")
    current_price: Decimal = Field(description="Current pool price")
    # NOTE(review): liquidity/reserve_x/reserve_y are strings as delivered upstream;
    # the *_amount fields carry the decimal-converted values — confirm with the feed.
    liquidity: str = Field(description="Total liquidity in pool")
    reserve_x: str = Field(description="Base token reserves")
    reserve_y: str = Field(description="Quote token reserves")
    reserve_x_amount: Optional[Decimal] = Field(None, description="Base token reserves as decimal amount")
    reserve_y_amount: Optional[Decimal] = Field(None, description="Quote token reserves as decimal amount")

    # Fee structure
    base_fee_percentage: Optional[str] = Field(None, description="Base fee percentage")
    max_fee_percentage: Optional[str] = Field(None, description="Maximum fee percentage")
    protocol_fee_percentage: Optional[str] = Field(None, description="Protocol fee percentage")

    # APR/APY
    apr: Optional[Decimal] = Field(None, description="Annual percentage rate")
    apy: Optional[Decimal] = Field(None, description="Annual percentage yield")
    farm_apr: Optional[Decimal] = Field(None, description="Farming annual percentage rate")
    farm_apy: Optional[Decimal] = Field(None, description="Farming annual percentage yield")

    # Volume and fees
    volume_24h: Optional[Decimal] = Field(None, description="24h trading volume")
    fees_24h: Optional[Decimal] = Field(None, description="24h fees collected")
    today_fees: Optional[Decimal] = Field(None, description="Today's fees collected")
    cumulative_trade_volume: Optional[str] = Field(None, description="Cumulative trade volume")
    cumulative_fee_volume: Optional[str] = Field(None, description="Cumulative fee volume")

    # Time-based metrics
    volume: Optional[TimeBasedMetrics] = Field(None, description="Volume across different time periods")
    fees: Optional[TimeBasedMetrics] = Field(None, description="Fees across different time periods")
    fee_tvl_ratio: Optional[TimeBasedMetrics] = Field(None, description="Fee-to-TVL ratio across different time periods")

    # Rewards
    reward_mint_x: Optional[str] = Field(None, description="Base token reward mint address")
    reward_mint_y: Optional[str] = Field(None, description="Quote token reward mint address")

    # Metadata
    tags: Optional[List[str]] = Field(None, description="Pool tags")
    is_verified: bool = Field(default=False, description="Whether tokens are verified")
    is_blacklisted: Optional[bool] = Field(None, description="Whether pool is blacklisted")
    hide: Optional[bool] = Field(None, description="Whether pool should be hidden")
    launchpad: Optional[str] = Field(None, description="Associated launchpad")


class CLMMPoolListResponse(BaseModel):
    """Paginated listing of CLMM pools."""
    pools: List[CLMMPoolListItem] = Field(description="List of available pools")
    total: int = Field(description="Total number of pools")
    page: int = Field(description="Current page number")
    limit: int = Field(description="Results per page")
# ============================================
# Gateway Wallet Management Endpoints
# ============================================

@router.get("/gateway/wallets")
async def list_gateway_wallets(accounts_service: AccountsService = Depends(get_accounts_service)):
    """
    List all wallets managed by Gateway.
    Gateway manages its own encrypted wallet storage.

    Returns:
        List of wallet information from Gateway

    Raises:
        HTTPException: 503 if Gateway unavailable
    """
    try:
        return await accounts_service.get_gateway_wallets()
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@router.post("/gateway/add-wallet", status_code=status.HTTP_201_CREATED)
async def add_gateway_wallet(
    wallet_credential: GatewayWalletCredential,
    accounts_service: AccountsService = Depends(get_accounts_service)
):
    """
    Add a wallet to Gateway. Gateway handles encryption and storage internally.

    Args:
        wallet_credential: Wallet credentials (chain and private_key)

    Returns:
        Wallet information from Gateway including address

    Raises:
        HTTPException: 503 if Gateway unavailable, 400 on validation error

    NOTE(review): non-HTTPException failures are re-raised here as 500, while
    the contract above mentions 400 on validation error — confirm intended.
    """
    try:
        return await accounts_service.add_gateway_wallet(
            chain=wallet_credential.chain,
            private_key=wallet_credential.private_key
        )
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@router.delete("/gateway/{chain}/{address}")
async def remove_gateway_wallet(
    chain: str,
    address: str,
    accounts_service: AccountsService = Depends(get_accounts_service)
):
    """
    Remove a wallet from Gateway.

    Args:
        chain: Blockchain chain (e.g., 'solana', 'ethereum')
        address: Wallet address to remove

    Returns:
        Success message

    Raises:
        HTTPException: 503 if Gateway unavailable
    """
    try:
        return await accounts_service.remove_gateway_wallet(chain, address)
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
config content script_config_content = { "script_file_name": "v2_with_controllers.py", @@ -659,24 +661,24 @@ async def deploy_v2_controllers( "markets": {}, "controllers_config": controllers_with_extension, } - + # Add optional drawdown parameters if provided if deployment.max_global_drawdown_quote is not None: script_config_content["max_global_drawdown_quote"] = deployment.max_global_drawdown_quote if deployment.max_controller_drawdown_quote is not None: script_config_content["max_controller_drawdown_quote"] = deployment.max_controller_drawdown_quote - + # Save the script config to the scripts directory scripts_dir = os.path.join("conf", "scripts") script_config_path = os.path.join(scripts_dir, script_config_filename) fs_util.dump_dict_to_yaml(script_config_path, script_config_content) - + logging.info(f"Generated script config: {script_config_filename} with content: {script_config_content}") - + # Create the V2ScriptDeployment with the generated script config instance_config = V2ScriptDeployment( - instance_name=deployment.instance_name, + instance_name=unique_instance_name, credentials_profile=deployment.credentials_profile, image=deployment.image, script="v2_with_controllers.py", @@ -689,14 +691,15 @@ async def deploy_v2_controllers( if response.get("success"): response["script_config_generated"] = script_config_filename response["controllers_deployed"] = deployment.controllers_config - + response["unique_instance_name"] = unique_instance_name + # Track bot run if deployment was successful try: async with db_manager.get_session_context() as session: bot_run_repo = BotRunRepository(session) await bot_run_repo.create_bot_run( - bot_name=deployment.instance_name, - instance_name=deployment.instance_name, + bot_name=unique_instance_name, + instance_name=unique_instance_name, strategy_type="controller", strategy_name="v2_with_controllers", account_name=deployment.credentials_profile, @@ -704,11 +707,11 @@ async def deploy_v2_controllers( 
async def get_supported_order_types(request: Request, connector_name: str):
    """
    Get order types supported by a specific connector.

    This endpoint uses the MarketDataFeedManager to access non-trading connector
    instances, which means no authentication or account setup is required.

    Args:
        request: FastAPI request object
        connector_name: Name of the connector (e.g., 'binance', 'binance_perpetual')

    Returns:
        List of supported order types (LIMIT, MARKET, LIMIT_MAKER)

    Raises:
        HTTPException: 404 if connector not found, 500 for other errors
    """
    try:
        market_data_feed_manager: MarketDataFeedManager = request.app.state.market_data_feed_manager

        # Non-trading connector instances live in the MarketDataProvider's _rate_sources
        connector_instance = market_data_feed_manager.market_data_provider._rate_sources.get(connector_name)
        if not connector_instance:
            raise HTTPException(status_code=404, detail=f"Connector '{connector_name}' not found")

        # Guard-clause form: reject connectors lacking the query, then answer
        if not hasattr(connector_instance, 'supported_order_types'):
            raise HTTPException(status_code=404, detail=f"Connector '{connector_name}' does not support order types query")

        order_types = [order_type.name for order_type in connector_instance.supported_order_types()]
        return {"connector": connector_name, "supported_order_types": order_types}
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error retrieving order types: {str(e)}")


# --- routers/gateway.py (new file in this patch) ---
from fastapi import APIRouter, HTTPException, Depends, Query
from typing import Optional, Dict, List
import re

from models import GatewayConfig, GatewayStatus, AddPoolRequest, AddTokenRequest
from services.gateway_service import GatewayService
from services.accounts_service import AccountsService
from deps import get_gateway_service, get_accounts_service

router = APIRouter(tags=["Gateway"], prefix="/gateway")


def camel_to_snake(name: str) -> str:
    """Convert camelCase to snake_case"""
    # Two passes: split before an upper-then-lower run, then before any
    # remaining lower/digit-to-upper boundary, and lowercase the result.
    partially_split = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', partially_split).lower()
def snake_to_camel(name: str) -> str:
    """
    Convert snake_case to camelCase, handling common acronyms.

    Special cases:
    - url -> URL
    - cu -> CU (compute units)
    - id -> ID
    - api -> API
    - rpc -> RPC
    """
    # Components that should be fully uppercased instead of title-cased
    acronyms = {'url', 'cu', 'id', 'api', 'rpc', 'uri'}

    head, *tail = name.split('_')

    pieces = [head]  # leading component keeps its original (lowercase) form
    for part in tail:
        pieces.append(part.upper() if part.lower() in acronyms else part.title())

    return ''.join(pieces)


def normalize_gateway_response(data: Dict) -> Dict:
    """
    Normalize Gateway response data to Python conventions.
    - Converts camelCase to snake_case
    - Maps baseSymbol -> base, quoteSymbol -> quote
    - Creates trading_pair field
    """
    if not isinstance(data, dict):
        return data

    normalized = {}
    for key, value in data.items():
        # Special symbol mappings take precedence over generic case conversion
        if key == "baseSymbol":
            normalized["base"] = value
            continue
        if key == "quoteSymbol":
            normalized["quote"] = value
            continue

        snake_key = camel_to_snake(key)
        if isinstance(value, dict):
            normalized[snake_key] = normalize_gateway_response(value)
        elif isinstance(value, list):
            normalized[snake_key] = [
                normalize_gateway_response(item) if isinstance(item, dict) else item
                for item in value
            ]
        else:
            normalized[snake_key] = value

    # Derive the combined trading_pair once both halves are present
    if "base" in normalized and "quote" in normalized:
        normalized["trading_pair"] = f"{normalized['base']}-{normalized['quote']}"

    return normalized
@router.get("/status", response_model=GatewayStatus)
async def get_gateway_status(gateway_service: GatewayService = Depends(get_gateway_service)):
    """Return the current status of the Gateway container."""
    return gateway_service.get_status()


def _raise_unless_success(result: Dict, marker: str, marker_status: int) -> Dict:
    """
    Translate a failed service result into an HTTPException.

    When ``result["message"]`` contains ``marker`` the response uses
    ``marker_status``; any other failure maps to 500. Successful results
    pass through unchanged.
    """
    if not result["success"]:
        message = result["message"]
        code = marker_status if marker in message else 500
        raise HTTPException(status_code=code, detail=message)
    return result


@router.post("/start")
async def start_gateway(
    config: GatewayConfig,
    gateway_service: GatewayService = Depends(get_gateway_service)
):
    """Start the Gateway container with the supplied configuration."""
    return _raise_unless_success(gateway_service.start(config), "already running", 400)


@router.post("/stop")
async def stop_gateway(gateway_service: GatewayService = Depends(get_gateway_service)):
    """Stop the Gateway container."""
    return _raise_unless_success(gateway_service.stop(), "not found", 404)


@router.post("/restart")
async def restart_gateway(
    config: Optional[GatewayConfig] = None,
    gateway_service: GatewayService = Depends(get_gateway_service)
):
    """
    Restart Gateway container.

    If config is provided, the container will be removed and recreated with new configuration.
    If no config is provided, the container will be stopped and started with existing configuration.
    """
    return _raise_unless_success(gateway_service.restart(config), "not found", 404)


@router.get("/logs")
async def get_gateway_logs(
    tail: int = Query(default=100, ge=1, le=10000),
    gateway_service: GatewayService = Depends(get_gateway_service)
):
    """Return the last ``tail`` lines of the Gateway container logs."""
    return _raise_unless_success(gateway_service.get_logs(tail), "not found", 404)


# ============================================
# Connectors
# ============================================

@router.get("/connectors")
async def list_connectors(accounts_service: AccountsService = Depends(get_accounts_service)) -> Dict:
    """
    List all available DEX connectors with their configurations.

    Returns connector details including name, trading types, chain, and networks.
    All fields normalized to snake_case.
    """
    try:
        if not await accounts_service.gateway_client.ping():
            raise HTTPException(status_code=503, detail="Gateway service is not available")

        raw = await accounts_service.gateway_client._request("GET", "config/connectors")
        return normalize_gateway_response(raw)

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error listing connectors: {str(e)}")
@router.get("/connectors/{connector_name}")
async def get_connector_config(
    connector_name: str,
    accounts_service: AccountsService = Depends(get_accounts_service)
) -> Dict:
    """
    Get configuration for a specific DEX connector.

    Args:
        connector_name: Connector name (e.g., 'meteora', 'raydium')
    """
    try:
        if not await accounts_service.gateway_client.ping():
            raise HTTPException(status_code=503, detail="Gateway service is not available")

        raw = await accounts_service.gateway_client.get_config(connector_name)
        return normalize_gateway_response(raw)

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error getting connector config: {str(e)}")


@router.post("/connectors/{connector_name}")
async def update_connector_config(
    connector_name: str,
    config_updates: Dict,
    accounts_service: AccountsService = Depends(get_accounts_service)
) -> Dict:
    """
    Update configuration for a DEX connector.

    Args:
        connector_name: Connector name (e.g., 'meteora', 'raydium')
        config_updates: Dict with path-value pairs to update. Keys may be
            snake_case (e.g., {"slippage_pct": 0.5}) or camelCase
            (e.g., {"slippagePct": 0.5}).

    Note: changes only take effect after a Gateway restart.
    """
    try:
        if not await accounts_service.gateway_client.ping():
            raise HTTPException(status_code=503, detail="Gateway service is not available")

        results = []
        for path, value in config_updates.items():
            # Gateway expects camelCase config paths; translate snake_case keys.
            gateway_path = snake_to_camel(path) if '_' in path else path
            results.append(
                await accounts_service.gateway_client.update_config(connector_name, gateway_path, value)
            )

        return {
            "success": True,
            "message": f"Updated {len(results)} config parameter(s) for {connector_name}. Restart Gateway for changes to take effect.",
            "restart_required": True,
            "restart_endpoint": "POST /gateway/restart",
            "results": results
        }

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error updating connector config: {str(e)}")


# ============================================
# Chains (Networks) and Tokens
# ============================================

@router.get("/chains")
async def list_chains(accounts_service: AccountsService = Depends(get_accounts_service)) -> Dict:
    """
    List all available blockchain chains and their networks.

    This also serves as the networks list endpoint.
    """
    try:
        if not await accounts_service.gateway_client.ping():
            raise HTTPException(status_code=503, detail="Gateway service is not available")

        return await accounts_service.gateway_client.get_chains()

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error listing chains: {str(e)}")
+ """ + try: + if not await accounts_service.gateway_client.ping(): + raise HTTPException(status_code=503, detail="Gateway service is not available") + + pools = await accounts_service.gateway_client.get_pools(connector_name, network) + + if not pools: + raise HTTPException(status_code=400, detail=f"No pools found for {connector_name}/{network}") + + # Normalize each pool + normalized_pools = [normalize_gateway_response(pool) for pool in pools] + return normalized_pools + + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error getting pools: {str(e)}") + + +@router.post("/pools") +async def add_pool( + pool_request: AddPoolRequest, + accounts_service: AccountsService = Depends(get_accounts_service) +) -> Dict: + """ + Add a custom liquidity pool. + + Args: + pool_request: Pool details (connector, type, network, base, quote, address) + """ + try: + if not await accounts_service.gateway_client.ping(): + raise HTTPException(status_code=503, detail="Gateway service is not available") + + result = await accounts_service.gateway_client.add_pool( + connector=pool_request.connector_name, + pool_type=pool_request.type, + network=pool_request.network, + base_symbol=pool_request.base, + quote_symbol=pool_request.quote, + address=pool_request.address + ) + + if "error" in result: + raise HTTPException(status_code=400, detail=f"Failed to add pool: {result.get('error')}") + + trading_pair = f"{pool_request.base}-{pool_request.quote}" + return { + "message": f"Pool {trading_pair} added to {pool_request.connector_name}/{pool_request.network}", + "trading_pair": trading_pair + } + + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error adding pool: {str(e)}") + + +# ============================================ +# Networks (Primary Endpoints) +# ============================================ + +@router.get("/networks") +async def list_networks(accounts_service: AccountsService 
# ============================================
# Networks (Primary Endpoints)
# ============================================

@router.get("/networks")
async def list_networks(accounts_service: AccountsService = Depends(get_accounts_service)) -> Dict:
    """
    List all available networks across all chains.

    Returns a flattened list of network IDs in the format 'chain-network'.
    This is the primary interface for network discovery.
    """
    try:
        if not await accounts_service.gateway_client.ping():
            raise HTTPException(status_code=503, detail="Gateway service is not available")

        chains_result = await accounts_service.gateway_client.get_chains()

        # Flatten every (chain, network) combination into a network-ID record.
        networks = []
        chain_entries = chains_result.get("chains")
        if isinstance(chain_entries, list):
            for entry in chain_entries:
                chain = entry.get("chain")
                for net in entry.get("networks", []):
                    networks.append({
                        "network_id": f"{chain}-{net}",
                        "chain": chain,
                        "network": net
                    })

        return {
            "networks": networks,
            "count": len(networks)
        }

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error listing networks: {str(e)}")
@router.get("/networks/{network_id}")
async def get_network_config(
    network_id: str,
    accounts_service: AccountsService = Depends(get_accounts_service)
) -> Dict:
    """
    Get configuration for a specific network.

    Args:
        network_id: Network ID in format 'chain-network' (e.g., 'solana-mainnet-beta', 'ethereum-mainnet')

    Example: GET /gateway/networks/solana-mainnet-beta
    """
    try:
        if not await accounts_service.gateway_client.ping():
            raise HTTPException(status_code=503, detail="Gateway service is not available")

        raw = await accounts_service.gateway_client.get_config(network_id)
        return normalize_gateway_response(raw)

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error getting network config: {str(e)}")


@router.post("/networks/{network_id}")
async def update_network_config(
    network_id: str,
    config_updates: Dict,
    accounts_service: AccountsService = Depends(get_accounts_service)
) -> Dict:
    """
    Update configuration for a specific network.

    Args:
        network_id: Network ID in format 'chain-network' (e.g., 'solana-mainnet-beta')
        config_updates: Dict with path-value pairs to update. Keys may be
            snake_case (e.g., {"node_url": "https://..."}) or camelCase
            (e.g., {"nodeURL": "https://..."}).

    Example: POST /gateway/networks/solana-mainnet-beta
    """
    try:
        if not await accounts_service.gateway_client.ping():
            raise HTTPException(status_code=503, detail="Gateway service is not available")

        results = []
        for path, value in config_updates.items():
            # Gateway expects camelCase config paths; translate snake_case keys.
            gateway_path = snake_to_camel(path) if '_' in path else path
            results.append(
                await accounts_service.gateway_client.update_config(network_id, gateway_path, value)
            )

        return {
            "success": True,
            "message": f"Updated {len(results)} config parameter(s) for {network_id}. Restart Gateway for changes to take effect.",
            "restart_required": True,
            "restart_endpoint": "POST /gateway/restart",
            "results": results
        }

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error updating network config: {str(e)}")


@router.get("/networks/{network_id}/tokens")
async def get_network_tokens(
    network_id: str,
    search: Optional[str] = Query(default=None),
    accounts_service: AccountsService = Depends(get_accounts_service)
) -> Dict:
    """
    Get available tokens for a network.

    Args:
        network_id: Network ID in format 'chain-network' (e.g., 'solana-mainnet-beta')
        search: Filter tokens by symbol or name

    Example: GET /gateway/networks/solana-mainnet-beta/tokens?search=USDC
    """
    try:
        if not await accounts_service.gateway_client.ping():
            raise HTTPException(status_code=503, detail="Gateway service is not available")

        # network_id carries both halves: '<chain>-<network>' (network may itself contain '-').
        parts = network_id.split('-', 1)
        if len(parts) != 2:
            raise HTTPException(status_code=400, detail=f"Invalid network_id format. Expected 'chain-network', got '{network_id}'")

        chain, network = parts
        result = await accounts_service.gateway_client.get_tokens(chain, network)

        # Case-insensitive substring filter on symbol or name.
        if search and "tokens" in result:
            needle = search.lower()
            result["tokens"] = [
                token for token in result["tokens"]
                if needle in token.get("symbol", "").lower() or
                needle in token.get("name", "").lower()
            ]

        return result

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error getting network tokens: {str(e)}")
def _split_network_id(network_id: str):
    """Split a 'chain-network' ID into (chain, network), raising 400 on bad input."""
    parts = network_id.split('-', 1)
    if len(parts) != 2:
        raise HTTPException(status_code=400, detail=f"Invalid network_id format. Expected 'chain-network', got '{network_id}'")
    return parts[0], parts[1]


@router.post("/networks/{network_id}/tokens")
async def add_network_token(
    network_id: str,
    token_request: AddTokenRequest,
    accounts_service: AccountsService = Depends(get_accounts_service)
) -> Dict:
    """
    Add a custom token to Gateway's token list for a specific network.

    Args:
        network_id: Network ID in format 'chain-network' (e.g., 'solana-mainnet-beta', 'ethereum-mainnet')
        token_request: Token details (address, symbol, name, decimals)

    Example: POST /gateway/networks/ethereum-mainnet/tokens
        {
            "address": "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48",
            "symbol": "USDC",
            "name": "USD Coin",
            "decimals": 6
        }

    Note: After adding a token, restart Gateway for changes to take effect.
    """
    try:
        if not await accounts_service.gateway_client.ping():
            raise HTTPException(status_code=503, detail="Gateway service is not available")

        chain, network = _split_network_id(network_id)

        # Fall back to the symbol when no display name was supplied.
        token_name = token_request.name if token_request.name else token_request.symbol

        result = await accounts_service.gateway_client.add_token(
            chain=chain,
            network=network,
            address=token_request.address,
            symbol=token_request.symbol,
            name=token_name,
            decimals=token_request.decimals
        )

        if "error" in result:
            raise HTTPException(status_code=400, detail=f"Failed to add token: {result.get('error')}")

        return {
            "success": True,
            "message": f"Token {token_request.symbol} added to {network_id}. Restart Gateway for changes to take effect.",
            "restart_required": True,
            "restart_endpoint": "POST /gateway/restart",
            "token": {
                "symbol": token_request.symbol,
                "address": token_request.address,
                "network_id": network_id
            }
        }

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error adding token: {str(e)}")


@router.delete("/networks/{network_id}/tokens/{token_address}")
async def delete_network_token(
    network_id: str,
    token_address: str,
    accounts_service: AccountsService = Depends(get_accounts_service)
) -> Dict:
    """
    Delete a custom token from Gateway's token list for a specific network.

    Args:
        network_id: Network ID in format 'chain-network' (e.g., 'solana-mainnet-beta', 'ethereum-mainnet')
        token_address: Token contract address to delete

    Example: DELETE /gateway/networks/solana-mainnet-beta/tokens/9QFfgxdSqH5zT7j6rZb1y6SZhw2aFtcQu2r6BuYpump

    Note: After deleting a token, restart Gateway for changes to take effect.
    """
    try:
        if not await accounts_service.gateway_client.ping():
            raise HTTPException(status_code=503, detail="Gateway service is not available")

        chain, network = _split_network_id(network_id)

        result = await accounts_service.gateway_client.delete_token(
            chain=chain,
            network=network,
            token_address=token_address
        )

        if "error" in result:
            raise HTTPException(status_code=400, detail=f"Failed to delete token: {result.get('error')}")

        return {
            "success": True,
            "message": f"Token {token_address} deleted from {network_id}. Restart Gateway for changes to take effect.",
            "restart_required": True,
            "restart_endpoint": "POST /gateway/restart",
            "token_address": token_address,
            "network_id": network_id
        }

    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error deleting token: {str(e)}")
Restart Gateway for changes to take effect.", + "restart_required": True, + "restart_endpoint": "POST /gateway/restart", + "token_address": token_address, + "network_id": network_id + } + + except HTTPException: + raise + except Exception as e: + raise HTTPException(status_code=500, detail=f"Error deleting token: {str(e)}") diff --git a/routers/gateway_clmm.py b/routers/gateway_clmm.py new file mode 100644 index 00000000..1ced78aa --- /dev/null +++ b/routers/gateway_clmm.py @@ -0,0 +1,1455 @@ +""" +Gateway CLMM Router - Handles DEX CLMM liquidity operations via Hummingbot Gateway. +Supports CLMM connectors (Meteora, Raydium, Uniswap V3) for concentrated liquidity positions. +""" +import logging +from typing import List, Optional +from decimal import Decimal +import aiohttp + +from fastapi import APIRouter, Depends, HTTPException, Query + +from deps import get_accounts_service, get_database_manager +from services.accounts_service import AccountsService +from database import AsyncDatabaseManager +from database.repositories import GatewayCLMMRepository +from models import ( + CLMMOpenPositionRequest, + CLMMOpenPositionResponse, + CLMMAddLiquidityRequest, + CLMMRemoveLiquidityRequest, + CLMMClosePositionRequest, + CLMMCollectFeesRequest, + CLMMCollectFeesResponse, + CLMMPositionsOwnedRequest, + CLMMPositionInfo, + CLMMPoolInfoResponse, + CLMMPoolListItem, + CLMMPoolListResponse, + TimeBasedMetrics, +) + +logger = logging.getLogger(__name__) + +router = APIRouter(tags=["Gateway CLMM"], prefix="/gateway") + + +async def fetch_meteora_pools( + page: int = 0, + limit: int = 50, + search_term: Optional[str] = None, + sort_key: Optional[str] = "volume", + order_by: Optional[str] = "desc", + include_unknown: bool = True +) -> Optional[dict]: + """ + Fetch available pools from Meteora API. + + Args: + page: Page number (default: 0) + limit: Results per page (default: 50) + search_term: Search term to filter pools + sort_key: Sort key (tvl, volume, feetvlratio, etc.) 
async def fetch_meteora_pools(
    page: int = 0,
    limit: int = 50,
    search_term: Optional[str] = None,
    sort_key: Optional[str] = "volume",
    order_by: Optional[str] = "desc",
    include_unknown: bool = True
) -> Optional[dict]:
    """
    Query the public Meteora DLMM API for grouped pool listings.

    Args:
        page: Page number (default: 0)
        limit: Results per page (default: 50)
        search_term: Search term to filter pools
        sort_key: Sort key (tvl, volume, feetvlratio, etc.)
        order_by: Sort order (asc, desc)
        include_unknown: Include pools with unverified tokens

    Returns:
        Parsed JSON payload from the Meteora API, or None on any failure.
    """
    params = {
        "page": page,
        "limit": limit,
        # The Meteora API expects booleans as lowercase strings.
        "include_unknown": str(include_unknown).lower()
    }
    # Only forward the optional filters that were actually supplied.
    optional_params = {"search_term": search_term, "sort_key": sort_key, "order_by": order_by}
    params.update({key: value for key, value in optional_params.items() if value})

    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(
                "https://dlmm-api.meteora.ag/pair/all_by_groups",
                params=params,
                headers={"accept": "application/json"}
            ) as response:
                response.raise_for_status()
                return await response.json()
    except aiohttp.ClientError as e:
        logger.error(f"Failed to fetch pools from Meteora API: {e}")
        return None
    except Exception as e:
        logger.error(f"Error fetching Meteora pools: {e}", exc_info=True)
        return None
async def fetch_raydium_pool_info(pool_address: str) -> Optional[dict]:
    """
    Fetch CLMM pool data for a pool from the public Raydium v3 API.

    Args:
        pool_address: Pool contract address

    Returns:
        Parsed JSON payload, or None when the request fails or the API
        reports success=false.
    """
    try:
        url = f"https://api-v3.raydium.io/pools/line/position?id={pool_address}"
        async with aiohttp.ClientSession() as session:
            async with session.get(url, headers={"accept": "application/json"}) as response:
                response.raise_for_status()
                payload = await response.json()

                if not payload.get("success"):
                    logger.error(f"Raydium API returned unsuccessful response: {payload}")
                    return None

                return payload
    except aiohttp.ClientError as e:
        logger.error(f"Failed to fetch pool info from Raydium API: {e}")
        return None
    except Exception as e:
        logger.error(f"Error fetching Raydium pool info: {e}", exc_info=True)
        return None


def transform_raydium_to_clmm_response(raydium_data: dict, pool_address: str) -> dict:
    """
    Map a Raydium line/position API payload onto Gateway's CLMM pool-info shape.

    Args:
        raydium_data: Response from the Raydium API
        pool_address: Pool contract address

    Returns:
        Dictionary matching Gateway's pool info structure.

    Raises:
        ValueError: when the payload contains no liquidity bins.
    """
    line_data = raydium_data.get("data", {}).get("line", [])
    if not line_data:
        raise ValueError("No liquidity bins found in Raydium pool data")

    by_tick = sorted(line_data, key=lambda entry: entry.get("tick", 0))

    # Raydium does not expose the active bin directly; the middle bin of the
    # tick-sorted range serves as a proxy for the pool's current state.
    active_bin = by_tick[len(by_tick) // 2]

    # Aggregate liquidity across every bin. Quote-side liquidity is not
    # reported separately, so it mirrors the base-side total (approximation).
    total_base = sum(Decimal(str(entry.get("liquidity", 0))) for entry in line_data)
    total_quote = total_base

    min_tick = by_tick[0].get("tick", 0)
    max_tick = by_tick[-1].get("tick", 0)

    # Ticks double as bin IDs here (assumed 1:1 mapping for simplicity).
    bins = [
        {
            "binId": entry.get("tick", 0),
            "price": Decimal(str(entry.get("price", 0))),
            "baseTokenAmount": Decimal(str(entry.get("liquidity", 0))),
            "quoteTokenAmount": Decimal(str(entry.get("liquidity", 0)))  # approximation
        }
        for entry in line_data[:100]  # cap bin count for performance
    ]

    return {
        "address": pool_address,
        "baseTokenAddress": "unknown",   # not provided by the Raydium API
        "quoteTokenAddress": "unknown",  # not provided by the Raydium API
        "binStep": 1,                    # default; not provided by the Raydium API
        "feePct": Decimal("0.25"),       # typical Raydium CLMM fee
        "price": Decimal(str(active_bin.get("price", 0))),
        "baseTokenAmount": total_base,
        "quoteTokenAmount": total_quote,
        "activeBinId": active_bin.get("tick", 0),
        "dynamicFeePct": None,
        "minBinId": min_tick,
        "maxBinId": max_tick,
        "bins": bins
    }


def get_transaction_status_from_response(gateway_response: dict) -> str:
    """
    Map Gateway's numeric transaction ``status`` field to a label.

    Gateway reports status 1 for transactions confirmed on-chain; status 0
    (or a missing field) means submitted but not yet confirmed.
    """
    return "CONFIRMED" if gateway_response.get("status") == 1 else "SUBMITTED"
def get_native_gas_token(chain: str) -> str:
    """
    Return the native gas token symbol for a blockchain (case-insensitive).

    Args:
        chain: Blockchain name (e.g., 'solana', 'ethereum', 'polygon')

    Returns:
        Gas token symbol (e.g., 'SOL', 'ETH', 'MATIC'), or 'UNKNOWN' for
        chains not in the map.
    """
    return {
        "solana": "SOL",
        "ethereum": "ETH",
        "polygon": "MATIC",
        "avalanche": "AVAX",
        "optimism": "ETH",
        "arbitrum": "ETH",
        "base": "ETH",
        "bsc": "BNB",
        "cronos": "CRO",
    }.get(chain.lower(), "UNKNOWN")


async def _refresh_position_data(position, accounts_service: AccountsService, clmm_repo: GatewayCLMMRepository):
    """
    Sync a stored CLMM position with its live on-chain state from Gateway.

    Updates the in_range flag, liquidity amounts and pending fees, and marks
    the position CLOSED when it disappeared from Gateway or holds zero
    liquidity.
    """
    try:
        # position.network is a 'chain-network' ID; only the network half is
        # needed for the Gateway call.
        chain, network = accounts_service.gateway_client.parse_network_id(position.network)

        try:
            owned = await accounts_service.gateway_client.clmm_positions_owned(
                connector=position.connector,
                network=network,
                wallet_address=position.wallet_address,
                pool_address=position.pool_address
            )

            # Locate our position among everything the wallet owns in the pool.
            live_state = None
            if isinstance(owned, list):
                live_state = next(
                    (entry for entry in owned if entry.get("address") == position.position_address),
                    None
                )

            if live_state is None:
                # Gateway no longer reports the position: closed externally.
                logger.info(f"Position {position.position_address} not found on Gateway, marking as CLOSED")
                await clmm_repo.close_position(position.position_address)
                return
        except Exception as e:
            # Transient fetch failure: keep stored state instead of closing.
            logger.error(f"Error fetching position from Gateway: {e}")
            return

        current_price = Decimal(str(live_state.get("price", 0)))
        lower_price = Decimal(str(live_state.get("lowerPrice", 0))) if live_state.get("lowerPrice") else Decimal("0")
        upper_price = Decimal(str(live_state.get("upperPrice", 0))) if live_state.get("upperPrice") else Decimal("0")

        # Range status is only decidable when all three prices are known.
        in_range = "UNKNOWN"
        if current_price > 0 and lower_price > 0 and upper_price > 0:
            in_range = "IN_RANGE" if lower_price <= current_price <= upper_price else "OUT_OF_RANGE"

        base_amount = Decimal(str(live_state.get("baseTokenAmount", 0)))
        quote_amount = Decimal(str(live_state.get("quoteTokenAmount", 0)))

        # Zero liquidity on both sides means the position was emptied/closed.
        if base_amount == 0 and quote_amount == 0:
            logger.info(f"Position {position.position_address} has zero liquidity, marking as CLOSED")
            await clmm_repo.close_position(position.position_address)
            return

        await clmm_repo.update_position_liquidity(
            position_address=position.position_address,
            base_token_amount=base_amount,
            quote_token_amount=quote_amount,
            in_range=in_range
        )

        # Persist uncollected fees only when either side is non-zero.
        base_fees = Decimal(str(live_state.get("baseFeeAmount", 0)))
        quote_fees = Decimal(str(live_state.get("quoteFeeAmount", 0)))
        if base_fees or quote_fees:
            await clmm_repo.update_position_fees(
                position_address=position.position_address,
                base_fee_pending=base_fees,
                quote_fee_pending=quote_fees
            )

        logger.debug(f"Refreshed position {position.position_address}: in_range={in_range}, "
                     f"base={base_amount}, quote={quote_amount}")

    except Exception as e:
        logger.error(f"Error refreshing position {position.position_address}: {e}", exc_info=True)
        raise
@router.get("/clmm/pool-info", response_model=CLMMPoolInfoResponse, response_model_by_alias=False)
async def get_clmm_pool_info(
    connector: str,
    network: str,
    pool_address: str,
    accounts_service: AccountsService = Depends(get_accounts_service)
):
    """
    Get detailed information about a CLMM pool by pool address.

    Args:
        connector: CLMM connector (e.g., 'meteora', 'raydium')
        network: Network ID in 'chain-network' format (e.g., 'solana-mainnet-beta')
        pool_address: Pool contract address

    Example:
        GET /gateway/clmm/pool-info?connector=meteora&network=solana-mainnet-beta&pool_address=2sf5NYcY4zUPXUSmG6f66mskb24t5F8S11pC1Nz5nQT3

    Returns:
        Pool information including liquidity, price, bins (for Meteora), etc.
        All field names are returned in snake_case format.

    Note:
        For Raydium connector, uses Raydium API directly instead of Gateway.
    """
    try:
        if connector.lower() == "raydium":
            # Raydium pool state is read straight from the Raydium API, so this
            # path does not require Gateway to be running.
            logger.info(f"Using Raydium API directly for pool info: {pool_address}")

            raydium_data = await fetch_raydium_pool_info(pool_address)
            if raydium_data is None:
                raise HTTPException(status_code=503, detail="Failed to get pool info from Raydium API")

            return CLMMPoolInfoResponse(**transform_raydium_to_clmm_response(raydium_data, pool_address))

        # Every other connector goes through Gateway.
        if not await accounts_service.gateway_client.ping():
            raise HTTPException(status_code=503, detail="Gateway service is not available")

        chain, network_name = accounts_service.gateway_client.parse_network_id(network)

        pool_info = await accounts_service.gateway_client.clmm_pool_info(
            connector=connector,
            network=network_name,
            pool_address=pool_address
        )
        if pool_info is None:
            raise HTTPException(status_code=503, detail="Failed to get pool info from Gateway")

        # Pydantic field aliases translate Gateway's camelCase keys to snake_case.
        return CLMMPoolInfoResponse(**pool_info)

    except HTTPException:
        raise
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error(f"Error getting CLMM pool info: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error getting CLMM pool info: {str(e)}")
def _opt_decimal(value) -> Optional[Decimal]:
    """Convert an optional numeric value to Decimal via str(); None stays None."""
    return Decimal(str(value)) if value is not None else None


def _to_time_metrics(data: Optional[dict]) -> Optional[TimeBasedMetrics]:
    """
    Convert a Meteora time-bucketed metrics dict into a TimeBasedMetrics model.

    Returns None when the source dict is missing or empty. Absent buckets stay
    None; present values are converted via str -> Decimal to avoid float
    representation artifacts.
    """
    if not data:
        return None
    return TimeBasedMetrics(
        min_30=_opt_decimal(data.get("min_30")),
        hour_1=_opt_decimal(data.get("hour_1")),
        hour_2=_opt_decimal(data.get("hour_2")),
        hour_4=_opt_decimal(data.get("hour_4")),
        hour_12=_opt_decimal(data.get("hour_12")),
        hour_24=_opt_decimal(data.get("hour_24"))
    )


@router.get("/clmm/pools", response_model=CLMMPoolListResponse)
async def get_clmm_pools(
    connector: str,
    page: int = Query(0, ge=0, description="Page number"),
    limit: int = Query(50, ge=1, le=100, description="Results per page (max 100)"),
    search_term: Optional[str] = Query(None, description="Search term to filter pools"),
    sort_key: Optional[str] = Query("volume", description="Sort key (volume, tvl, etc.)"),
    order_by: Optional[str] = Query("desc", description="Sort order (asc, desc)"),
    include_unknown: bool = Query(True, description="Include pools with unverified tokens")
):
    """
    Get list of available CLMM pools for a connector.

    Currently supports: meteora

    Args:
        connector: CLMM connector (e.g., 'meteora')
        page: Page number (default: 0)
        limit: Results per page (default: 50, max: 100)
        search_term: Search term to filter pools (optional)
        sort_key: Sort by field (volume, tvl, feetvlratio, etc.)
        order_by: Sort order (asc, desc)
        include_unknown: Include pools with unverified tokens

    Example:
        GET /gateway/clmm/pools?connector=meteora&search_term=SOL&limit=20

    Returns:
        List of available pools with trading pairs, addresses, liquidity,
        volume, APR, etc.
    """
    try:
        # Only Meteora exposes a public grouped-pool listing API for now.
        if connector.lower() != "meteora":
            raise HTTPException(
                status_code=400,
                detail=f"Pool listing not supported for connector '{connector}'. Currently only 'meteora' is supported."
            )

        logger.info(f"Fetching pools from Meteora API (page={page}, limit={limit}, search={search_term})")
        meteora_data = await fetch_meteora_pools(
            page=page,
            limit=limit,
            search_term=search_term,
            sort_key=sort_key,
            order_by=order_by,
            include_unknown=include_unknown
        )
        if meteora_data is None:
            raise HTTPException(status_code=503, detail="Failed to fetch pools from Meteora API")

        # Flatten Meteora's group/pair structure into CLMMPoolListItem records.
        # (Fix: the metrics-conversion helper was previously re-defined inside
        # this inner loop on every iteration; it is now a module-level helper.)
        pools = []
        for group in meteora_data.get("groups", []):
            for pair in group.get("pairs", []):
                name = pair.get("name", "")
                # Fall back to truncated mint addresses when no name is set.
                trading_pair = name if name else f"{pair.get('mint_x', '')[:8]}-{pair.get('mint_y', '')[:8]}"

                pools.append(CLMMPoolListItem(
                    address=pair.get("address", ""),
                    name=name,
                    trading_pair=trading_pair,
                    mint_x=pair.get("mint_x", ""),
                    mint_y=pair.get("mint_y", ""),
                    bin_step=pair.get("bin_step", 0),
                    current_price=Decimal(str(pair.get("current_price", 0))),
                    liquidity=pair.get("liquidity", "0"),
                    reserve_x=pair.get("reserve_x", "0"),
                    reserve_y=pair.get("reserve_y", "0"),
                    reserve_x_amount=_opt_decimal(pair.get("reserve_x_amount")),
                    reserve_y_amount=_opt_decimal(pair.get("reserve_y_amount")),

                    # Fee structure
                    base_fee_percentage=pair.get("base_fee_percentage"),
                    max_fee_percentage=pair.get("max_fee_percentage"),
                    protocol_fee_percentage=pair.get("protocol_fee_percentage"),

                    # APR/APY
                    apr=_opt_decimal(pair.get("apr")),
                    apy=_opt_decimal(pair.get("apy")),
                    farm_apr=_opt_decimal(pair.get("farm_apr")),
                    farm_apy=_opt_decimal(pair.get("farm_apy")),

                    # Volume and fees
                    volume_24h=_opt_decimal(pair.get("trade_volume_24h")),
                    fees_24h=_opt_decimal(pair.get("fees_24h")),
                    today_fees=_opt_decimal(pair.get("today_fees")),
                    cumulative_trade_volume=pair.get("cumulative_trade_volume"),
                    cumulative_fee_volume=pair.get("cumulative_fee_volume"),

                    # Time-based metrics
                    volume=_to_time_metrics(pair.get("volume")),
                    fees=_to_time_metrics(pair.get("fees")),
                    fee_tvl_ratio=_to_time_metrics(pair.get("fee_tvl_ratio")),

                    # Rewards
                    reward_mint_x=pair.get("reward_mint_x"),
                    reward_mint_y=pair.get("reward_mint_y"),

                    # Metadata
                    tags=pair.get("tags"),
                    is_verified=pair.get("is_verified", False),
                    is_blacklisted=pair.get("is_blacklisted"),
                    hide=pair.get("hide"),
                    launchpad=pair.get("launchpad")
                ))

        total = meteora_data.get("total", len(pools))

        return CLMMPoolListResponse(
            pools=pools,
            total=total,
            page=page,
            limit=limit
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting CLMM pools: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error getting CLMM pools: {str(e)}")
@router.post("/clmm/open", response_model=CLMMOpenPositionResponse)
async def open_clmm_position(
    request: CLMMOpenPositionRequest,
    accounts_service: AccountsService = Depends(get_accounts_service),
    db_manager: AsyncDatabaseManager = Depends(get_database_manager)
):
    """
    Open a NEW CLMM position with initial liquidity.

    Example:
        connector: 'meteora'
        network: 'solana-mainnet-beta'
        pool_address: '2sf5NYcY4zUPXUSmG6f66mskb24t5F8S11pC1Nz5nQT3'
        lower_price: 150
        upper_price: 250
        base_token_amount: 0.01
        quote_token_amount: 2
        slippage_pct: 1
        wallet_address: (optional)
        extra_params: {"strategyType": 0}  # Meteora-specific

    Returns:
        Transaction hash and position address
    """
    try:
        if not await accounts_service.gateway_client.ping():
            raise HTTPException(status_code=503, detail="Gateway service is not available")

        # Parse network_id (e.g. 'solana-mainnet-beta' -> chain + network)
        chain, network = accounts_service.gateway_client.parse_network_id(request.network)

        # Get wallet address (falls back to the default wallet when not provided)
        wallet_address = await accounts_service.gateway_client.get_wallet_address_or_default(
            chain=chain,
            wallet_address=request.wallet_address
        )

        # Get pool info to extract trading pair for database
        pool_info = await accounts_service.gateway_client.clmm_pool_info(
            connector=request.connector,
            network=network,
            pool_address=request.pool_address
        )

        # Extract tokens from pool info
        base_token_address = pool_info.get("baseTokenAddress", "")
        quote_token_address = pool_info.get("quoteTokenAddress", "")

        # Store full token addresses in the database
        base = base_token_address if base_token_address else "UNKNOWN"
        quote = quote_token_address if quote_token_address else "UNKNOWN"
        trading_pair = f"{base}-{quote}"

        # Open position. Token amounts may be omitted (single-sided liquidity).
        # BUG FIX: slippage uses an explicit `is not None` check so a requested
        # 0% slippage is not silently coerced to the 1.0% default by falsiness.
        result = await accounts_service.gateway_client.clmm_open_position(
            connector=request.connector,
            network=network,
            wallet_address=wallet_address,
            pool_address=request.pool_address,
            lower_price=float(request.lower_price),
            upper_price=float(request.upper_price),
            base_token_amount=float(request.base_token_amount) if request.base_token_amount else None,
            quote_token_amount=float(request.quote_token_amount) if request.quote_token_amount else None,
            slippage_pct=float(request.slippage_pct) if request.slippage_pct is not None else 1.0,
            extra_params=request.extra_params
        )
        if not result:
            # NOTE(review): 404 kept for backward compatibility; a 5xx may fit better.
            raise HTTPException(status_code=404, detail=f"Failed to open CLMM position: {trading_pair}")

        transaction_hash = result.get("signature") or result.get("txHash") or result.get("hash")

        # Position address can be at root level or nested in data object
        data = result.get("data", {})
        position_address = result.get("positionAddress") or result.get("position") or data.get("positionAddress") or data.get("position")

        # Extract position rent (SOL locked for position NFT)
        position_rent = data.get("positionRent")
        if position_rent:
            logger.info(f"Position rent: {position_rent} SOL")

        if not transaction_hash:
            raise HTTPException(status_code=500, detail="No transaction hash returned from Gateway")
        if not position_address:
            raise HTTPException(status_code=500, detail="No position address returned from Gateway")

        # Calculate percentage: (upper_price - lower_price) / lower_price
        percentage = None
        if request.lower_price and request.upper_price and request.lower_price > 0:
            percentage = float((request.upper_price - request.lower_price) / request.lower_price)
            logger.info(f"Position price range percentage: {percentage:.4f} ({percentage*100:.2f}%)")

        # Get transaction status from Gateway response
        tx_status = get_transaction_status_from_response(result)

        # Extract gas fee from Gateway response
        gas_fee = data.get("fee")
        gas_token = get_native_gas_token(chain)

        # Store position and event in database
        try:
            async with db_manager.get_session_context() as session:
                clmm_repo = GatewayCLMMRepository(session)

                # Create position record
                position_data = {
                    "position_address": position_address,
                    "pool_address": request.pool_address,
                    "network": request.network,
                    "connector": request.connector,
                    "wallet_address": wallet_address,
                    "trading_pair": trading_pair,
                    "base_token": base,
                    "quote_token": quote,
                    "status": "OPEN",
                    "lower_price": float(request.lower_price),
                    "upper_price": float(request.upper_price),
                    "percentage": percentage,
                    "initial_base_token_amount": float(request.base_token_amount) if request.base_token_amount else 0,
                    "initial_quote_token_amount": float(request.quote_token_amount) if request.quote_token_amount else 0,
                    "position_rent": float(position_rent) if position_rent else None,
                    "base_token_amount": float(request.base_token_amount) if request.base_token_amount else 0,
                    "quote_token_amount": float(request.quote_token_amount) if request.quote_token_amount else 0,
                    "in_range": "UNKNOWN"  # Will be updated by poller
                }

                position = await clmm_repo.create_position(position_data)
                logger.info(f"Recorded CLMM position in database: {position_address}")

                # Create OPEN event with polled status
                event_data = {
                    "position_id": position.id,
                    "transaction_hash": transaction_hash,
                    "event_type": "OPEN",
                    "base_token_amount": float(request.base_token_amount) if request.base_token_amount else None,
                    "quote_token_amount": float(request.quote_token_amount) if request.quote_token_amount else None,
                    "gas_fee": float(gas_fee) if gas_fee else None,
                    "gas_token": gas_token,
                    "status": tx_status
                }

                await clmm_repo.create_event(event_data)
                logger.info(f"Recorded CLMM OPEN event in database: {transaction_hash} (status: {tx_status}, gas: {gas_fee} {gas_token})")
        except Exception as db_error:
            # Log but don't fail the operation - it was submitted successfully
            logger.error(f"Error recording CLMM position in database: {db_error}", exc_info=True)

        return CLMMOpenPositionResponse(
            transaction_hash=transaction_hash,
            position_address=position_address,
            trading_pair=trading_pair,
            pool_address=request.pool_address,
            lower_price=request.lower_price,
            upper_price=request.upper_price,
            status="submitted"
        )

    except HTTPException:
        raise
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error(f"Error opening CLMM position: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error opening CLMM position: {str(e)}")
+# if not transaction_hash: +# raise HTTPException(status_code=500, detail="No transaction hash returned from Gateway") +# +# # Get transaction status from Gateway response +# tx_status = get_transaction_status_from_response(result) +# +# # Extract gas fee from Gateway response +# data = result.get("data", {}) +# gas_fee = data.get("fee") +# gas_token = "SOL" if chain == "solana" else "ETH" if chain == "ethereum" else None +# +# # Store ADD_LIQUIDITY event in database +# try: +# async with db_manager.get_session_context() as session: +# clmm_repo = GatewayCLMMRepository(session) +# +# # Get position to link event +# position = await clmm_repo.get_position_by_address(request.position_address) +# if position: +# event_data = { +# "position_id": position.id, +# "transaction_hash": transaction_hash, +# "event_type": "ADD_LIQUIDITY", +# "base_token_amount": float(request.base_token_amount) if request.base_token_amount else None, +# "quote_token_amount": float(request.quote_token_amount) if request.quote_token_amount else None, +# "gas_fee": float(gas_fee) if gas_fee else None, +# "gas_token": gas_token, +# "status": tx_status +# } +# await clmm_repo.create_event(event_data) +# logger.info(f"Recorded CLMM ADD_LIQUIDITY event: {transaction_hash} (status: {tx_status}, gas: {gas_fee} {gas_token})") +# except Exception as db_error: +# logger.error(f"Error recording ADD_LIQUIDITY event: {db_error}", exc_info=True) +# +# return { +# "transaction_hash": transaction_hash, +# "position_address": request.position_address, +# "status": "submitted" +# } +# +# except HTTPException: +# raise +# except ValueError as e: +# raise HTTPException(status_code=400, detail=str(e)) +# except Exception as e: +# logger.error(f"Error adding liquidity to CLMM position: {e}", exc_info=True) +# raise HTTPException(status_code=500, detail=f"Error adding liquidity to CLMM position: {str(e)}") +# +# +# @router.post("/clmm/remove") +# async def remove_liquidity_from_clmm_position( +# request: 
CLMMRemoveLiquidityRequest, +# accounts_service: AccountsService = Depends(get_accounts_service), +# db_manager: AsyncDatabaseManager = Depends(get_database_manager) +# ): +# """ +# Remove SOME liquidity from a CLMM position (partial removal). +# +# Example: +# connector: 'meteora' +# network: 'solana-mainnet-beta' +# position_address: '...' +# percentage: 50 +# wallet_address: (optional) +# +# Returns: +# Transaction hash +# """ +# try: +# if not await accounts_service.gateway_client.ping(): +# raise HTTPException(status_code=503, detail="Gateway service is not available") +# +# # Parse network_id +# chain, network = accounts_service.gateway_client.parse_network_id(request.network) +# +# # Get wallet address +# wallet_address = await accounts_service.gateway_client.get_wallet_address_or_default( +# chain=chain, +# wallet_address=request.wallet_address +# ) +# +# # Remove liquidity +# result = await accounts_service.gateway_client.clmm_remove_liquidity( +# connector=request.connector, +# network=network, +# wallet_address=wallet_address, +# position_address=request.position_address, +# percentage=float(request.percentage) +# ) +# +# transaction_hash = result.get("signature") or result.get("txHash") or result.get("hash") +# if not transaction_hash: +# raise HTTPException(status_code=500, detail="No transaction hash returned from Gateway") +# +# # Get transaction status from Gateway response +# tx_status = get_transaction_status_from_response(result) +# +# # Extract gas fee from Gateway response +# data = result.get("data", {}) +# gas_fee = data.get("fee") +# gas_token = "SOL" if chain == "solana" else "ETH" if chain == "ethereum" else None +# +# # Store REMOVE_LIQUIDITY event in database +# try: +# async with db_manager.get_session_context() as session: +# clmm_repo = GatewayCLMMRepository(session) +# +# # Get position to link event +# position = await clmm_repo.get_position_by_address(request.position_address) +# if position: +# event_data = { +# "position_id": 
@router.post("/clmm/close", response_model=CLMMCollectFeesResponse)
async def close_clmm_position(
    request: CLMMClosePositionRequest,
    accounts_service: AccountsService = Depends(get_accounts_service),
    db_manager: AsyncDatabaseManager = Depends(get_database_manager)
):
    """
    CLOSE a CLMM position completely (removes all liquidity and collects pending fees).

    Example:
        connector: 'meteora'
        network: 'solana-mainnet-beta'
        position_address: '...'
        wallet_address: (optional)

    Returns:
        Transaction hash and collected fee amounts
    """
    try:
        if not await accounts_service.gateway_client.ping():
            raise HTTPException(status_code=503, detail="Gateway service is not available")

        # Parse network_id
        chain, network = accounts_service.gateway_client.parse_network_id(request.network)

        # Get pool_address and wallet_address from database
        pool_address = None
        wallet_address = None

        async with db_manager.get_session_context() as session:
            clmm_repo = GatewayCLMMRepository(session)
            db_position = await clmm_repo.get_position_by_address(request.position_address)
            if db_position:
                # NOTE(review): when the position is tracked, the DB wallet wins and
                # request.wallet_address is ignored — confirm this is intended.
                pool_address = db_position.pool_address
                wallet_address = db_position.wallet_address

        # If not in database, use default wallet
        if not wallet_address:
            wallet_address = await accounts_service.gateway_client.get_wallet_address_or_default(
                chain=chain,
                wallet_address=request.wallet_address
            )

        # If no pool_address from database, we can't query Gateway
        if not pool_address:
            raise HTTPException(
                status_code=404,
                detail=f"Position {request.position_address} not found in database. Pool address is required."
            )

        # Fetch pending fees BEFORE closing (Gateway doesn't always return collected amounts in response)
        base_fee_to_collect = Decimal("0")
        quote_fee_to_collect = Decimal("0")

        try:
            positions_list = await accounts_service.gateway_client.clmm_positions_owned(
                connector=request.connector,
                network=network,
                wallet_address=wallet_address,
                pool_address=pool_address
            )

            # Find our specific position and get pending fees
            if positions_list and isinstance(positions_list, list):
                for pos in positions_list:
                    if pos and pos.get("address") == request.position_address:
                        base_fee_to_collect = Decimal(str(pos.get("baseFeeAmount", 0)))
                        quote_fee_to_collect = Decimal(str(pos.get("quoteFeeAmount", 0)))
                        logger.info(f"Pending fees before closing: base={base_fee_to_collect}, quote={quote_fee_to_collect}")
                        break
                else:
                    # for-else: no matching position found in the Gateway response
                    logger.warning(f"Could not find position {request.position_address} in positions_owned response")
        except Exception as e:
            # Best-effort: pre-fetch failure leaves the fallback fee values at 0
            logger.warning(f"Could not fetch pending fees before closing: {e}", exc_info=True)

        # Close position
        result = await accounts_service.gateway_client.clmm_close_position(
            connector=request.connector,
            network=network,
            wallet_address=wallet_address,
            position_address=request.position_address
        )

        transaction_hash = result.get("signature") or result.get("txHash") or result.get("hash")
        if not transaction_hash:
            raise HTTPException(status_code=500, detail="No transaction hash returned from Gateway")

        # Get transaction status from Gateway response
        tx_status = get_transaction_status_from_response(result)

        # Extract gas fee from Gateway response
        data = result.get("data", {})
        gas_fee = data.get("fee")
        gas_token = get_native_gas_token(chain)

        # Try to extract collected amounts from Gateway response, fallback to pre-fetched amounts
        base_fee_from_response = data.get("baseFeeAmountCollected")
        quote_fee_from_response = data.get("quoteFeeAmountCollected")

        # Use response values if available, otherwise use pre-fetched values
        base_fee_collected = Decimal(str(base_fee_from_response)) if base_fee_from_response is not None else base_fee_to_collect
        quote_fee_collected = Decimal(str(quote_fee_from_response)) if quote_fee_from_response is not None else quote_fee_to_collect

        logger.info(f"Collected fees on close: base={base_fee_collected}, quote={quote_fee_collected}")

        # Store CLOSE event in database and update position
        try:
            async with db_manager.get_session_context() as session:
                clmm_repo = GatewayCLMMRepository(session)

                # Get position to link event
                position = await clmm_repo.get_position_by_address(request.position_address)
                if position:
                    # Create event record
                    # NOTE(review): the falsy checks below store None instead of 0.0
                    # when zero fees were collected — confirm that is intended.
                    event_data = {
                        "position_id": position.id,
                        "transaction_hash": transaction_hash,
                        "event_type": "CLOSE",
                        "base_fee_collected": float(base_fee_collected) if base_fee_collected else None,
                        "quote_fee_collected": float(quote_fee_collected) if quote_fee_collected else None,
                        "gas_fee": float(gas_fee) if gas_fee else None,
                        "gas_token": gas_token,
                        "status": tx_status
                    }
                    await clmm_repo.create_event(event_data)
                    logger.info(f"Recorded CLMM CLOSE event: {transaction_hash} (status: {tx_status}, gas: {gas_fee} {gas_token})")

                    # Update position: add to collected, reset pending to 0, mark as CLOSED
                    new_base_collected = Decimal(str(position.base_fee_collected)) + base_fee_collected
                    new_quote_collected = Decimal(str(position.quote_fee_collected)) + quote_fee_collected

                    await clmm_repo.update_position_fees(
                        position_address=request.position_address,
                        base_fee_collected=new_base_collected,
                        quote_fee_collected=new_quote_collected,
                        base_fee_pending=Decimal("0"),
                        quote_fee_pending=Decimal("0")
                    )

                    # Mark position as CLOSED
                    await clmm_repo.close_position(request.position_address)
                    logger.info(f"Updated position {request.position_address}: collected fees updated, pending fees reset to 0, status set to CLOSED")
        except Exception as db_error:
            # Log but don't fail: the on-chain close was already submitted
            logger.error(f"Error recording CLOSE event: {db_error}", exc_info=True)

        return CLMMCollectFeesResponse(
            transaction_hash=transaction_hash,
            position_address=request.position_address,
            base_fee_collected=Decimal(str(base_fee_collected)) if base_fee_collected else None,
            quote_fee_collected=Decimal(str(quote_fee_collected)) if quote_fee_collected else None,
            status="submitted"
        )

    except HTTPException:
        raise
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error(f"Error closing CLMM position: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error closing CLMM position: {str(e)}")
@router.post("/clmm/collect-fees", response_model=CLMMCollectFeesResponse)
async def collect_fees_from_clmm_position(
    request: CLMMCollectFeesRequest,
    accounts_service: AccountsService = Depends(get_accounts_service),
    db_manager: AsyncDatabaseManager = Depends(get_database_manager)
):
    """
    Collect accumulated fees from a CLMM liquidity position.

    Example:
        connector: 'meteora'
        network: 'solana-mainnet-beta'
        position_address: '...'
        wallet_address: (optional)

    Returns:
        Transaction hash and collected fee amounts
    """
    try:
        if not await accounts_service.gateway_client.ping():
            raise HTTPException(status_code=503, detail="Gateway service is not available")

        # Parse network_id
        chain, network = accounts_service.gateway_client.parse_network_id(request.network)

        # Get pool_address and wallet_address from database
        pool_address = None
        wallet_address = None

        async with db_manager.get_session_context() as session:
            clmm_repo = GatewayCLMMRepository(session)
            db_position = await clmm_repo.get_position_by_address(request.position_address)
            if db_position:
                pool_address = db_position.pool_address
                wallet_address = db_position.wallet_address

        # If not in database, use default wallet
        if not wallet_address:
            wallet_address = await accounts_service.gateway_client.get_wallet_address_or_default(
                chain=chain,
                wallet_address=request.wallet_address
            )

        # If no pool_address from database, we can't query Gateway
        if not pool_address:
            raise HTTPException(
                status_code=404,
                detail=f"Position {request.position_address} not found in database. Pool address is required."
            )

        # Fetch pending fees BEFORE collecting (Gateway doesn't always return collected amounts in response)
        base_fee_to_collect = Decimal("0")
        quote_fee_to_collect = Decimal("0")

        try:
            positions_list = await accounts_service.gateway_client.clmm_positions_owned(
                connector=request.connector,
                network=network,
                wallet_address=wallet_address,
                pool_address=pool_address
            )

            # Find our specific position and get pending fees
            if positions_list and isinstance(positions_list, list):
                for pos in positions_list:
                    if pos and pos.get("address") == request.position_address:
                        base_fee_to_collect = Decimal(str(pos.get("baseFeeAmount", 0)))
                        quote_fee_to_collect = Decimal(str(pos.get("quoteFeeAmount", 0)))
                        logger.info(f"Pending fees before collection: base={base_fee_to_collect}, quote={quote_fee_to_collect}")
                        break
                else:
                    # for-else: no matching position found in the Gateway response
                    logger.warning(f"Could not find position {request.position_address} in positions_owned response")
        except Exception as e:
            # Best-effort: pre-fetch failure leaves the fallback fee values at 0
            logger.warning(f"Could not fetch pending fees before collection: {e}", exc_info=True)

        # Collect fees
        result = await accounts_service.gateway_client.clmm_collect_fees(
            connector=request.connector,
            network=network,
            wallet_address=wallet_address,
            position_address=request.position_address
        )

        if not result:
            raise HTTPException(status_code=500, detail="No response from Gateway collect-fees endpoint")

        transaction_hash = result.get("signature") or result.get("txHash") or result.get("hash")
        if not transaction_hash:
            raise HTTPException(status_code=500, detail="No transaction hash returned from Gateway")

        # Get transaction status from Gateway response
        tx_status = get_transaction_status_from_response(result)

        # Try to extract collected amounts from Gateway response, fallback to pre-fetched amounts
        data = result.get("data", {})
        base_fee_from_response = data.get("baseFeeAmountCollected")
        quote_fee_from_response = data.get("quoteFeeAmountCollected")

        # Use response values if available, otherwise use pre-fetched values
        base_fee_collected = Decimal(str(base_fee_from_response)) if base_fee_from_response is not None else base_fee_to_collect
        quote_fee_collected = Decimal(str(quote_fee_from_response)) if quote_fee_from_response is not None else quote_fee_to_collect

        # Extract gas fee from Gateway response
        gas_fee = data.get("fee")
        gas_token = get_native_gas_token(chain)

        logger.info(f"Collected fees: base={base_fee_collected}, quote={quote_fee_collected}")

        # Store COLLECT_FEES event in database and update position
        try:
            async with db_manager.get_session_context() as session:
                clmm_repo = GatewayCLMMRepository(session)

                # Get position to link event
                position = await clmm_repo.get_position_by_address(request.position_address)
                if position:
                    # Create event record
                    # NOTE(review): the falsy checks below store None instead of 0.0
                    # when zero fees were collected — confirm that is intended.
                    event_data = {
                        "position_id": position.id,
                        "transaction_hash": transaction_hash,
                        "event_type": "COLLECT_FEES",
                        "base_fee_collected": float(base_fee_collected) if base_fee_collected else None,
                        "quote_fee_collected": float(quote_fee_collected) if quote_fee_collected else None,
                        "gas_fee": float(gas_fee) if gas_fee else None,
                        "gas_token": gas_token,
                        "status": tx_status
                    }
                    await clmm_repo.create_event(event_data)
                    logger.info(f"Recorded CLMM COLLECT_FEES event: {transaction_hash} (status: {tx_status}, gas: {gas_fee} {gas_token})")

                    # Update position: add to collected, reset pending to 0
                    new_base_collected = Decimal(str(position.base_fee_collected)) + base_fee_collected
                    new_quote_collected = Decimal(str(position.quote_fee_collected)) + quote_fee_collected

                    await clmm_repo.update_position_fees(
                        position_address=request.position_address,
                        base_fee_collected=new_base_collected,
                        quote_fee_collected=new_quote_collected,
                        base_fee_pending=Decimal("0"),
                        quote_fee_pending=Decimal("0")
                    )
                    logger.info(f"Updated position {request.position_address}: collected fees updated, pending fees reset to 0")
        except Exception as db_error:
            # Log but don't fail: the on-chain collect was already submitted
            logger.error(f"Error recording COLLECT_FEES event: {db_error}", exc_info=True)

        return CLMMCollectFeesResponse(
            transaction_hash=transaction_hash,
            position_address=request.position_address,
            base_fee_collected=Decimal(str(base_fee_collected)) if base_fee_collected else None,
            quote_fee_collected=Decimal(str(quote_fee_collected)) if quote_fee_collected else None,
            status="submitted"
        )

    except HTTPException:
        raise
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error(f"Error collecting fees: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error collecting fees: {str(e)}")
@router.post("/clmm/positions_owned", response_model=List[CLMMPositionInfo])
async def get_clmm_positions_owned(
    request: CLMMPositionsOwnedRequest,
    accounts_service: AccountsService = Depends(get_accounts_service)
):
    """
    Get all CLMM liquidity positions owned by a wallet for a specific pool.

    Example:
        connector: 'meteora'
        network: 'solana-mainnet-beta'
        pool_address: '2sf5NYcY4zUPXUSmG6f66mskb24t5F8S11pC1Nz5nQT3'
        wallet_address: (optional, uses default if not provided)

    Returns:
        List of CLMM position information for the specified pool
    """
    try:
        if not await accounts_service.gateway_client.ping():
            raise HTTPException(status_code=503, detail="Gateway service is not available")

        # Parse network_id
        chain, network = accounts_service.gateway_client.parse_network_id(request.network)

        # Get wallet address
        wallet_address = await accounts_service.gateway_client.get_wallet_address_or_default(
            chain=chain,
            wallet_address=request.wallet_address
        )

        # Get positions for the specified pool
        result = await accounts_service.gateway_client.clmm_positions_owned(
            connector=request.connector,
            network=network,
            wallet_address=wallet_address,
            pool_address=request.pool_address
        )

        if result is None:
            raise HTTPException(status_code=500, detail="Failed to get positions from Gateway")

        # Gateway returns a list directly
        positions_data = result if isinstance(result, list) else []
        positions = []

        for pos in positions_data:
            # Extract token addresses (Gateway returns addresses, not symbols)
            base_token_address = pos.get("baseTokenAddress", "")
            quote_token_address = pos.get("quoteTokenAddress", "")

            # Use short addresses as symbols for now
            # NOTE(review): the last 8 chars of a mint address stand in for a token
            # symbol here — collisions are possible; replace with a real symbol
            # lookup when available.
            base_token = base_token_address[-8:] if base_token_address else ""
            quote_token = quote_token_address[-8:] if quote_token_address else ""
            trading_pair = f"{base_token}-{quote_token}" if base_token and quote_token else ""

            current_price = Decimal(str(pos.get("price", 0)))
            lower_price = Decimal(str(pos.get("lowerPrice", 0))) if pos.get("lowerPrice") else Decimal("0")
            upper_price = Decimal(str(pos.get("upperPrice", 0))) if pos.get("upperPrice") else Decimal("0")

            # Determine if position is in range
            in_range = False
            if current_price > 0 and lower_price > 0 and upper_price > 0:
                in_range = lower_price <= current_price <= upper_price

            positions.append(CLMMPositionInfo(
                position_address=pos.get("address", ""),
                pool_address=pos.get("poolAddress", ""),
                trading_pair=trading_pair,
                base_token=base_token,
                quote_token=quote_token,
                base_token_amount=Decimal(str(pos.get("baseTokenAmount", 0))),
                quote_token_amount=Decimal(str(pos.get("quoteTokenAmount", 0))),
                current_price=current_price,
                lower_price=lower_price,
                upper_price=upper_price,
                # NOTE(review): falsy check maps a 0 fee to None rather than
                # Decimal("0") — confirm callers treat them the same.
                base_fee_amount=Decimal(str(pos.get("baseFeeAmount", 0))) if pos.get("baseFeeAmount") else None,
                quote_fee_amount=Decimal(str(pos.get("quoteFeeAmount", 0))) if pos.get("quoteFeeAmount") else None,
                lower_bin_id=pos.get("lowerBinId"),
                upper_bin_id=pos.get("upperBinId"),
                in_range=in_range
            ))

        return positions

    except HTTPException:
        raise
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        logger.error(f"Error getting CLMM positions owned: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error getting CLMM positions owned: {str(e)}")
@router.get("/clmm/positions/{position_address}/events")
async def get_clmm_position_events(
    position_address: str,
    event_type: Optional[str] = None,
    limit: int = 100,
    db_manager: AsyncDatabaseManager = Depends(get_database_manager)
):
    """
    Get event history for a CLMM position.

    Args:
        position_address: Position NFT address
        event_type: Filter by event type (OPEN, ADD_LIQUIDITY, REMOVE_LIQUIDITY, COLLECT_FEES, CLOSE)
        limit: Max events to return

    Returns:
        List of position events
    """
    try:
        async with db_manager.get_session_context() as session:
            repo = GatewayCLMMRepository(session)
            rows = await repo.get_position_events(
                position_address=position_address,
                event_type=event_type,
                limit=limit,
            )
            # Serialize while the session is still open
            serialized = [repo.event_to_dict(row) for row in rows]
            return {"data": serialized, "total_count": len(serialized)}

    except Exception as e:
        logger.error(f"Error getting position events: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error getting position events: {str(e)}")
@router.post("/clmm/positions/search")
async def search_clmm_positions(
    network: Optional[str] = None,
    connector: Optional[str] = None,
    wallet_address: Optional[str] = None,
    trading_pair: Optional[str] = None,
    status: Optional[str] = None,
    position_addresses: Optional[List[str]] = Query(None),
    limit: int = 50,
    offset: int = 0,
    refresh: bool = False,
    db_manager: AsyncDatabaseManager = Depends(get_database_manager),
    accounts_service: AccountsService = Depends(get_accounts_service)
):
    """
    Search CLMM positions with filters.

    Args:
        network: Filter by network (e.g., 'solana-mainnet-beta')
        connector: Filter by connector (e.g., 'meteora')
        wallet_address: Filter by wallet address
        trading_pair: Filter by trading pair (e.g., 'SOL-USDC')
        status: Filter by status (OPEN, CLOSED)
        position_addresses: Filter by specific position addresses (list of addresses)
        limit: Max results (default 50, max 1000)
        offset: Pagination offset
        refresh: If True, refresh position data from Gateway before returning (default False)

    Returns:
        Paginated list of positions
    """
    try:
        # Validate limit
        if limit > 1000:
            limit = 1000

        # Optionally refresh position data from Gateway first
        if refresh and await accounts_service.gateway_client.ping():
            # Get positions to refresh
            async with db_manager.get_session_context() as session:
                clmm_repo = GatewayCLMMRepository(session)
                positions_to_refresh = await clmm_repo.get_positions(
                    network=network,
                    connector=connector,
                    wallet_address=wallet_address,
                    trading_pair=trading_pair,
                    status=status,
                    position_addresses=position_addresses,
                    limit=limit,
                    offset=offset
                )

                # Extract position addresses and details before closing session
                position_details = [
                    {
                        "position_address": pos.position_address,
                        "pool_address": pos.pool_address,
                        "connector": pos.connector,
                        "network": pos.network,
                        "wallet_address": pos.wallet_address
                    }
                    for pos in positions_to_refresh
                ]

            # Refresh each position in a separate session
            logger.info(f"Refreshing {len(position_details)} positions from Gateway")
            for pos_detail in position_details:
                try:
                    async with db_manager.get_session_context() as session:
                        clmm_repo = GatewayCLMMRepository(session)
                        # Get position again in this session
                        position = await clmm_repo.get_position_by_address(pos_detail["position_address"])
                        if position:
                            await _refresh_position_data(position, accounts_service, clmm_repo)
                except Exception as e:
                    logger.warning(f"Failed to refresh position {pos_detail['position_address']}: {e}")
                    # Continue with other positions even if one fails

        # Get final results after refresh
        async with db_manager.get_session_context() as session:
            clmm_repo = GatewayCLMMRepository(session)
            positions = await clmm_repo.get_positions(
                network=network,
                connector=connector,
                wallet_address=wallet_address,
                trading_pair=trading_pair,
                status=status,
                position_addresses=position_addresses,
                limit=limit,
                offset=offset
            )

            # Get total count for pagination
            # NOTE(review): has_more is a heuristic — a full page is assumed to mean
            # more rows exist, so total_count is only exact on the final page.
            has_more = len(positions) == limit

            return {
                "data": [clmm_repo.position_to_dict(pos) for pos in positions],
                "pagination": {
                    "limit": limit,
                    "offset": offset,
                    "has_more": has_more,
                    "total_count": len(positions) + offset if not has_more else None
                }
            }

    except Exception as e:
        logger.error(f"Error searching CLMM positions: {e}", exc_info=True)
        raise HTTPException(status_code=500, detail=f"Error searching CLMM positions: {str(e)}")
+ + Gateway returns status field in the response: + - status: 1 = confirmed + - status: 0 = pending/submitted + + Returns: + "CONFIRMED" if status == 1 + "SUBMITTED" if status == 0 or not present + """ + status = gateway_response.get("status") + + # Status 1 means transaction is confirmed on-chain + if status == 1: + return "CONFIRMED" + + # Status 0 or missing means submitted but not confirmed yet + return "SUBMITTED" + + +@router.post("/swap/quote", response_model=SwapQuoteResponse) +async def get_swap_quote( + request: SwapQuoteRequest, + accounts_service: AccountsService = Depends(get_accounts_service) +): + """ + Get a price quote for a swap via router (Jupiter, 0x). + + Example: + connector: 'jupiter' + network: 'solana-mainnet-beta' + trading_pair: 'SOL-USDC' + side: 'BUY' + amount: 1 + slippage_pct: 1 + + Returns: + Quote with price, expected output amount, and gas estimate + """ + try: + if not await accounts_service.gateway_client.ping(): + raise HTTPException(status_code=503, detail="Gateway service is not available") + + # Parse network_id + chain, network = accounts_service.gateway_client.parse_network_id(request.network) + + # Parse trading pair + base, quote = request.trading_pair.split("-") + + # Get quote from Gateway + result = await accounts_service.gateway_client.quote_swap( + connector=request.connector, + network=network, + base_asset=base, + quote_asset=quote, + amount=float(request.amount), + side=request.side, + slippage_pct=float(request.slippage_pct) if request.slippage_pct else 1.0, + pool_address=None + ) + + # Extract amounts from Gateway response (snake_case for consistency) + amount_in_raw = result.get("amountIn") or result.get("amount_in") + amount_out_raw = result.get("amountOut") or result.get("amount_out") + + amount_in = Decimal(str(amount_in_raw)) if amount_in_raw else None + amount_out = Decimal(str(amount_out_raw)) if amount_out_raw else None + + # Extract gas estimate (try both camelCase and snake_case) + gas_estimate = 
result.get("gasEstimate") or result.get("gas_estimate") + gas_estimate_value = Decimal(str(gas_estimate)) if gas_estimate else None + + return SwapQuoteResponse( + base=base, + quote=quote, + price=Decimal(str(result.get("price", 0))), + amount=request.amount, + amount_in=amount_in, + amount_out=amount_out, + expected_amount=amount_out, # Deprecated, kept for backward compatibility + slippage_pct=request.slippage_pct or Decimal("1.0"), + gas_estimate=gas_estimate_value + ) + + except HTTPException: + raise + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error getting swap quote: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Error getting swap quote: {str(e)}") + + +@router.post("/swap/execute", response_model=SwapExecuteResponse) +async def execute_swap( + request: SwapExecuteRequest, + accounts_service: AccountsService = Depends(get_accounts_service), + db_manager: AsyncDatabaseManager = Depends(get_database_manager) +): + """ + Execute a swap transaction via router (Jupiter, 0x). 
+ + Example: + connector: 'jupiter' + network: 'solana-mainnet-beta' + trading_pair: 'SOL-USDC' + side: 'BUY' + amount: 1 + slippage_pct: 1 + wallet_address: (optional, uses default if not provided) + + Returns: + Transaction hash and swap details + """ + try: + if not await accounts_service.gateway_client.ping(): + raise HTTPException(status_code=503, detail="Gateway service is not available") + + # Parse network_id + chain, network = accounts_service.gateway_client.parse_network_id(request.network) + + # Get wallet address + wallet_address = await accounts_service.gateway_client.get_wallet_address_or_default( + chain=chain, + wallet_address=request.wallet_address + ) + + # Parse trading pair + base, quote = request.trading_pair.split("-") + + # Execute swap + result = await accounts_service.gateway_client.execute_swap( + connector=request.connector, + network=network, + wallet_address=wallet_address, + base_asset=base, + quote_asset=quote, + amount=float(request.amount), + side=request.side, + slippage_pct=float(request.slippage_pct) if request.slippage_pct else 1.0 + ) + if not result: + raise HTTPException(status_code=500, detail="Gateway service is not able to execute swap") + transaction_hash = result.get("signature") or result.get("txHash") or result.get("hash") + if not transaction_hash: + raise HTTPException(status_code=500, detail="No transaction hash returned from Gateway") + + # Extract swap data from Gateway response + # Gateway returns amounts nested under 'data' object + data = result.get("data", {}) + amount_in_raw = data.get("amountIn") + amount_out_raw = data.get("amountOut") + + # Use amounts from Gateway response, fallback to request amount if not available + input_amount = Decimal(str(amount_in_raw)) if amount_in_raw is not None else request.amount + output_amount = Decimal(str(amount_out_raw)) if amount_out_raw is not None else Decimal("0") + + # Calculate price from actual swap amounts + # Price = output / input (how much quote you get/pay 
per base) + price = output_amount / input_amount if input_amount > 0 else Decimal("0") + + # Get transaction status from Gateway response + tx_status = get_transaction_status_from_response(result) + + # Store swap in database + try: + async with db_manager.get_session_context() as session: + swap_repo = GatewaySwapRepository(session) + + swap_data = { + "transaction_hash": transaction_hash, + "network": request.network, + "connector": request.connector, + "wallet_address": wallet_address, + "trading_pair": request.trading_pair, + "base_token": base, + "quote_token": quote, + "side": request.side, + "input_amount": float(input_amount), + "output_amount": float(output_amount), + "price": float(price), + "slippage_pct": float(request.slippage_pct) if request.slippage_pct else 1.0, + "status": tx_status, + "pool_address": result.get("poolAddress") or result.get("pool_address") + } + + await swap_repo.create_swap(swap_data) + logger.info(f"Recorded swap in database: {transaction_hash} (status: {tx_status})") + except Exception as db_error: + # Log but don't fail the swap - it was submitted successfully + logger.error(f"Error recording swap in database: {db_error}", exc_info=True) + + return SwapExecuteResponse( + transaction_hash=transaction_hash, + trading_pair=request.trading_pair, + side=request.side, + amount=request.amount, + status="submitted" + ) + + except HTTPException: + raise + except ValueError as e: + raise HTTPException(status_code=400, detail=str(e)) + except Exception as e: + logger.error(f"Error executing swap: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Error executing swap: {str(e)}") + + +@router.get("/swaps/{transaction_hash}/status") +async def get_swap_status( + transaction_hash: str, + db_manager: AsyncDatabaseManager = Depends(get_database_manager) +): + """ + Get status of a specific swap by transaction hash. 
+ + Args: + transaction_hash: Transaction hash of the swap + + Returns: + Swap details including current status + """ + try: + async with db_manager.get_session_context() as session: + swap_repo = GatewaySwapRepository(session) + swap = await swap_repo.get_swap_by_tx_hash(transaction_hash) + + if not swap: + raise HTTPException(status_code=404, detail=f"Swap not found: {transaction_hash}") + + return swap_repo.to_dict(swap) + + except HTTPException: + raise + except Exception as e: + logger.error(f"Error getting swap status: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Error getting swap status: {str(e)}") + + +@router.post("/swaps/search") +async def search_swaps( + network: Optional[str] = None, + connector: Optional[str] = None, + wallet_address: Optional[str] = None, + trading_pair: Optional[str] = None, + status: Optional[str] = None, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + limit: int = 50, + offset: int = 0, + db_manager: AsyncDatabaseManager = Depends(get_database_manager) +): + """ + Search swap history with filters. 
+ + Args: + network: Filter by network (e.g., 'solana-mainnet-beta') + connector: Filter by connector (e.g., 'jupiter') + wallet_address: Filter by wallet address + trading_pair: Filter by trading pair (e.g., 'SOL-USDC') + status: Filter by status (SUBMITTED, CONFIRMED, FAILED) + start_time: Start timestamp (unix seconds) + end_time: End timestamp (unix seconds) + limit: Max results (default 50, max 1000) + offset: Pagination offset + + Returns: + Paginated list of swaps + """ + try: + # Validate limit + if limit > 1000: + limit = 1000 + + async with db_manager.get_session_context() as session: + swap_repo = GatewaySwapRepository(session) + swaps = await swap_repo.get_swaps( + network=network, + connector=connector, + wallet_address=wallet_address, + trading_pair=trading_pair, + status=status, + start_time=start_time, + end_time=end_time, + limit=limit, + offset=offset + ) + + # Get total count for pagination (simplified - actual count would need separate query) + has_more = len(swaps) == limit + + return { + "data": [swap_repo.to_dict(swap) for swap in swaps], + "pagination": { + "limit": limit, + "offset": offset, + "has_more": has_more, + "total_count": len(swaps) + offset if not has_more else None + } + } + + except Exception as e: + logger.error(f"Error searching swaps: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Error searching swaps: {str(e)}") + + +@router.get("/swaps/summary") +async def get_swaps_summary( + network: Optional[str] = None, + wallet_address: Optional[str] = None, + start_time: Optional[int] = None, + end_time: Optional[int] = None, + db_manager: AsyncDatabaseManager = Depends(get_database_manager) +): + """ + Get swap summary statistics. 
+ + Args: + network: Filter by network + wallet_address: Filter by wallet address + start_time: Start timestamp (unix seconds) + end_time: End timestamp (unix seconds) + + Returns: + Summary statistics including volume, fees, success rate + """ + try: + async with db_manager.get_session_context() as session: + swap_repo = GatewaySwapRepository(session) + summary = await swap_repo.get_swaps_summary( + network=network, + wallet_address=wallet_address, + start_time=start_time, + end_time=end_time + ) + return summary + + except Exception as e: + logger.error(f"Error getting swaps summary: {e}", exc_info=True) + raise HTTPException(status_code=500, detail=f"Error getting swaps summary: {str(e)}") diff --git a/routers/portfolio.py b/routers/portfolio.py index 4a5b69bd..6941cfc7 100644 --- a/routers/portfolio.py +++ b/routers/portfolio.py @@ -1,7 +1,7 @@ from typing import Dict, List, Optional from datetime import datetime -from fastapi import APIRouter, HTTPException, Depends +from fastapi import APIRouter, HTTPException, Depends, Query from models.trading import ( PortfolioStateFilterRequest, @@ -327,5 +327,5 @@ async def get_accounts_distribution( account_data["percentage"] = (account_data.get("total_value", 0) / total_value) * 100 filtered_distribution["account_count"] = len(filtered_distribution["accounts"]) - - return filtered_distribution \ No newline at end of file + + return filtered_distribution diff --git a/services/accounts_service.py b/services/accounts_service.py index 71930ed1..4fa9bc53 100644 --- a/services/accounts_service.py +++ b/services/accounts_service.py @@ -1,5 +1,6 @@ import asyncio import logging +import time from datetime import datetime, timezone from decimal import Decimal from typing import Dict, List, Optional @@ -12,6 +13,8 @@ from config import settings from database import AsyncDatabaseManager, AccountRepository, OrderRepository, TradeRepository, FundingRepository from services.market_data_feed_manager import MarketDataFeedManager +from 
services.gateway_client import GatewayClient +from services.gateway_transaction_poller import GatewayTransactionPoller from utils.connector_manager import ConnectorManager from utils.file_system import fs_util @@ -31,6 +34,11 @@ class AccountsService: "xrpl": "RLUSD", "kraken": "USD", } + gateway_default_pricing_connector = { + "ethereum": "uniswap/router", + "solana": "jupiter/router", + } + potential_wrapped_tokens = ["ETH", "SOL", "BNB", "POL", "AVAX", "FTM", "ONE", "GLMR", "MOVR"] # Cache for storing last successful prices by trading pair with timestamps _last_known_prices = {} @@ -39,14 +47,16 @@ class AccountsService: def __init__(self, account_update_interval: int = 5, default_quote: str = "USDT", - market_data_feed_manager: Optional[MarketDataFeedManager] = None): + market_data_feed_manager: Optional[MarketDataFeedManager] = None, + gateway_url: str = "http://localhost:15888"): """ Initialize the AccountsService. - + Args: account_update_interval: How often to update account states in minutes (default: 5) default_quote: Default quote currency for trading pairs (default: "USDT") market_data_feed_manager: Market data feed manager for price caching (optional) + gateway_url: URL for Gateway service (default: "http://localhost:15888") """ self.secrets_manager = ETHKeyFileSecretManger(settings.security.config_password) self.accounts_state = {} @@ -54,14 +64,27 @@ def __init__(self, self.default_quote = default_quote self.market_data_feed_manager = market_data_feed_manager self._update_account_state_task: Optional[asyncio.Task] = None - + # Database setup for account states and orders self.db_manager = AsyncDatabaseManager(settings.database.url) self._db_initialized = False - + # Initialize connector manager with db_manager self.connector_manager = ConnectorManager(self.secrets_manager, self.db_manager) + # Initialize Gateway client + self.gateway_client = GatewayClient(gateway_url) + + # Initialize Gateway transaction poller + self.gateway_tx_poller = 
GatewayTransactionPoller( + db_manager=self.db_manager, + gateway_client=self.gateway_client, + poll_interval=10, # Poll every 10 seconds for transactions + position_poll_interval=60, # Poll every 1 minute for positions + max_retry_age=3600 # Stop retrying after 1 hour + ) + self._gateway_poller_started = False + async def ensure_db_initialized(self): """Ensure database is initialized before using it.""" if not self._db_initialized: @@ -87,22 +110,45 @@ def start(self): # Start the update loop which will call check_all_connectors self._update_account_state_task = asyncio.create_task(self.update_account_state_loop()) + # Start Gateway transaction poller + if not self._gateway_poller_started: + asyncio.create_task(self._start_gateway_poller()) + self._gateway_poller_started = True + logger.info("Gateway transaction poller startup initiated") + + async def _start_gateway_poller(self): + """Start the Gateway transaction poller (async helper).""" + try: + await self.gateway_tx_poller.start() + logger.info("Gateway transaction poller started successfully") + except Exception as e: + logger.error(f"Error starting Gateway transaction poller: {e}", exc_info=True) + async def stop(self): """ Stop all accounts service tasks and cleanup resources. This is the main cleanup method that should be called during application shutdown. 
""" logger.info("Stopping AccountsService...") - + # Stop the account state update loop if self._update_account_state_task: self._update_account_state_task.cancel() self._update_account_state_task = None logger.info("Stopped account state update loop") - + + # Stop Gateway transaction poller + if self._gateway_poller_started: + try: + await self.gateway_tx_poller.stop() + logger.info("Gateway transaction poller stopped") + self._gateway_poller_started = False + except Exception as e: + logger.error(f"Error stopping Gateway transaction poller: {e}", exc_info=True) + # Stop all connectors through the ConnectorManager await self.connector_manager.stop_all_connectors() - + logger.info("AccountsService stopped successfully") async def update_account_state_loop(self): @@ -244,9 +290,9 @@ async def _initialize_price_tracking(self, account_name: str, connector_name: st logger.error(f"Error initializing price tracking for {connector_name} in account {account_name}: {e}") async def update_account_state(self): - """Update account state for all connectors.""" + """Update account state for all connectors and Gateway wallets.""" all_connectors = self.connector_manager.get_all_connectors() - + for account_name, connectors in all_connectors.items(): if account_name not in self.accounts_state: self.accounts_state[account_name] = {} @@ -258,6 +304,9 @@ async def update_account_state(self): logger.error(f"Error updating balances for connector {connector_name} in account {account_name}: {e}") self.accounts_state[account_name][connector_name] = [] + # Add Gateway wallet balances to master_account if Gateway is available + await self._update_gateway_balances() + async def _get_connector_tokens_info(self, connector, connector_name: str) -> List[Dict]: """Get token info from a connector instance using cached prices when available.""" balances = [{"token": key, "units": value} for key, value in connector.get_all_balances().items() if @@ -1257,21 +1306,21 @@ async def 
get_funding_payments(self, account_name: str, connector_name: str = No logger.error(f"Error getting funding payments: {e}") return [] - async def get_total_funding_fees(self, account_name: str, connector_name: str, + async def get_total_funding_fees(self, account_name: str, connector_name: str, trading_pair: str) -> Dict: """ Get total funding fees for a specific trading pair. - + Args: account_name: Name of the account connector_name: Name of the connector trading_pair: Trading pair to get fees for - + Returns: Dictionary with total funding fees information """ await self.ensure_db_initialized() - + try: async with self.db_manager.get_session_context() as session: funding_repo = FundingRepository(session) @@ -1280,7 +1329,7 @@ async def get_total_funding_fees(self, account_name: str, connector_name: str, connector_name=connector_name, trading_pair=trading_pair ) - + except Exception as e: logger.error(f"Error getting total funding fees: {e}") return { @@ -1289,3 +1338,267 @@ async def get_total_funding_fees(self, account_name: str, connector_name: str, "fee_currency": None, "error": str(e) } + + # ============================================ + # Gateway Wallet Management Methods + # ============================================ + + async def _update_gateway_balances(self): + """Update Gateway wallet balances in master_account state.""" + try: + # Check if Gateway is available + if not await self.gateway_client.ping(): + logger.debug("Gateway service is not available, skipping wallet balance update") + return + + # Get all wallets from Gateway + wallets = await self.gateway_client.get_wallets() + if not wallets: + logger.debug("No Gateway wallets found") + return + + # Get all available chains and networks + chains_result = await self.gateway_client.get_chains() + if not chains_result or "chains" not in chains_result: + logger.error("Could not get chains from Gateway") + return + + # Build a map of chain -> [networks] + chain_networks_map = {c["chain"]: 
c["networks"] for c in chains_result["chains"]} + + # Ensure master_account exists in accounts_state + if "master_account" not in self.accounts_state: + self.accounts_state["master_account"] = {} + + # Collect all balance query tasks for parallel execution + balance_tasks = [] + task_metadata = [] # Store (chain, network, address) for each task + + for wallet_info in wallets: + chain = wallet_info.get("chain") + wallet_addresses = wallet_info.get("walletAddresses", []) + + if not chain or not wallet_addresses: + continue + + # Use the first address as the default wallet for this chain + address = wallet_addresses[0] + + # Get all networks for this chain + networks = chain_networks_map.get(chain, []) + if not networks: + logger.warning(f"No networks found for chain '{chain}', skipping") + continue + + # Create tasks for all networks for this wallet + for network in networks: + balance_tasks.append(self.get_gateway_balances(chain, address, network=network)) + task_metadata.append((chain, network, address)) + + # Execute all balance queries in parallel + if balance_tasks: + t_zero = time.time() + results = await asyncio.gather(*balance_tasks, return_exceptions=True) + duration = time.time() - t_zero + # Process results + for idx, (result, (chain, network, address)) in enumerate(zip(results, task_metadata)): + chain_network = f"{chain}-{network}" + + if isinstance(result, Exception): + logger.error(f"Error updating Gateway balances for {chain}-{network} wallet {address}: {result}") + # Store empty list for error state + self.accounts_state["master_account"][chain_network] = [] + elif result: + # Only store if there are actual balances (non-empty list) + self.accounts_state["master_account"][chain_network] = result + else: + # Store empty list to indicate we checked this network + self.accounts_state["master_account"][chain_network] = [] + + except Exception as e: + logger.error(f"Error updating Gateway balances: {e}") + + async def get_gateway_wallets(self) -> 
List[Dict]: + """ + Get all wallets from Gateway. Gateway manages its own encrypted wallets. + + Returns: + List of wallet information from Gateway + """ + if not await self.gateway_client.ping(): + raise HTTPException(status_code=503, detail="Gateway service is not available") + + try: + wallets = await self.gateway_client.get_wallets() + return wallets + except Exception as e: + logger.error(f"Error getting Gateway wallets: {e}") + raise HTTPException(status_code=500, detail=f"Failed to get wallets: {str(e)}") + + async def add_gateway_wallet(self, chain: str, private_key: str) -> Dict: + """ + Add a wallet to Gateway. Gateway handles encryption internally. + + Args: + chain: Blockchain chain (e.g., 'solana', 'ethereum') + private_key: Wallet private key + + Returns: + Dictionary with wallet information from Gateway + """ + if not await self.gateway_client.ping(): + raise HTTPException(status_code=503, detail="Gateway service is not available") + + try: + result = await self.gateway_client.add_wallet(chain, private_key, set_default=True) + + if "error" in result: + raise HTTPException(status_code=400, detail=f"Gateway error: {result['error']}") + + logger.info(f"Added {chain} wallet {result.get('address')} to Gateway") + return result + + except HTTPException: + raise + except Exception as e: + logger.error(f"Error adding Gateway wallet: {e}") + raise HTTPException(status_code=500, detail=f"Failed to add wallet: {str(e)}") + + async def remove_gateway_wallet(self, chain: str, address: str) -> Dict: + """ + Remove a wallet from Gateway. 
+ + Args: + chain: Blockchain chain + address: Wallet address to remove + + Returns: + Success message + """ + if not await self.gateway_client.ping(): + raise HTTPException(status_code=503, detail="Gateway service is not available") + + try: + result = await self.gateway_client.remove_wallet(chain, address) + + if "error" in result: + raise HTTPException(status_code=400, detail=f"Gateway error: {result['error']}") + + logger.info(f"Removed {chain} wallet {address} from Gateway") + return {"success": True, "message": f"Successfully removed {chain} wallet"} + + except HTTPException: + raise + except Exception as e: + logger.error(f"Error removing Gateway wallet: {e}") + raise HTTPException(status_code=500, detail=f"Failed to remove wallet: {str(e)}") + + async def get_gateway_balances(self, chain: str, address: str, network: Optional[str] = None, tokens: Optional[List[str]] = None) -> List[Dict]: + """ + Get Gateway wallet balances with pricing from rate sources. + + Args: + chain: Blockchain chain + address: Wallet address + network: Optional network name (if not provided, uses default network for chain) + tokens: Optional list of token symbols to query + + Returns: + List of token balance dictionaries with prices from rate sources + """ + if not await self.gateway_client.ping(): + raise HTTPException(status_code=503, detail="Gateway service is not available") + + try: + # Get default network for chain if not provided + if not network: + network = await self.gateway_client.get_default_network(chain) + if not network: + raise HTTPException(status_code=400, detail=f"Could not determine network for chain '{chain}'") + + # Get balances from Gateway + balances_response = await self.gateway_client.get_balances(chain, network, address, tokens=tokens) + + if "error" in balances_response: + raise HTTPException(status_code=400, detail=f"Gateway error: {balances_response['error']}") + + # Format balances list + balances = balances_response.get("balances", {}) + balances_list 
= [] + + for token, balance in balances.items(): + if balance and float(balance) > 0: + balances_list.append({ + "token": token, + "units": Decimal(str(balance)) + }) + + # Get prices using rate sources (similar to _get_connector_tokens_info) + unique_tokens = [b["token"] for b in balances_list] + connector_name = f"gateway_{chain}-{network}" + + # Try to get cached prices first + prices_from_cache = {} + tokens_need_update = [] + + if self.market_data_feed_manager: + for token in unique_tokens: + try: + token_unwrapped = self.get_unwrapped_token(token) + trading_pair = f"{token_unwrapped}-USDT" + cached_price = self.market_data_feed_manager.market_data_provider.get_rate(trading_pair) + if cached_price > 0: + prices_from_cache[trading_pair] = cached_price + else: + tokens_need_update.append(token) + except Exception: + tokens_need_update.append(token) + else: + tokens_need_update = unique_tokens + + # Initialize rate sources for Gateway (using "gateway" as connector for AMM pairs) + if tokens_need_update: + pricing_connector = self.gateway_default_pricing_connector[chain] + trading_pairs_need_update = [f"{token}-USDC" for token in tokens_need_update] + connector_pairs = [ConnectorPair(connector_name=pricing_connector, trading_pair=tp) for tp in trading_pairs_need_update] + for pair in connector_pairs: + self.market_data_feed_manager.market_data_provider._rates_required.add_or_update( + f"gateway_{chain}-{network}", pair + ) + logger.info(f"Added {len(trading_pairs_need_update)} Gateway trading pairs to market data provider: {trading_pairs_need_update}") + + # Use cached prices (rate sources will update in background) + all_prices = prices_from_cache + + # Format final result with prices + formatted_balances = [] + for balance in balances_list: + token = balance["token"] + if "USD" in token: + price = Decimal("1") + else: + market = self.get_default_market(token, connector_name) + price = Decimal(str(all_prices.get(market, 0))) + + formatted_balances.append({ + 
"token": token, + "units": float(balance["units"]), + "price": float(price), + "value": float(price * balance["units"]), + "available_units": float(balance["units"]) + }) + + return formatted_balances + + except HTTPException: + raise + except Exception as e: + logger.error(f"Error getting Gateway balances: {e}") + raise HTTPException(status_code=500, detail=f"Failed to get balances: {str(e)}") + + def get_unwrapped_token(self, token: str) -> str: + """Get the unwrapped version of a wrapped token symbol.""" + for pw in self.potential_wrapped_tokens: + if token in pw: + return pw + return token \ No newline at end of file diff --git a/services/gateway_client.py b/services/gateway_client.py new file mode 100644 index 00000000..6f788a3b --- /dev/null +++ b/services/gateway_client.py @@ -0,0 +1,473 @@ +import logging +from typing import Dict, List, Optional +import aiohttp +from decimal import Decimal + +logger = logging.getLogger(__name__) + + +class GatewayClient: + """ + Simplified Gateway HTTP client for API integration. + Provides essential functionality for wallet management and balance queries. + """ + + def __init__(self, base_url: str = "http://localhost:15888"): + self.base_url = base_url + self._session: Optional[aiohttp.ClientSession] = None + + @staticmethod + def parse_network_id(network_id: str) -> tuple[str, str]: + """ + Parse network_id in format 'chain-network' into (chain, network). + + Examples: + 'solana-mainnet-beta' -> ('solana', 'mainnet-beta') + 'ethereum-mainnet' -> ('ethereum', 'mainnet') + """ + parts = network_id.split('-', 1) + if len(parts) != 2: + raise ValueError(f"Invalid network_id format. 
Expected 'chain-network', got '{network_id}'") + return parts[0], parts[1] + + async def get_wallet_address_or_default(self, chain: str, wallet_address: Optional[str] = None) -> str: + """Get wallet address - use provided or get default for chain""" + if wallet_address: + return wallet_address + + default_wallet = await self.get_default_wallet_address(chain) + if not default_wallet: + raise ValueError(f"No wallet configured for chain '{chain}'") + return default_wallet + + async def _get_session(self) -> aiohttp.ClientSession: + """Get or create aiohttp session""" + if self._session is None or self._session.closed: + self._session = aiohttp.ClientSession() + return self._session + + async def close(self): + """Close the aiohttp session""" + if self._session and not self._session.closed: + await self._session.close() + + async def _request(self, method: str, path: str, params: Dict = None, json: Dict = None) -> Optional[Dict]: + """Make HTTP request to Gateway""" + session = await self._get_session() + url = f"{self.base_url}/{path}" + + try: + if method == "GET": + async with session.get(url, params=params) as response: + response.raise_for_status() + return await response.json() + elif method == "POST": + async with session.post(url, json=json) as response: + response.raise_for_status() + return await response.json() + elif method == "DELETE": + async with session.delete(url, params=params, json=json) as response: + response.raise_for_status() + return await response.json() + except aiohttp.ClientResponseError as e: + logger.warning(f"Gateway request failed with status {e.status}: {method} {url} - {e.message}") + return None + except aiohttp.ClientError as e: + logger.debug(f"Gateway request error: {method} {url} - {e}") + return None + except Exception as e: + logger.debug(f"Gateway request failed: {method} {url} - {e}") + raise + + async def ping(self) -> bool: + """Check if Gateway is online""" + try: + response = await self._request("GET", "") + return 
response.get("status") == "ok" + except Exception: + return False + + async def get_wallets(self) -> List[Dict]: + """Get all connected wallets""" + return await self._request("GET", "wallet") + + async def get_default_wallet_address(self, chain: str) -> Optional[str]: + """Get default wallet address for a chain""" + try: + wallets = await self.get_wallets() + for wallet in wallets: + if wallet.get("chain") == chain: + addresses = wallet.get("walletAddresses", []) + return addresses[0] if addresses else None + return None + except Exception as e: + logger.error(f"Error getting default wallet for chain {chain}: {e}") + return None + + async def add_wallet(self, chain: str, private_key: str, set_default: bool = True) -> Dict: + """Add a wallet to Gateway""" + return await self._request("POST", "wallet/add", json={ + "chain": chain, + "privateKey": private_key, + "setDefault": set_default + }) + + async def remove_wallet(self, chain: str, address: str) -> Dict: + """Remove a wallet from Gateway""" + return await self._request("DELETE", "wallet/remove", json={ + "chain": chain, + "address": address + }) + + async def get_balances(self, chain: str, network: str, address: str, tokens: Optional[List[str]] = None) -> Dict: + """Get token balances for a wallet""" + return await self._request("POST", f"chains/{chain}/balances", json={ + "network": network, + "address": address, + "tokens": tokens if tokens is not None else [] + }) + + async def get_chains(self) -> Dict: + """Get available chains""" + return await self._request("GET", "config/chains") + + async def get_default_network(self, chain: str) -> Optional[str]: + """Get default network for a chain""" + try: + config = await self._request("GET", "config", params={"namespace": chain}) + return config.get("defaultNetwork") + except Exception: + return None + + async def get_tokens(self, chain: str, network: str) -> Dict: + """Get available tokens for a chain/network""" + return await self._request("GET", "tokens", 
params={ + "chain": chain, + "network": network + }) + + async def add_token(self, chain: str, network: str, address: str, symbol: str, name: str, decimals: int) -> Dict: + """Add a custom token to Gateway's token list""" + return await self._request("POST", "tokens", json={ + "chain": chain, + "network": network, + "token": { + "address": address, + "symbol": symbol, + "name": name, + "decimals": decimals + } + }) + + async def delete_token(self, chain: str, network: str, token_address: str) -> Dict: + """Delete a custom token from Gateway's token list""" + return await self._request("DELETE", f"tokens/{token_address}", params={ + "chain": chain, + "network": network + }) + + async def get_config(self, namespace: str) -> Dict: + """Get configuration for a specific namespace (connector or chain-network)""" + return await self._request("GET", "config", params={"namespace": namespace}) + + async def update_config(self, namespace: str, path: str, value: any) -> Dict: + """Update a configuration value for a namespace""" + return await self._request("POST", "config/update", json={ + "namespace": namespace, + "path": path, + "value": value + }) + + async def get_pools(self, connector: str, network: str) -> List[Dict]: + """Get pools for a connector and network""" + return await self._request("GET", "pools", params={ + "connector": connector, + "network": network + }) + + async def add_pool(self, connector: str, pool_type: str, network: str, base_symbol: str, quote_symbol: str, address: str) -> Dict: + """Add a new pool""" + return await self._request("POST", "pools", json={ + "connector": connector, + "type": pool_type, + "network": network, + "baseSymbol": base_symbol, + "quoteSymbol": quote_symbol, + "address": address + }) + + async def pool_info(self, connector: str, network: str, pool_address: str) -> Dict: + """Get detailed information about a specific pool""" + return await self._request("POST", "clmm/liquidity/pool", json={ + "connector": connector, + "network": 
network, + "poolAddress": pool_address + }) + + # ============================================ + # Swap Operations + # ============================================ + + async def quote_swap( + self, + connector: str, + network: str, + base_asset: str, + quote_asset: str, + amount: float, + side: str, + slippage_pct: Optional[float] = None, + pool_address: Optional[str] = None + ) -> Dict: + """Get a quote for a swap""" + payload = { + "network": network, + "baseToken": base_asset, + "quoteToken": quote_asset, + "amount": str(amount), + "side": side.upper() + } + if slippage_pct is not None: + payload["slippagePct"] = slippage_pct + if pool_address: + payload["poolAddress"] = pool_address + + return await self._request("GET", f"connectors/{connector}/router/quote-swap", params=payload) + + async def execute_swap( + self, + connector: str, + network: str, + wallet_address: str, + base_asset: str, + quote_asset: str, + amount: float, + side: str, + slippage_pct: Optional[float] = None + ) -> Dict: + """Execute a swap""" + payload = { + "network": network, + "walletAddress": wallet_address, + "baseToken": base_asset, + "quoteToken": quote_asset, + "amount": str(amount), + "side": side.upper() + } + if slippage_pct is not None: + payload["slippagePct"] = slippage_pct + + return await self._request("POST", f"connectors/{connector}/router/execute-swap", json=payload) + + async def execute_quote( + self, + connector: str, + network: str, + wallet_address: str, + quote_id: str + ) -> Dict: + """Execute a previously obtained quote""" + return await self._request("POST", f"connectors/{connector}/router/execute-quote", json={ + "network": network, + "address": wallet_address, + "quoteId": quote_id + }) + + # ============================================ + # Liquidity Operations - CLMM (Concentrated Liquidity) + # ============================================ + + async def clmm_open_position( + self, + connector: str, + network: str, + wallet_address: str, + pool_address: str, + 
lower_price: float, + upper_price: float, + base_token_amount: Optional[float] = None, + quote_token_amount: Optional[float] = None, + slippage_pct: Optional[float] = None, + extra_params: Optional[Dict] = None + ) -> Dict: + """Open a NEW CLMM position with initial liquidity""" + payload = { + "network": network, + "walletAddress": wallet_address, + "poolAddress": pool_address, + "lowerPrice": lower_price, + "upperPrice": upper_price + } + if base_token_amount is not None: + payload["baseTokenAmount"] = str(base_token_amount) + if quote_token_amount is not None: + payload["quoteTokenAmount"] = str(quote_token_amount) + if slippage_pct is not None: + payload["slippagePct"] = slippage_pct + + # Add any connector-specific parameters + if extra_params: + payload.update(extra_params) + + return await self._request("POST", f"connectors/{connector}/clmm/open-position", json=payload) + + async def clmm_add_liquidity( + self, + connector: str, + network: str, + wallet_address: str, + position_address: str, + base_token_amount: Optional[float] = None, + quote_token_amount: Optional[float] = None, + slippage_pct: Optional[float] = None + ) -> Dict: + """Add more liquidity to an existing CLMM position""" + payload = { + "connector": connector, + "network": network, + "address": wallet_address, + "positionAddress": position_address + } + if base_token_amount is not None: + payload["baseTokenAmount"] = str(base_token_amount) + if quote_token_amount is not None: + payload["quoteTokenAmount"] = str(quote_token_amount) + if slippage_pct is not None: + payload["slippagePct"] = slippage_pct + + return await self._request("POST", "clmm/liquidity/add", json=payload) + + async def clmm_close_position( + self, + connector: str, + network: str, + wallet_address: str, + position_address: str + ) -> Dict: + """Close a CLMM position completely""" + return await self._request("POST", f"connectors/{connector}/clmm/close-position", json={ + "network": network, + "walletAddress": 
wallet_address, + "positionAddress": position_address + }) + + async def clmm_remove_liquidity( + self, + connector: str, + network: str, + wallet_address: str, + position_address: str, + percentage: float + ) -> Dict: + """Remove liquidity from a CLMM position (partial)""" + return await self._request("POST", "clmm/liquidity/remove", json={ + "connector": connector, + "network": network, + "address": wallet_address, + "positionAddress": position_address, + "percentage": percentage + }) + + async def clmm_position_info( + self, + connector: str, + network: str, + wallet_address: str, + position_address: str + ) -> Dict: + """Get CLMM position information""" + return await self._request("POST", "clmm/liquidity/position", json={ + "connector": connector, + "network": network, + "address": wallet_address, + "positionAddress": position_address + }) + + async def clmm_positions_owned( + self, + connector: str, + network: str, + wallet_address: str, + pool_address: str + ) -> Dict: + """Get all CLMM positions owned by wallet for a specific pool""" + params = { + "network": network, + "walletAddress": wallet_address, + "poolAddress": pool_address + } + + return await self._request("GET", f"connectors/{connector}/clmm/positions-owned", params=params) + + async def clmm_collect_fees( + self, + connector: str, + network: str, + wallet_address: str, + position_address: str + ) -> Dict: + """Collect accumulated fees from a CLMM position""" + return await self._request("POST", f"connectors/{connector}/clmm/collect-fees", json={ + "network": network, + "address": wallet_address, + "positionAddress": position_address + }) + + async def clmm_pool_info( + self, + connector: str, + network: str, + pool_address: str + ) -> Dict: + """Get detailed CLMM pool information by pool address""" + return await self._request("GET", f"connectors/{connector}/clmm/pool-info", params={ + "network": network, + "poolAddress": pool_address + }) + + # ============================================ + # 
Transaction Polling + # ============================================ + + async def poll_transaction( + self, + network_id: str, + tx_hash: str, + wallet_address: Optional[str] = None + ) -> Optional[Dict]: + """ + Poll transaction status on blockchain. + + Args: + network_id: Network ID in format 'chain-network' (e.g., 'solana-mainnet-beta', 'ethereum-mainnet') + tx_hash: Transaction hash/signature + wallet_address: Optional wallet address for verification + + Returns: + Transaction status dict with fields: + - txStatus: 1 for confirmed, 0 for failed/pending + - fee: Transaction fee amount + - txData: Full transaction data including meta.err + Returns None if Gateway is unavailable or request fails. + """ + try: + # Split network_id into chain and network + parts = network_id.split('-', 1) + if len(parts) != 2: + logger.error(f"Invalid network_id format: {network_id}. Expected 'chain-network'") + return None + + chain, network = parts + + payload = { + "network": network, + "signature": tx_hash + } + if wallet_address: + payload["walletAddress"] = wallet_address + + return await self._request("POST", f"chains/{chain}/poll", json=payload) + except Exception as e: + logger.error(f"Error polling transaction {tx_hash}: {e}") + return None + diff --git a/services/gateway_service.py b/services/gateway_service.py new file mode 100644 index 00000000..d7378838 --- /dev/null +++ b/services/gateway_service.py @@ -0,0 +1,361 @@ +import logging +import os +import platform +import shutil +from typing import Optional, Dict + +import docker +from docker.errors import DockerException +from docker.types import LogConfig + +from models.gateway import GatewayConfig, GatewayStatus + +# Create module-specific logger +logger = logging.getLogger(__name__) + + +class GatewayService: + """ + Service for managing the Hummingbot Gateway Docker container. + Ensures only one Gateway instance can exist at a time. 
+ """ + + GATEWAY_CONTAINER_NAME = "gateway" + GATEWAY_DIR = "gateway-files" + + def __init__(self): + self.SOURCE_PATH = os.getcwd() + # Use BOTS_PATH if set (for Docker), otherwise use SOURCE_PATH (for local) + self.BOTS_PATH = os.environ.get('BOTS_PATH', self.SOURCE_PATH) + try: + self.client = docker.from_env() + except DockerException as e: + logger.error(f"Failed to connect to Docker. Error: {e}") + raise + + def _ensure_gateway_directories(self): + """Create necessary directories for Gateway if they don't exist""" + # Gateway files are at root level, same as bots directory + gateway_base = os.path.join(self.BOTS_PATH, self.GATEWAY_DIR) + + conf_dir = os.path.join(gateway_base, "conf") + logs_dir = os.path.join(gateway_base, "logs") + + os.makedirs(conf_dir, exist_ok=True) + os.makedirs(logs_dir, exist_ok=True) + + return { + "base": gateway_base, + "conf": conf_dir, + "logs": logs_dir + } + + def _get_gateway_container(self) -> Optional[docker.models.containers.Container]: + """Get the Gateway container if it exists""" + try: + return self.client.containers.get(self.GATEWAY_CONTAINER_NAME) + except docker.errors.NotFound: + return None + except DockerException as e: + logger.error(f"Error getting Gateway container: {e}") + return None + + def get_status(self) -> GatewayStatus: + """Get the current status of the Gateway container""" + container = self._get_gateway_container() + + if container is None: + return GatewayStatus( + running=False, + container_id=None, + image=None, + created_at=None, + port=None + ) + + # Extract port from container configuration + port = None + if container.status == "running": + # Check if using host networking + network_mode = container.attrs.get("HostConfig", {}).get("NetworkMode", "") + if network_mode == "host": + # Host networking: Gateway uses port 15888 directly + port = 15888 + else: + # Bridge networking: Extract from port mappings + ports = container.attrs.get("NetworkSettings", {}).get("Ports", {}) + if "15888/tcp" in 
ports and ports["15888/tcp"]: + port = int(ports["15888/tcp"][0]["HostPort"]) + + return GatewayStatus( + running=container.status == "running", + container_id=container.id, + image=container.image.tags[0] if container.image.tags else container.image.id[:12], + created_at=container.attrs.get("Created"), + port=port + ) + + def start(self, config: GatewayConfig) -> Dict[str, any]: + """ + Start the Gateway container. + If a container already exists, it will be stopped and removed before creating a new one. + """ + # Check if Gateway is already running + existing_container = self._get_gateway_container() + if existing_container: + if existing_container.status == "running": + return { + "success": False, + "message": f"Gateway is already running. Use stop first or restart to update configuration." + } + else: + # Remove stopped container + logger.info("Removing stopped Gateway container") + existing_container.remove(force=True) + + # Ensure directories exist + dirs = self._ensure_gateway_directories() + + # Set up volumes - use BOTS_PATH which contains the HOST path + volumes = { + os.path.join(self.BOTS_PATH, self.GATEWAY_DIR, "conf"): {'bind': '/home/gateway/conf', 'mode': 'rw'}, + os.path.join(self.BOTS_PATH, self.GATEWAY_DIR, "logs"): {'bind': '/home/gateway/logs', 'mode': 'rw'}, + } + + # Set up environment variables + environment = { + "GATEWAY_PASSPHRASE": config.passphrase, + "DEV": str(config.dev_mode).lower(), + } + + # Configure logging + log_config = LogConfig( + type="json-file", + config={ + 'max-size': '10m', + 'max-file': "5", + } + ) + + # Detect platform and configure networking + # Native Linux: Use host networking (works natively) + # Docker Desktop (macOS/Windows) or containerized: Use bridge networking + system_platform = platform.system() + + # Check if running inside Docker container (Docker Desktop or containerized API) + in_container = os.path.exists('/.dockerenv') or os.path.exists('/run/.containerenv') + + # Only use host networking on 
native Linux (not inside a container) + use_host_network = system_platform == "Linux" and not in_container + + if use_host_network: + logger.info("Detected native Linux - using host network mode for Gateway") + else: + logger.info(f"Detected {system_platform} (in_container={in_container}) - using bridge networking for Gateway") + + try: + # Build container configuration + container_config = { + "image": config.image, + "name": self.GATEWAY_CONTAINER_NAME, + "volumes": volumes, + "environment": environment, + "detach": True, + "restart_policy": {"Name": "always"}, + "log_config": log_config, + } + + if use_host_network: + # Linux: Use host networking + container_config["network_mode"] = "host" + else: + # macOS/Windows: Use bridge networking with port mapping + container_config["ports"] = {'15888/tcp': config.port} + + container = self.client.containers.run(**container_config) + + # On macOS/Windows, connect to emqx-bridge network if it exists + if not use_host_network: + possible_networks = ["hummingbot-api_emqx-bridge", "emqx-bridge"] + for net in possible_networks: + try: + network = self.client.networks.get(net) + network.connect(container) + logger.info(f"Connected Gateway to {net} network") + break + except docker.errors.NotFound: + continue + + logger.info(f"Gateway container started successfully: {container.id}") + return { + "success": True, + "message": f"Gateway started successfully", + "container_id": container.id, + "port": config.port + } + + except DockerException as e: + logger.error(f"Failed to start Gateway container: {e}") + return { + "success": False, + "message": f"Failed to start Gateway: {str(e)}" + } + + def stop(self) -> Dict[str, any]: + """Stop the Gateway container""" + container = self._get_gateway_container() + + if container is None: + return { + "success": False, + "message": "Gateway container not found" + } + + try: + if container.status == "running": + container.stop() + logger.info("Gateway container stopped") + return { + 
"success": True, + "message": "Gateway stopped successfully" + } + except DockerException as e: + logger.error(f"Failed to stop Gateway container: {e}") + return { + "success": False, + "message": f"Failed to stop Gateway: {str(e)}" + } + + def restart(self, config: Optional[GatewayConfig] = None) -> Dict[str, any]: + """ + Restart the Gateway container. + If config is provided, the container will be recreated with the new configuration. + """ + container = self._get_gateway_container() + + if container is None: + if config: + # No existing container, just start with new config + return self.start(config) + else: + return { + "success": False, + "message": "Gateway container not found. Use start with configuration to create one." + } + + if config: + # Stop and remove existing container, then start with new config + try: + container.remove(force=True) + logger.info("Removed existing Gateway container for restart with new config") + except DockerException as e: + logger.error(f"Failed to remove Gateway container: {e}") + return { + "success": False, + "message": f"Failed to remove existing container: {str(e)}" + } + return self.start(config) + else: + # Simple restart of existing container + try: + container.restart() + logger.info("Gateway container restarted") + return { + "success": True, + "message": "Gateway restarted successfully" + } + except DockerException as e: + logger.error(f"Failed to restart Gateway container: {e}") + return { + "success": False, + "message": f"Failed to restart Gateway: {str(e)}" + } + + def remove(self, remove_data: bool = False) -> Dict[str, any]: + """ + Remove the Gateway container and optionally its data. 
+ + Args: + remove_data: If True, also remove the gateway-files directory + """ + container = self._get_gateway_container() + + if container is None: + if remove_data: + # No container, but try to remove data if requested + gateway_dir = os.path.join(self.SOURCE_PATH, self.GATEWAY_DIR) + if os.path.exists(gateway_dir): + try: + shutil.rmtree(gateway_dir) + logger.info(f"Removed Gateway data directory: {gateway_dir}") + return { + "success": True, + "message": "Gateway data removed (no container was found)" + } + except Exception as e: + logger.error(f"Failed to remove Gateway data: {e}") + return { + "success": False, + "message": f"Failed to remove Gateway data: {str(e)}" + } + return { + "success": False, + "message": "Gateway container not found" + } + + try: + # Remove container + container.remove(force=True) + logger.info("Gateway container removed") + + # Remove data if requested + if remove_data: + gateway_dir = os.path.join(self.SOURCE_PATH, self.GATEWAY_DIR) + if os.path.exists(gateway_dir): + shutil.rmtree(gateway_dir) + logger.info(f"Removed Gateway data directory: {gateway_dir}") + return { + "success": True, + "message": "Gateway container and data removed successfully" + } + + return { + "success": True, + "message": "Gateway container removed successfully" + } + + except DockerException as e: + logger.error(f"Failed to remove Gateway container: {e}") + return { + "success": False, + "message": f"Failed to remove Gateway: {str(e)}" + } + except Exception as e: + logger.error(f"Failed to remove Gateway data: {e}") + return { + "success": False, + "message": f"Gateway container removed but failed to remove data: {str(e)}" + } + + def get_logs(self, tail: int = 100) -> Dict[str, any]: + """Get logs from the Gateway container""" + container = self._get_gateway_container() + + if container is None: + return { + "success": False, + "message": "Gateway container not found" + } + + try: + logs = container.logs(tail=tail, timestamps=True).decode('utf-8') + 
return { + "success": True, + "logs": logs + } + except DockerException as e: + logger.error(f"Failed to get Gateway logs: {e}") + return { + "success": False, + "message": f"Failed to get logs: {str(e)}" + } diff --git a/services/gateway_transaction_poller.py b/services/gateway_transaction_poller.py new file mode 100644 index 00000000..938283ef --- /dev/null +++ b/services/gateway_transaction_poller.py @@ -0,0 +1,539 @@ +""" +Gateway Transaction Poller + +This service polls blockchain transactions to confirm Gateway swap and CLMM operations. +Unlike CEX connectors that emit events, DEX transactions require active polling until confirmation. + +Additionally polls CLMM position state to keep database in sync with on-chain state. +""" +import asyncio +import logging +from typing import Optional, Dict, List +from datetime import datetime, timedelta, timezone +from decimal import Decimal + +from database import AsyncDatabaseManager +from database.repositories import GatewaySwapRepository, GatewayCLMMRepository +from database.models import GatewayCLMMEvent, GatewayCLMMPosition +from services.gateway_client import GatewayClient + +logger = logging.getLogger(__name__) + + +class GatewayTransactionPoller: + """ + Polls Gateway for transaction status updates and position state. + + - Transaction polling: Confirms pending swap/CLMM transactions + - Position polling: Updates CLMM position state (in_range, liquidity, fees) + + Unlike CEX connectors that emit events when orders fill, DEX transactions + need to be polled until they are confirmed on-chain or fail. 
+ """ + + def __init__( + self, + db_manager: AsyncDatabaseManager, + gateway_client: GatewayClient, + poll_interval: int = 10, # Poll every 10 seconds for transactions + position_poll_interval: int = 300, # Poll every 5 minutes for positions + max_retry_age: int = 3600 # Stop retrying after 1 hour + ): + self.db_manager = db_manager + self.gateway_client = gateway_client + self.poll_interval = poll_interval + self.position_poll_interval = position_poll_interval + self.max_retry_age = max_retry_age + self._running = False + self._poll_task: Optional[asyncio.Task] = None + self._position_poll_task: Optional[asyncio.Task] = None + self._last_position_poll: Optional[datetime] = None + + async def start(self): + """Start the polling service.""" + if self._running: + logger.warning("GatewayTransactionPoller already running") + return + + self._running = True + self._poll_task = asyncio.create_task(self._poll_loop()) + self._position_poll_task = asyncio.create_task(self._position_poll_loop()) + logger.info(f"GatewayTransactionPoller started (tx_poll={self.poll_interval}s, pos_poll={self.position_poll_interval}s)") + + async def stop(self): + """Stop the polling service.""" + if not self._running: + return + + self._running = False + + # Cancel transaction polling task + if self._poll_task: + self._poll_task.cancel() + try: + await self._poll_task + except asyncio.CancelledError: + pass + + # Cancel position polling task + if self._position_poll_task: + self._position_poll_task.cancel() + try: + await self._position_poll_task + except asyncio.CancelledError: + pass + + logger.info("GatewayTransactionPoller stopped") + + async def _poll_loop(self): + """Main polling loop.""" + while self._running: + try: + await self._poll_pending_transactions() + except Exception as e: + logger.error(f"Error in poll loop: {e}", exc_info=True) + + # Wait before next poll + try: + await asyncio.sleep(self.poll_interval) + except asyncio.CancelledError: + break + + async def 
_poll_pending_transactions(self): + """Poll all pending transactions and update their status.""" + try: + async with self.db_manager.get_session_context() as session: + swap_repo = GatewaySwapRepository(session) + clmm_repo = GatewayCLMMRepository(session) + + # Get pending swaps + pending_swaps = await swap_repo.get_pending_swaps(limit=100) + logger.debug(f"Found {len(pending_swaps)} pending swaps") + + for swap in pending_swaps: + # Skip if too old (likely failed without proper error) + age = (datetime.now(timezone.utc) - swap.timestamp).total_seconds() + if age > self.max_retry_age: + logger.warning(f"Swap {swap.transaction_hash} exceeded max retry age, marking as FAILED") + await swap_repo.update_swap_status( + transaction_hash=swap.transaction_hash, + status="FAILED", + error_message="Transaction confirmation timeout" + ) + continue + + # Poll transaction status + await self._poll_swap_transaction(swap, swap_repo) + + # Get pending CLMM events + pending_events = await clmm_repo.get_pending_events(limit=100) + logger.debug(f"Found {len(pending_events)} pending CLMM events") + + for event in pending_events: + # Skip if too old + age = (datetime.now(timezone.utc) - event.timestamp).total_seconds() + if age > self.max_retry_age: + logger.warning(f"CLMM event {event.transaction_hash} exceeded max retry age, marking as FAILED") + await clmm_repo.update_event_status( + transaction_hash=event.transaction_hash, + status="FAILED", + error_message="Transaction confirmation timeout" + ) + continue + + # Poll transaction status + await self._poll_clmm_event_transaction(event, clmm_repo) + + except Exception as e: + logger.error(f"Error polling pending transactions: {e}", exc_info=True) + + async def _poll_swap_transaction(self, swap, swap_repo: GatewaySwapRepository): + """Poll a specific swap transaction status.""" + try: + # Parse network into chain and network + parts = swap.network.split('-', 1) + if len(parts) != 2: + logger.error(f"Invalid network format for swap 
{swap.transaction_hash}: {swap.network}") + return + + chain, network = parts + + # Check transaction status on Gateway/blockchain + # Note: This is a placeholder - actual implementation depends on Gateway API + status_result = await self._check_transaction_status( + chain=chain, + network=network, + tx_hash=swap.transaction_hash + ) + + if status_result: + if status_result["status"] == "CONFIRMED": + logger.info(f"Swap transaction confirmed: {swap.transaction_hash}") + await swap_repo.update_swap_status( + transaction_hash=swap.transaction_hash, + status="CONFIRMED", + gas_fee=Decimal(str(status_result.get("gas_fee", 0))) if status_result.get("gas_fee") else None, + gas_token=status_result.get("gas_token") + ) + elif status_result["status"] == "FAILED": + logger.warning(f"Swap transaction failed: {swap.transaction_hash}") + await swap_repo.update_swap_status( + transaction_hash=swap.transaction_hash, + status="FAILED", + error_message=status_result.get("error_message", "Transaction failed on-chain") + ) + # If status is still pending, do nothing and retry later + + except Exception as e: + logger.error(f"Error polling swap transaction {swap.transaction_hash}: {e}") + + async def _poll_clmm_event_transaction(self, event, clmm_repo: GatewayCLMMRepository): + """Poll a specific CLMM event transaction status.""" + try: + # Get the position to access network info + position = await clmm_repo.get_position_by_address( + position_address=(await self.db_manager.get_session_context().__aenter__()) + .query(GatewayCLMMEvent) + .filter(GatewayCLMMEvent.id == event.id) + .first() + .position.position_address + ) + + if not position: + logger.error(f"Position not found for CLMM event {event.transaction_hash}") + return + + # Parse network + parts = position.network.split('-', 1) + if len(parts) != 2: + logger.error(f"Invalid network format for CLMM event {event.transaction_hash}: {position.network}") + return + + chain, network = parts + + # Check transaction status + 
status_result = await self._check_transaction_status( + chain=chain, + network=network, + tx_hash=event.transaction_hash + ) + + if status_result: + if status_result["status"] == "CONFIRMED": + logger.info(f"CLMM event transaction confirmed: {event.transaction_hash}") + await clmm_repo.update_event_status( + transaction_hash=event.transaction_hash, + status="CONFIRMED", + gas_fee=Decimal(str(status_result.get("gas_fee", 0))) if status_result.get("gas_fee") else None, + gas_token=status_result.get("gas_token") + ) + + # Update position state based on event type + await self._update_position_from_event(event, clmm_repo) + + elif status_result["status"] == "FAILED": + logger.warning(f"CLMM event transaction failed: {event.transaction_hash}") + await clmm_repo.update_event_status( + transaction_hash=event.transaction_hash, + status="FAILED", + error_message=status_result.get("error_message", "Transaction failed on-chain") + ) + + except Exception as e: + logger.error(f"Error polling CLMM event transaction {event.transaction_hash}: {e}") + + async def _update_position_from_event(self, event, clmm_repo: GatewayCLMMRepository): + """Update CLMM position state based on confirmed event.""" + try: + # Get position through session + async with self.db_manager.get_session_context() as session: + result = await session.execute( + session.query(GatewayCLMMEvent).filter(GatewayCLMMEvent.id == event.id) + ) + event_with_position = result.scalar_one_or_none() + + if not event_with_position or not event_with_position.position: + logger.error(f"Position not found for event {event.id}") + return + + position = event_with_position.position + + if event.event_type == "CLOSE": + await clmm_repo.close_position(position.position_address) + + elif event.event_type == "COLLECT_FEES": + # Add collected fees to cumulative total + if event.base_fee_collected or event.quote_fee_collected: + new_base_collected = float(position.base_fee_collected or 0) + float(event.base_fee_collected or 0) + 
new_quote_collected = float(position.quote_fee_collected or 0) + float(event.quote_fee_collected or 0) + + await clmm_repo.update_position_fees( + position_address=position.position_address, + base_fee_collected=Decimal(str(new_base_collected)), + quote_fee_collected=Decimal(str(new_quote_collected)), + base_fee_pending=Decimal("0"), + quote_fee_pending=Decimal("0") + ) + + except Exception as e: + logger.error(f"Error updating position from event: {e}", exc_info=True) + + async def _check_transaction_status( + self, + chain: str, + network: str, + tx_hash: str + ) -> Optional[Dict]: + """ + Check transaction status on blockchain via Gateway. + + Returns: + Dict with status, gas_fee, gas_token, and error_message if available. + None if transaction not yet confirmed or pending. + """ + try: + # Check if Gateway is available + if not await self.gateway_client.ping(): + logger.warning("Gateway not available for transaction polling") + return None + + # Reconstruct network_id from chain and network + network_id = f"{chain}-{network}" + + # Poll transaction status from Gateway + result = await self.gateway_client.poll_transaction( + network_id=network_id, + tx_hash=tx_hash + ) + + # Check if we got a valid response + if result is None or not isinstance(result, dict): + logger.warning(f"Invalid response from Gateway for transaction {tx_hash} on {network_id}: {result}") + return None + + logger.debug(f"Polled transaction {tx_hash} on {network_id}: txStatus={result.get('txStatus')}") + + # Parse the response with defensive checks + tx_status = result.get("txStatus") + tx_data = result.get("txData") or {} + meta = tx_data.get("meta") if isinstance(tx_data, dict) else {} + error = meta.get("err") if isinstance(meta, dict) else None + + # Determine gas token based on chain + gas_token = { + "solana": "SOL", + "ethereum": "ETH", + "arbitrum": "ETH", + "optimism": "ETH", + "polygon": "MATIC", + "avalanche": "AVAX" + }.get(chain, "UNKNOWN") + + # Transaction is confirmed if 
txStatus == 1 and no error + if tx_status == 1 and error is None: + return { + "status": "CONFIRMED", + "gas_fee": result.get("fee", 0), + "gas_token": gas_token, + "error_message": None + } + + # Transaction failed if there's an error + if error is not None: + error_msg = str(error) if error else "Transaction failed on-chain" + return { + "status": "FAILED", + "gas_fee": result.get("fee", 0), + "gas_token": gas_token, + "error_message": error_msg + } + + # Transaction still pending (txStatus == 0 or not finalized) + return None + + except Exception as e: + logger.error(f"Error checking transaction status for {tx_hash}: {e}") + return None + + async def poll_transaction_once(self, tx_hash: str, network_id: str, wallet_address: Optional[str] = None) -> Optional[Dict]: + """ + Poll a specific transaction once (useful for immediate status checks). + + Args: + tx_hash: Transaction hash + network_id: Network ID in format 'chain-network' (e.g., 'solana-mainnet-beta') + wallet_address: Optional wallet address for verification + + Returns: + Transaction status dict or None if pending + """ + parts = network_id.split('-', 1) + if len(parts) != 2: + logger.error(f"Invalid network format: {network_id}") + return None + + chain, network = parts + return await self._check_transaction_status(chain, network, tx_hash) + + # ============================================ + # Position State Polling + # ============================================ + + async def _position_poll_loop(self): + """Position state polling loop (runs less frequently).""" + while self._running: + try: + # Check if it's time to poll positions + now = datetime.now(timezone.utc) + if self._last_position_poll is None or \ + (now - self._last_position_poll).total_seconds() >= self.position_poll_interval: + await self._poll_open_positions() + self._last_position_poll = now + + # Sleep for a short time to avoid busy waiting + await asyncio.sleep(10) + except asyncio.CancelledError: + break + except Exception as e: + 
logger.error(f"Error in position poll loop: {e}", exc_info=True) + await asyncio.sleep(10) + + async def _poll_open_positions(self): + """Poll all open CLMM positions and update their state.""" + try: + # Check if Gateway is available + if not await self.gateway_client.ping(): + logger.debug("Gateway not available, skipping position polling") + return + + async with self.db_manager.get_session_context() as session: + clmm_repo = GatewayCLMMRepository(session) + + # Get all open positions + open_positions = await clmm_repo.get_open_positions() + if not open_positions: + logger.debug("No open CLMM positions to poll") + return + + logger.info(f"Polling {len(open_positions)} open CLMM positions") + + # Extract position details before closing session + position_details = [ + { + "position_address": pos.position_address, + "pool_address": pos.pool_address, + "connector": pos.connector, + "network": pos.network, + "wallet_address": pos.wallet_address + } + for pos in open_positions + ] + + # Poll each position in a separate session + for pos_detail in position_details: + try: + async with self.db_manager.get_session_context() as session: + clmm_repo = GatewayCLMMRepository(session) + position = await clmm_repo.get_position_by_address(pos_detail["position_address"]) + if position and position.status == "OPEN": + await self._refresh_position_state(position, clmm_repo) + except Exception as e: + logger.warning(f"Failed to poll position {pos_detail['position_address']}: {e}") + continue + + except Exception as e: + logger.error(f"Error polling open positions: {e}", exc_info=True) + + async def _refresh_position_state(self, position: GatewayCLMMPosition, clmm_repo: GatewayCLMMRepository): + """ + Refresh a single position's state from Gateway. 
+ + Updates: + - in_range status + - liquidity amounts + - pending fees + - position status (if closed externally) + """ + try: + # Parse network to get chain and network name + parts = position.network.split('-', 1) + if len(parts) != 2: + logger.error(f"Invalid network format for position {position.position_address}: {position.network}") + return + + chain, network = parts + + # Get all positions for this pool from Gateway + try: + positions_list = await self.gateway_client.clmm_positions_owned( + connector=position.connector, + network=network, + wallet_address=position.wallet_address, + pool_address=position.pool_address + ) + + # Find our specific position in the list + result = None + if isinstance(positions_list, list): + for pos in positions_list: + if pos.get("address") == position.position_address: + result = pos + break + + # If position not found, it was closed externally + if result is None: + logger.info(f"Position {position.position_address} not found on Gateway, marking as CLOSED") + await clmm_repo.close_position(position.position_address) + return + + except Exception as e: + logger.warning(f"Error fetching position {position.position_address} from Gateway: {e}") + return + + # Extract current state + current_price = Decimal(str(result.get("price", 0))) + lower_price = Decimal(str(result.get("lowerPrice", 0))) if result.get("lowerPrice") else Decimal("0") + upper_price = Decimal(str(result.get("upperPrice", 0))) if result.get("upperPrice") else Decimal("0") + + # Calculate in_range status + in_range = "UNKNOWN" + if current_price > 0 and lower_price > 0 and upper_price > 0: + if lower_price <= current_price <= upper_price: + in_range = "IN_RANGE" + else: + in_range = "OUT_OF_RANGE" + + # Extract token amounts + base_token_amount = Decimal(str(result.get("baseTokenAmount", 0))) + quote_token_amount = Decimal(str(result.get("quoteTokenAmount", 0))) + + # Check if position has been closed (zero liquidity) + if base_token_amount == 0 and 
quote_token_amount == 0: + logger.info(f"Position {position.position_address} has zero liquidity, marking as CLOSED") + await clmm_repo.close_position(position.position_address) + return + + # Update liquidity amounts and in_range status + await clmm_repo.update_position_liquidity( + position_address=position.position_address, + base_token_amount=base_token_amount, + quote_token_amount=quote_token_amount, + in_range=in_range + ) + + # Update pending fees if available + base_fee_pending = Decimal(str(result.get("baseFeeAmount", 0))) + quote_fee_pending = Decimal(str(result.get("quoteFeeAmount", 0))) + + if base_fee_pending or quote_fee_pending: + await clmm_repo.update_position_fees( + position_address=position.position_address, + base_fee_pending=base_fee_pending, + quote_fee_pending=quote_fee_pending + ) + + logger.debug(f"Refreshed position {position.position_address}: in_range={in_range}, " + f"base={base_token_amount}, quote={quote_token_amount}") + + except Exception as e: + logger.error(f"Error refreshing position state {position.position_address}: {e}", exc_info=True) diff --git a/setup.sh b/setup.sh index 6b491e3c..819e39dc 100755 --- a/setup.sh +++ b/setup.sh @@ -30,6 +30,12 @@ echo -n "API password [default: admin]: " read PASSWORD PASSWORD=${PASSWORD:-admin} +echo "" +echo -e "${YELLOW}Gateway Configuration (Optional)${NC}" +echo -n "Gateway passphrase [default: admin, press Enter to skip]: " +read GATEWAY_PASSPHRASE +GATEWAY_PASSPHRASE=${GATEWAY_PASSPHRASE:-admin} + # Set paths and defaults BOTS_PATH=$(pwd) @@ -101,6 +107,12 @@ AWS_S3_DEFAULT_BUCKET_NAME=$S3_BUCKET LOGFIRE_ENVIRONMENT=$LOGFIRE_ENV BANNED_TOKENS=$BANNED_TOKENS +# ================================================================= +# 🌐 Gateway Configuration (GATEWAY_*) - Optional +# ================================================================= +GATEWAY_PASSPHRASE=$GATEWAY_PASSPHRASE +GATEWAY_URL=http://localhost:15888 + # ================================================================= 
# 📁 Legacy Settings (maintained for backward compatibility) # ================================================================= @@ -155,4 +167,64 @@ docker pull hummingbot/hummingbot:latest & # Wait for both operations to complete wait -echo -e "${GREEN}✅ All Docker operations completed!${NC}" +echo -e "${GREEN}✅ Docker containers started!${NC}" +echo "" + +# Wait for PostgreSQL to be ready +echo -e "${YELLOW}⏳ Waiting for PostgreSQL to initialize...${NC}" +sleep 5 + +# Check PostgreSQL connection +MAX_RETRIES=30 +RETRY_COUNT=0 +DB_READY=false + +while [ $RETRY_COUNT -lt $MAX_RETRIES ]; do + if docker exec hummingbot-postgres pg_isready -U hbot -d hummingbot_api > /dev/null 2>&1; then + DB_READY=true + break + fi + RETRY_COUNT=$((RETRY_COUNT + 1)) + echo -ne "\r${YELLOW}⏳ Waiting for database... ($RETRY_COUNT/$MAX_RETRIES)${NC}" + sleep 2 +done +echo "" + +if [ "$DB_READY" = true ]; then + echo -e "${GREEN}✅ PostgreSQL is ready!${NC}" + + # Verify database and user exist + echo -e "${YELLOW}🔍 Verifying database configuration...${NC}" + + # Check if hbot user exists + USER_EXISTS=$(docker exec hummingbot-postgres psql -U postgres -tAc "SELECT 1 FROM pg_roles WHERE rolname='hbot'" 2>/dev/null) + + # Check if database exists + DB_EXISTS=$(docker exec hummingbot-postgres psql -U postgres -tAc "SELECT 1 FROM pg_database WHERE datname='hummingbot_api'" 2>/dev/null) + + if [ "$USER_EXISTS" = "1" ] && [ "$DB_EXISTS" = "1" ]; then + echo -e "${GREEN}✅ Database 'hummingbot_api' and user 'hbot' verified successfully!${NC}" + else + echo -e "${YELLOW}⚠️ Database initialization may be incomplete. Running manual initialization...${NC}" + + # Run the init script manually + docker exec -i hummingbot-postgres psql -U postgres < init-db.sql + + if [ $? -eq 0 ]; then + echo -e "${GREEN}✅ Database manually initialized successfully!${NC}" + else + echo -e "${RED}❌ Failed to initialize database. 
See troubleshooting below.${NC}" + fi + fi +else + echo -e "${RED}❌ PostgreSQL failed to start within timeout period${NC}" + echo "" + echo -e "${YELLOW}Troubleshooting steps:${NC}" + echo "1. Check PostgreSQL logs: docker logs hummingbot-postgres" + echo "2. Verify container status: docker ps -a | grep postgres" + echo "3. Try removing old volumes: docker compose down -v && docker compose up emqx postgres -d" + echo "4. Manually verify database: docker exec -it hummingbot-postgres psql -U postgres" + echo "" +fi + +echo -e "${GREEN}✅ Setup completed!${NC}"