diff --git a/.claude/settings.local.json b/.claude/settings.local.json index d33f044..89c61ca 100644 --- a/.claude/settings.local.json +++ b/.claude/settings.local.json @@ -5,7 +5,18 @@ "Bash(uv add:*)", "Bash(uv sync:*)", "Bash(find:*)", - "WebFetch(domain:docs.kalshi.com)" + "WebFetch(domain:docs.kalshi.com)", + "Bash(git rebase:*)", + "Bash(gh pr view:*)", + "Bash(gh pr diff:*)", + "WebFetch(domain:docs.polymarket.com)", + "WebSearch", + "Bash(python:*)", + "WebFetch(domain:github.com)", + "Bash(bun add:*)", + "Bash(ln:*)", + "Bash(grep:*)", + "Bash(bun run build:*)" ], "deny": [], "ask": [] diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..57cdd9b --- /dev/null +++ b/.dockerignore @@ -0,0 +1,28 @@ +# Environment and secrets +.env +.env.* + +# Python +__pycache__/ +*.py[cod] +.venv/ +venv/ + +# IDE +.vscode/ +.idea/ + +# Git +.git/ + +# Testing +.pytest_cache/ +tests/ + +# Documentation (not needed in container) +examples/ +wiki/ + +# Development files +.dev/ +.claude/ diff --git a/.env.example b/.env.example index fd4808a..640c7cc 100644 --- a/.env.example +++ b/.env.example @@ -2,6 +2,12 @@ POLYMARKET_PRIVATE_KEY=0x1234567890abcdef... POLYMARKET_FUNDER=0xYourFunderAddressHere +# Polymarket Builder API (for CTF operations: split/merge/redeem) +# Get these from Polymarket's Builder API +BUILDER_API_KEY=your-builder-api-key +BUILDER_SECRET=your-builder-secret-base64 +BUILDER_PASS_PHRASE=your-builder-passphrase + # Opinion Trading Configuration OPINION_API_KEY=your_api_key_here OPINION_PRIVATE_KEY=0x1234567890abcdef... diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..ac7695a --- /dev/null +++ b/Dockerfile @@ -0,0 +1,33 @@ +# Dr. 
Manhattan MCP Server - SSE Transport +# For Railway deployment + +FROM python:3.13-slim + +WORKDIR /app + +# Install uv for fast dependency management (per CLAUDE.md rule 3) +RUN pip install --no-cache-dir uv + +# Copy dependency files first (layer caching optimization) +COPY pyproject.toml README.md ./ + +# Install dependencies before copying code (changes to code won't invalidate this layer) +RUN uv pip install --system ".[mcp]" + +# Copy source code (this layer changes frequently) +COPY dr_manhattan/ ./dr_manhattan/ + +# Expose port (Railway will set PORT env var) +EXPOSE 8080 + +# Environment defaults +ENV PORT=8080 +ENV LOG_LEVEL=INFO +ENV HOST=0.0.0.0 + +# Health check using Python (curl not available in python:slim) +HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ + CMD python -c "import urllib.request; urllib.request.urlopen('http://localhost:8080/health')" || exit 1 + +# Run SSE server +CMD ["python", "-m", "dr_manhattan.mcp.server_sse"] diff --git a/README.md b/README.md index 7b12268..6e0a455 100644 --- a/README.md +++ b/README.md @@ -29,12 +29,23 @@ dr_manhattan/ │ ├── websocket.py # WebSocket base class │ └── errors.py # Exception hierarchy ├── exchanges/ # Exchange implementations -│ ├── polymarket.py -│ ├── polymarket_ws.py +│ ├── polymarket/ # Polymarket (mixin-based package) → [detailed docs](dr_manhattan/exchanges/polymarket/README.md) +│ │ ├── __init__.py # Unified Polymarket class +│ │ ├── polymarket_core.py # Constants, init, request helpers +│ │ ├── polymarket_clob.py # CLOB API (orders, positions) +│ │ ├── polymarket_gamma.py # Gamma API (markets, events, search) +│ │ ├── polymarket_data.py # Data API (trades, analytics) +│ │ ├── polymarket_ctf.py # CTF (split/merge/redeem) +│ │ ├── polymarket_ws.py # Market/User WebSocket +│ │ ├── polymarket_ws_ext.py # Sports/RTDS WebSocket +│ │ ├── polymarket_builder.py # Builder API +│ │ └── polymarket_operator.py # Operator API +│ ├── kalshi.py │ ├── opinion.py │ ├── limitless.py 
│ ├── limitless_ws.py -│ └── predictfun.py +│ ├── predictfun.py +│ └── predictfun_ws.py ├── models/ # Data models │ ├── market.py │ ├── order.py @@ -210,6 +221,28 @@ Add to Claude Desktop config (`~/Library/Application Support/Claude/claude_deskt } ``` +#### Remote Server (No Installation Required) + +Connect to the hosted MCP server via SSE. No private keys needed - uses Polymarket Builder profile: + +```json +{ + "mcpServers": { + "dr-manhattan": { + "type": "sse", + "url": "https://dr-manhattan-mcp-production.up.railway.app/sse", + "headers": { + "X-Polymarket-Api-Key": "your_api_key", + "X-Polymarket-Api-Secret": "your_api_secret", + "X-Polymarket-Passphrase": "your_passphrase" + } + } + } +} +``` + +**Note:** Remote server supports Polymarket trading only. Other exchanges are read-only for security. See [wiki/mcp/remote-server.md](wiki/mcp/remote-server.md) for details. + After restarting, you can: - "Show my Polymarket balance" - "Find active prediction markets" diff --git a/dr_manhattan/exchanges/__init__.py b/dr_manhattan/exchanges/__init__.py index 1afb86b..eba393b 100644 --- a/dr_manhattan/exchanges/__init__.py +++ b/dr_manhattan/exchanges/__init__.py @@ -2,10 +2,14 @@ from .limitless import Limitless from .opinion import Opinion from .polymarket import Polymarket +from .polymarket.polymarket_builder import PolymarketBuilder +from .polymarket.polymarket_operator import PolymarketOperator from .predictfun import PredictFun __all__ = [ "Polymarket", + "PolymarketBuilder", + "PolymarketOperator", "Limitless", "Opinion", "PredictFun", diff --git a/dr_manhattan/exchanges/polymarket.py b/dr_manhattan/exchanges/polymarket.py deleted file mode 100644 index 22a1f6b..0000000 --- a/dr_manhattan/exchanges/polymarket.py +++ /dev/null @@ -1,1930 +0,0 @@ -import json -import logging -import re -import traceback -from dataclasses import dataclass -from datetime import datetime, timedelta, timezone -from typing import Any, Callable, Dict, Iterable, List, Literal, 
Optional, Sequence - -import pandas as pd -import requests -from py_clob_client.client import ClobClient -from py_clob_client.clob_types import AssetType, BalanceAllowanceParams, OrderArgs, OrderType - -from ..base.errors import ( - AuthenticationError, - ExchangeError, - InvalidOrder, - MarketNotFound, - NetworkError, - RateLimitError, -) -from ..base.exchange import Exchange -from ..models import CryptoHourlyMarket -from ..models.market import Market -from ..models.order import Order, OrderSide, OrderStatus, OrderTimeInForce -from ..models.position import Position -from ..utils import setup_logger -from .polymarket_ws import PolymarketUserWebSocket, PolymarketWebSocket - - -@dataclass -class PublicTrade: - proxy_wallet: str - side: str - asset: str - condition_id: str - size: float - price: float - timestamp: datetime - title: str | None - slug: str | None - icon: str | None - event_slug: str | None - outcome: str | None - outcome_index: int | None - name: str | None - pseudonym: str | None - bio: str | None - profile_image: str | None - profile_image_optimized: str | None - transaction_hash: str | None - - -@dataclass -class PricePoint: - timestamp: datetime - price: float - raw: Dict[str, Any] - - -@dataclass -class Tag: - id: str - label: str | None - slug: str | None - force_show: bool | None - force_hide: bool | None - is_carousel: bool | None - published_at: str | None - created_at: str | None - updated_at: str | None - raw: dict - - -class Polymarket(Exchange): - """Polymarket exchange implementation""" - - BASE_URL = "https://gamma-api.polymarket.com" - CLOB_URL = "https://clob.polymarket.com" - PRICES_HISTORY_URL = f"{CLOB_URL}/prices-history" - DATA_API_URL = "https://data-api.polymarket.com" - SUPPORTED_INTERVALS: Sequence[str] = ("1m", "1h", "6h", "1d", "1w", "max") - PRICES_HISTORY_URL = f"{CLOB_URL}/prices-history" - DATA_API_URL = "https://data-api.polymarket.com" - SUPPORTED_INTERVALS: Sequence[str] = ("1m", "1h", "6h", "1d", "1w", "max") - - # 
Market type tags (Polymarket-specific) - TAG_1H = "102175" # 1-hour crypto price markets - - # Token normalization mapping - TOKEN_ALIASES = { - "BITCOIN": "BTC", - "ETHEREUM": "ETH", - "SOLANA": "SOL", - } - - @staticmethod - def normalize_token(token: str) -> str: - """Normalize token symbol to standard format (e.g., BITCOIN -> BTC)""" - token_upper = token.upper() - return Polymarket.TOKEN_ALIASES.get(token_upper, token_upper) - - @staticmethod - def parse_market_identifier(identifier: str) -> str: - """ - Parse market slug from URL or return slug as-is. - - Supports multiple URL formats: - - https://polymarket.com/event/SLUG - - https://polymarket.com/event/SLUG?param=value - - SLUG (direct slug input) - - Args: - identifier: Market slug or full URL - - Returns: - Market slug - - Example: - >>> Polymarket.parse_market_identifier("fed-decision-in-december") - 'fed-decision-in-december' - >>> Polymarket.parse_market_identifier("https://polymarket.com/event/fed-decision-in-december") - 'fed-decision-in-december' - """ - if not identifier: - return "" - - # If it's a URL, extract the slug - if identifier.startswith("http"): - # Remove query parameters - identifier = identifier.split("?")[0] - # Extract slug from URL - # Format: https://polymarket.com/event/SLUG - parts = identifier.rstrip("/").split("/") - if "event" in parts: - idx = parts.index("event") - if idx + 1 < len(parts): - return parts[idx + 1] - # Fallback: return last part - return parts[-1] - - return identifier - - @property - def id(self) -> str: - return "polymarket" - - @property - def name(self) -> str: - return "Polymarket" - - def __init__(self, config: Optional[Dict[str, Any]] = None): - """Initialize Polymarket exchange""" - super().__init__(config) - self._ws = None - self._user_ws = None - self.private_key = self.config.get("private_key") - self.funder = self.config.get("funder") - self._clob_client = None - self._address = None - - # Initialize CLOB client if private key is provided - if 
self.private_key: - self._initialize_clob_client() - - def _initialize_clob_client(self): - """Initialize CLOB client with authentication.""" - try: - chain_id = self.config.get("chain_id", 137) - signature_type = self.config.get("signature_type", 2) - - # Initialize authenticated client - self._clob_client = ClobClient( - host=self.CLOB_URL, - key=self.private_key, - chain_id=chain_id, - signature_type=signature_type, - funder=self.funder, - ) - - # Derive and set API credentials for L2 authentication - api_creds = self._clob_client.create_or_derive_api_creds() - if not api_creds: - raise AuthenticationError("Failed to derive API credentials") - - self._clob_client.set_api_creds(api_creds) - - # Verify L2 mode - if self._clob_client.mode < 2: - raise AuthenticationError( - f"Client not in L2 mode (current mode: {self._clob_client.mode})" - ) - - # Store address - try: - self._address = self._clob_client.get_address() - except Exception: - self._address = None - - except AuthenticationError: - raise - except Exception as e: - raise AuthenticationError(f"Failed to initialize CLOB client: {e}") - - def _request(self, method: str, endpoint: str, params: Optional[Dict] = None) -> Any: - """Make HTTP request to Polymarket API with retry logic""" - - @self._retry_on_failure - def _make_request(): - url = f"{self.BASE_URL}{endpoint}" - headers = {} - - if self.api_key: - headers["Authorization"] = f"Bearer {self.api_key}" - - try: - response = requests.request( - method, url, params=params, headers=headers, timeout=self.timeout - ) - - # Handle rate limiting - if response.status_code == 429: - retry_after = int(response.headers.get("Retry-After", 1)) - raise RateLimitError(f"Rate limited. 
Retry after {retry_after}s") - - response.raise_for_status() - return response.json() - except requests.Timeout as e: - raise NetworkError(f"Request timeout: {e}") - except requests.ConnectionError as e: - raise NetworkError(f"Connection error: {e}") - except requests.HTTPError as e: - if response.status_code == 404: - raise ExchangeError(f"Resource not found: {endpoint}") - elif response.status_code == 401: - raise AuthenticationError(f"Authentication failed: {e}") - elif response.status_code == 403: - raise AuthenticationError(f"Access forbidden: {e}") - else: - raise ExchangeError(f"HTTP error: {e}") - except requests.RequestException as e: - raise ExchangeError(f"Request failed: {e}") - - return _make_request() - - def fetch_markets(self, params: Optional[Dict[str, Any]] = None) -> list[Market]: - """ - Fetch all markets from Polymarket - - Uses CLOB API instead of Gamma API because CLOB includes token IDs - which are required for trading. - """ - - @self._retry_on_failure - def _fetch(): - # Fetch from CLOB API /sampling-markets (includes token IDs and live markets) - try: - response = requests.get(f"{self.CLOB_URL}/sampling-markets", timeout=self.timeout) - - if response.status_code == 200: - result = response.json() - markets_data = result.get("data", result if isinstance(result, list) else []) - - markets = [] - for item in markets_data: - market = self._parse_sampling_market(item) - if market: - markets.append(market) - - # Apply filters if provided - query_params = params or {} - if query_params.get("active") or (not query_params.get("closed", True)): - markets = [m for m in markets if m.is_open] - - # Apply limit if provided - limit = query_params.get("limit") - if limit: - markets = markets[:limit] - - if self.verbose: - print(f"✓ Fetched {len(markets)} markets from CLOB API (sampling-markets)") - - return markets - - except Exception as e: - if self.verbose: - print(f"CLOB API fetch failed: {e}, falling back to Gamma API") - - # Fallback to Gamma API 
(but won't have token IDs) - query_params = params or {} - if "active" not in query_params and "closed" not in query_params: - query_params = {"active": True, "closed": False, **query_params} - - data = self._request("GET", "/markets", query_params) - markets = [] - for item in data: - market = self._parse_market(item) - markets.append(market) - return markets - - return _fetch() - - def fetch_market(self, market_id: str) -> Market: - """Fetch specific market by ID with retry logic""" - - @self._retry_on_failure - def _fetch(): - try: - data = self._request("GET", f"/markets/{market_id}") - return self._parse_market(data) - except ExchangeError: - raise MarketNotFound(f"Market {market_id} not found") - - return _fetch() - - def fetch_markets_by_slug(self, slug_or_url: str) -> List[Market]: - """ - Fetch all markets from an event by slug or URL. - - For events with multiple markets (e.g., "which day will X happen"), - this returns all markets in the event. - - Args: - slug_or_url: Event slug or full Polymarket URL - - Returns: - List of Market objects with token IDs populated - """ - slug = self.parse_market_identifier(slug_or_url) - - if not slug: - raise ValueError("Empty slug provided") - - try: - response = requests.get(f"{self.BASE_URL}/events?slug={slug}", timeout=self.timeout) - except requests.Timeout as e: - raise NetworkError(f"Request timeout: {e}") - except requests.ConnectionError as e: - raise NetworkError(f"Connection error: {e}") - except requests.RequestException as e: - raise NetworkError(f"Request failed: {e}") - - if response.status_code == 404: - raise MarketNotFound(f"Event not found: {slug}") - elif response.status_code != 200: - raise ExchangeError(f"Failed to fetch event: HTTP {response.status_code}") - - event_data = response.json() - if not event_data or len(event_data) == 0: - raise MarketNotFound(f"Event not found: {slug}") - - event = event_data[0] - markets_data = event.get("markets", []) - - if not markets_data: - raise 
MarketNotFound(f"No markets found in event: {slug}") - - markets = [] - for market_data in markets_data: - market = self._parse_market(market_data) - - # Compose readable_id: [event_slug, id] - market.metadata["readable_id"] = [slug, market.id] - - # Get token IDs from market data - clob_token_ids = market_data.get("clobTokenIds", []) - if isinstance(clob_token_ids, str): - try: - clob_token_ids = json.loads(clob_token_ids) - except json.JSONDecodeError: - clob_token_ids = [] - - if clob_token_ids: - market.metadata["clobTokenIds"] = clob_token_ids - - markets.append(market) - - return markets - - def get_orderbook(self, token_id: str) -> Dict[str, Any]: - """ - Fetch orderbook for a specific token via REST API. - - Args: - token_id: Token ID to fetch orderbook for - - Returns: - Dictionary with 'bids' and 'asks' arrays - Each entry: {'price': str, 'size': str} - - Example: - >>> orderbook = exchange.get_orderbook(token_id) - >>> best_bid = float(orderbook['bids'][0]['price']) - >>> best_ask = float(orderbook['asks'][0]['price']) - """ - try: - response = requests.get( - f"{self.CLOB_URL}/book", params={"token_id": token_id}, timeout=self.timeout - ) - - if response.status_code == 200: - return response.json() - - return {"bids": [], "asks": []} - - except Exception as e: - if self.verbose: - print(f"Failed to fetch orderbook: {e}") - return {"bids": [], "asks": []} - - def _parse_sampling_market(self, data: Dict[str, Any]) -> Optional[Market]: - """Parse market data from CLOB sampling-markets API response""" - try: - # sampling-markets includes more fields than simplified-markets - condition_id = data.get("condition_id") - if not condition_id: - return None - - # Extract question and description - question = data.get("question", "") - - # Extract tick size (minimum price increment) - # The API returns minimum_tick_size (e.g., 0.01 or 0.001) - # Note: minimum_order_size is different - it's the min shares per order - # Default to 0.01 (standard Polymarket tick size) 
if not provided - minimum_tick_size = data.get("minimum_tick_size", 0.01) - - # Extract tokens - sampling-markets has them in "tokens" array - tokens_data = data.get("tokens", []) - token_ids = [] - outcomes = [] - prices = {} - - for token in tokens_data: - if isinstance(token, dict): - token_id = token.get("token_id") - outcome = token.get("outcome", "") - price = token.get("price") - - if token_id: - token_ids.append(str(token_id)) - if outcome: - outcomes.append(outcome) - if outcome and price is not None: - try: - prices[outcome] = float(price) - except (ValueError, TypeError): - pass - - # Build metadata with token IDs - metadata = { - **data, - "clobTokenIds": token_ids, - "condition_id": condition_id, - "minimum_tick_size": minimum_tick_size, - } - - return Market( - id=condition_id, - question=question, - outcomes=outcomes if outcomes else ["Yes", "No"], - close_time=None, # Can parse if needed - volume=0, # Not in sampling-markets - liquidity=0, # Not in sampling-markets - prices=prices, - metadata=metadata, - tick_size=minimum_tick_size, - description=data.get("description", ""), - ) - except Exception as e: - if self.verbose: - print(f"Error parsing sampling market: {e}") - return None - - def _parse_clob_market(self, data: Dict[str, Any]) -> Optional[Market]: - """Parse market data from CLOB API response""" - try: - # CLOB API structure - condition_id = data.get("condition_id") - if not condition_id: - return None - - # Extract tokens (already have token_id, outcome, price, winner) - tokens = data.get("tokens", []) - token_ids = [] - outcomes = [] - prices = {} - - for token in tokens: - if isinstance(token, dict): - token_id = token.get("token_id") - outcome = token.get("outcome", "") - price = token.get("price") - - if token_id: - token_ids.append(str(token_id)) - if outcome: - outcomes.append(outcome) - if outcome and price is not None: - try: - prices[outcome] = float(price) - except (ValueError, TypeError): - pass - - # Build metadata with token 
IDs already included - # Default to 0.01 (standard Polymarket tick size) if not provided - minimum_tick_size = data.get("minimum_tick_size", 0.01) - metadata = { - **data, - "clobTokenIds": token_ids, - "condition_id": condition_id, - "minimum_tick_size": minimum_tick_size, - } - - return Market( - id=condition_id, - question="", # CLOB API doesn't include question text - outcomes=outcomes if outcomes else ["Yes", "No"], - close_time=None, # CLOB API doesn't include end date - volume=0, # CLOB API doesn't include volume - liquidity=0, # CLOB API doesn't include liquidity - prices=prices, - metadata=metadata, - tick_size=minimum_tick_size, - description=data.get("description", ""), - ) - except Exception as e: - if self.verbose: - print(f"Error parsing CLOB market: {e}") - return None - - def _parse_market(self, data: Dict[str, Any]) -> Market: - """Parse market data from API response""" - # Parse outcomes - can be JSON string or list - outcomes_raw = data.get("outcomes", []) - if isinstance(outcomes_raw, str): - try: - outcomes = json.loads(outcomes_raw) - except (json.JSONDecodeError, TypeError): - outcomes = [] - else: - outcomes = outcomes_raw - - # Parse outcome prices - can be JSON string, list, or None - prices_raw = data.get("outcomePrices") - prices_list = [] - - if prices_raw is not None: - if isinstance(prices_raw, str): - try: - prices_list = json.loads(prices_raw) - except (json.JSONDecodeError, TypeError): - prices_list = [] - else: - prices_list = prices_raw - - # Create prices dictionary mapping outcomes to prices - prices = {} - if len(outcomes) == len(prices_list) and prices_list: - for outcome, price in zip(outcomes, prices_list): - try: - price_val = float(price) - # Only add non-zero prices - if price_val > 0: - prices[outcome] = price_val - except (ValueError, TypeError): - pass - - # Fallback: use bestBid/bestAsk if available and no prices found - if not prices and len(outcomes) == 2: - best_bid = data.get("bestBid") - best_ask = 
data.get("bestAsk") - if best_bid is not None and best_ask is not None: - try: - bid = float(best_bid) - ask = float(best_ask) - if 0 < bid < 1 and 0 < ask <= 1: - # For binary: Yes price ~ask, No price ~(1-ask) - prices[outcomes[0]] = ask - prices[outcomes[1]] = 1.0 - bid - except (ValueError, TypeError): - pass - - # Parse close time - check both endDate and closed status - close_time = self._parse_datetime(data.get("endDate")) - - # Use volumeNum if available, fallback to volume - volume = float(data.get("volumeNum", data.get("volume", 0))) - liquidity = float(data.get("liquidityNum", data.get("liquidity", 0))) - - # Try to extract token IDs from various possible fields - # Gamma API sometimes includes these in the response - metadata = dict(data) - - # Set match_id from groupItemTitle for cross-exchange matching - if "groupItemTitle" in data: - metadata["match_id"] = data["groupItemTitle"] - - if "tokens" in data and data["tokens"]: - metadata["clobTokenIds"] = data["tokens"] - elif "clobTokenIds" not in metadata and "tokenID" in data: - # Single token ID - might be a simplified response - metadata["clobTokenIds"] = [data["tokenID"]] - - # Ensure clobTokenIds is always a list, not a JSON string - if "clobTokenIds" in metadata and isinstance(metadata["clobTokenIds"], str): - try: - metadata["clobTokenIds"] = json.loads(metadata["clobTokenIds"]) - except (json.JSONDecodeError, TypeError): - # If parsing fails, remove it - will be fetched separately - del metadata["clobTokenIds"] - - # Extract tick size - default to 0.01 (standard Polymarket tick size) - # Gamma API may not include this field; CLOB API always does - minimum_tick_size = data.get("minimum_tick_size", 0.01) - metadata["minimum_tick_size"] = minimum_tick_size - - return Market( - id=data.get("id", ""), - question=data.get("question", ""), - outcomes=outcomes, - close_time=close_time, - volume=volume, - liquidity=liquidity, - prices=prices, - metadata=metadata, - tick_size=minimum_tick_size, - 
description=data.get("description", ""), - ) - - def fetch_token_ids(self, condition_id: str) -> list[str]: - """ - Fetch token IDs for a specific market from CLOB API - - The Gamma API doesn't include token IDs, so we need to fetch them - from the CLOB API when we need to trade. - - Based on actual CLOB API response structure. - - Args: - condition_id: The market/condition ID - - Returns: - List of token IDs as strings - - Raises: - ExchangeError: If token IDs cannot be fetched - """ - try: - # Try simplified-markets endpoint - # Response structure: {"data": [{"condition_id": ..., "tokens": [{"token_id": ..., "outcome": ...}]}]} - try: - response = requests.get(f"{self.CLOB_URL}/simplified-markets", timeout=self.timeout) - - if response.status_code == 200: - result = response.json() - - # Check if response has "data" key - markets_list = result.get("data", result if isinstance(result, list) else []) - - # Find the market with matching condition_id - for market in markets_list: - market_id = market.get("condition_id") or market.get("id") - if market_id == condition_id: - # Extract token IDs from tokens array - # Each token is an object: {"token_id": "...", "outcome": "...", "price": ...} - tokens = market.get("tokens", []) - if tokens and isinstance(tokens, list): - # Extract just the token_id strings - token_ids = [] - for token in tokens: - if isinstance(token, dict) and "token_id" in token: - token_ids.append(str(token["token_id"])) - elif isinstance(token, str): - # In case it's already a string - token_ids.append(token) - - if token_ids: - if self.verbose: - print( - f"✓ Found {len(token_ids)} token IDs via simplified-markets" - ) - for i, tid in enumerate(token_ids): - outcome = ( - tokens[i].get("outcome", f"outcome_{i}") - if isinstance(tokens[i], dict) - else f"outcome_{i}" - ) - print(f" [{i}] {outcome}: {tid}") - return token_ids - - # Fallback: check for clobTokenIds - clob_tokens = market.get("clobTokenIds") - if clob_tokens and isinstance(clob_tokens, 
list): - token_ids = [str(t) for t in clob_tokens] - if self.verbose: - print(f"✓ Found token IDs via clobTokenIds: {token_ids}") - return token_ids - except Exception as e: - if self.verbose: - print(f"simplified-markets failed: {e}") - - # Try sampling-simplified-markets endpoint - try: - response = requests.get( - f"{self.CLOB_URL}/sampling-simplified-markets", timeout=self.timeout - ) - - if response.status_code == 200: - markets_list = response.json() - if not isinstance(markets_list, list): - markets_list = markets_list.get("data", []) - - for market in markets_list: - market_id = market.get("condition_id") or market.get("id") - if market_id == condition_id: - # Extract from tokens array - tokens = market.get("tokens", []) - if tokens and isinstance(tokens, list): - token_ids = [] - for token in tokens: - if isinstance(token, dict) and "token_id" in token: - token_ids.append(str(token["token_id"])) - elif isinstance(token, str): - token_ids.append(token) - - if token_ids: - if self.verbose: - print( - f"✓ Found token IDs via sampling-simplified-markets: {len(token_ids)} tokens" - ) - return token_ids - except Exception as e: - if self.verbose: - print(f"sampling-simplified-markets failed: {e}") - - # Try markets endpoint - try: - response = requests.get(f"{self.CLOB_URL}/markets", timeout=self.timeout) - - if response.status_code == 200: - markets_list = response.json() - if not isinstance(markets_list, list): - markets_list = markets_list.get("data", []) - - for market in markets_list: - market_id = market.get("condition_id") or market.get("id") - if market_id == condition_id: - # Extract from tokens array - tokens = market.get("tokens", []) - if tokens and isinstance(tokens, list): - token_ids = [] - for token in tokens: - if isinstance(token, dict) and "token_id" in token: - token_ids.append(str(token["token_id"])) - elif isinstance(token, str): - token_ids.append(token) - - if token_ids: - if self.verbose: - print( - f"✓ Found token IDs via markets 
endpoint: {len(token_ids)} tokens" - ) - return token_ids - except Exception as e: - if self.verbose: - print(f"markets endpoint failed: {e}") - - raise ExchangeError( - f"Could not fetch token IDs for market {condition_id} from any CLOB endpoint" - ) - - except requests.RequestException as e: - raise ExchangeError(f"Network error fetching token IDs: {e}") - - def create_order( - self, - market_id: str, - outcome: str, - side: OrderSide, - price: float, - size: float, - params: Optional[Dict[str, Any]] = None, - time_in_force: OrderTimeInForce = OrderTimeInForce.GTC, - ) -> Order: - """Create order on Polymarket CLOB""" - if not self._clob_client: - raise AuthenticationError("CLOB client not initialized. Private key required.") - - token_id = params.get("token_id") if params else None - if not token_id: - raise InvalidOrder("token_id required in params") - - # Map our OrderTimeInForce to py_clob_client OrderType - order_type_map = { - OrderTimeInForce.GTC: OrderType.GTC, - OrderTimeInForce.FOK: OrderType.FOK, - OrderTimeInForce.IOC: OrderType.GTD, # py_clob_client uses GTD for IOC behavior - } - clob_order_type = order_type_map.get(time_in_force, OrderType.GTC) - - try: - # Create and sign order - order_args = OrderArgs( - token_id=token_id, - price=float(price), - size=float(size), - side=side.value.upper(), - ) - - signed_order = self._clob_client.create_order(order_args) - result = self._clob_client.post_order(signed_order, clob_order_type) - - # Parse result - order_id = result.get("orderID", "") if isinstance(result, dict) else str(result) - status_str = result.get("status", "LIVE") if isinstance(result, dict) else "LIVE" - - status_map = { - "LIVE": OrderStatus.OPEN, - "MATCHED": OrderStatus.FILLED, - "CANCELLED": OrderStatus.CANCELLED, - } - - return Order( - id=order_id, - market_id=market_id, - outcome=outcome, - side=side, - price=price, - size=size, - filled=0, - status=status_map.get(status_str, OrderStatus.OPEN), - created_at=datetime.now(), - 
updated_at=datetime.now(), - time_in_force=time_in_force, - ) - - except Exception as e: - raise InvalidOrder(f"Order placement failed: {str(e)}") - - def cancel_order(self, order_id: str, market_id: Optional[str] = None) -> Order: - """Cancel order on Polymarket""" - if not self._clob_client: - raise AuthenticationError("CLOB client not initialized. Private key required.") - - try: - result = self._clob_client.cancel(order_id) - if isinstance(result, dict): - return self._parse_order(result) - return Order( - id=order_id, - market_id=market_id or "", - outcome="", - side=OrderSide.BUY, - price=0, - size=0, - filled=0, - status=OrderStatus.CANCELLED, - created_at=datetime.now(), - updated_at=datetime.now(), - ) - except Exception as e: - raise InvalidOrder(f"Failed to cancel order {order_id}: {str(e)}") - - def fetch_order(self, order_id: str, market_id: Optional[str] = None) -> Order: - """Fetch order details""" - data = self._request("GET", f"/orders/{order_id}") - return self._parse_order(data) - - def fetch_open_orders( - self, market_id: Optional[str] = None, params: Optional[Dict[str, Any]] = None - ) -> list[Order]: - """ - Fetch open orders using CLOB client - - Args: - market_id: Can be either the numeric market ID or the hex conditionId. - If numeric, we filter by exact match. If hex (0x...), we use it directly. - """ - if not self._clob_client: - raise AuthenticationError("CLOB client not initialized. Private key required.") - - try: - # Use CLOB client's get_orders method - response = self._clob_client.get_orders() - - # Response is a list directly - if isinstance(response, list): - orders = response - elif isinstance(response, dict) and "data" in response: - orders = response["data"] - else: - if self.verbose: - print(f"Debug: Unexpected response format: {type(response)}") - return [] - - if not orders: - return [] - - # Filter by market_id if provided - # Note: CLOB orders use hex conditionId (0x...) 
in the 'market' field - if market_id: - orders = [o for o in orders if o.get("market") == market_id] - - # Debug: Print first order's fields to identify size field - if orders and self.verbose: - debug_logger = logging.getLogger(__name__) - debug_logger.debug(f"Sample order fields: {list(orders[0].keys())}") - debug_logger.debug(f"Sample order data: {orders[0]}") - - # Parse orders - return [self._parse_order(order) for order in orders] - except Exception as e: - if self.verbose: - print(f"Warning: Failed to fetch open orders: {e}") - traceback.print_exc() - return [] - - def fetch_positions( - self, market_id: Optional[str] = None, params: Optional[Dict[str, Any]] = None - ) -> list[Position]: - """ - Fetch current positions from Polymarket. - - Note: On Polymarket, positions are represented by conditional token balances. - This method queries token balances for the specified market. - Since positions require market-specific token data, we can't query positions - without a market context. Returns empty list if no market_id is provided. - """ - if not self._clob_client: - raise AuthenticationError("CLOB client not initialized. Private key required.") - - # Positions require market context on Polymarket - # Without market_id, we can't determine which tokens to query - if not market_id: - return [] - - # For now, return empty positions list - # Positions will be queried on-demand when we have the market object with token IDs - # This avoids the chicken-and-egg problem of needing to fetch the market just to get positions - return [] - - def fetch_positions_for_market(self, market: Market) -> list[Position]: - """ - Fetch positions for a specific market object. - This is the recommended way to fetch positions on Polymarket. - - Args: - market: Market object with token IDs in metadata - - Returns: - List of Position objects - """ - if not self._clob_client: - raise AuthenticationError("CLOB client not initialized. 
Private key required.") - - try: - positions = [] - token_ids_raw = market.metadata.get("clobTokenIds", []) - - # Parse token IDs if they're stored as JSON string - if isinstance(token_ids_raw, str): - token_ids = json.loads(token_ids_raw) - else: - token_ids = token_ids_raw - - if not token_ids or len(token_ids) < 2: - return positions - - # Query balance for each token - for i, token_id in enumerate(token_ids): - try: - params_obj = BalanceAllowanceParams( - asset_type=AssetType.CONDITIONAL, token_id=token_id - ) - balance_data = self._clob_client.get_balance_allowance(params=params_obj) - - if isinstance(balance_data, dict) and "balance" in balance_data: - balance_raw = balance_data["balance"] - # Convert from wei (6 decimals) - size = float(balance_raw) / 1e6 if balance_raw else 0.0 - - if size > 0: - # Determine outcome from market.outcomes - outcome = ( - market.outcomes[i] - if i < len(market.outcomes) - else ("Yes" if i == 0 else "No") - ) - - # Get current price from market.prices - current_price = market.prices.get(outcome, 0.0) - - position = Position( - market_id=market.id, - outcome=outcome, - size=size, - average_price=0.0, # Not available from balance query - current_price=current_price, - ) - positions.append(position) - except Exception as e: - if self.verbose: - print(f"Failed to fetch balance for token {token_id}: {e}") - continue - - return positions - - except Exception as e: - raise ExchangeError(f"Failed to fetch positions for market: {str(e)}") - - def find_crypto_hourly_market( - self, - token_symbol: Optional[str] = None, - min_liquidity: float = 0.0, - limit: int = 100, - is_active: bool = True, - is_expired: bool = False, - params: Optional[Dict[str, Any]] = None, - ) -> Optional[tuple[Market, Any]]: - """ - Find crypto hourly markets on Polymarket using tag-based filtering. - - Polymarket uses TAG_1H for 1-hour crypto price markets, which is more - efficient than pattern matching on all markets. 
- - Args: - token_symbol: Filter by token (e.g., "BTC", "ETH", "SOL") - min_liquidity: Minimum liquidity required - limit: Maximum markets to fetch - is_active: If True, only return markets currently in progress (expiring within 1 hour) - is_expired: If True, only return expired markets. If False, exclude expired markets. - params: Additional parameters (can include 'tag_id' to override default tag) - - Returns: - Tuple of (Market, CryptoHourlyMarket) or None - """ - logger = setup_logger(__name__) - - # Use tag-based filtering for efficiency - tag_id = (params or {}).get("tag_id", self.TAG_1H) - - if self.verbose: - logger.info(f"Searching for crypto hourly markets with tag: {tag_id}") - - all_markets = [] - offset = 0 - page_size = 100 - - while len(all_markets) < limit: - # Use gamma-api with tag filtering - url = f"{self.BASE_URL}/markets" - query_params = { - "active": "true", - "closed": "false", - "limit": min(page_size, limit - len(all_markets)), - "offset": offset, - "order": "volume", - "ascending": "false", - } - - if tag_id: - query_params["tag_id"] = tag_id - - try: - response = requests.get(url, params=query_params, timeout=10) - response.raise_for_status() - data = response.json() - - markets_data = data if isinstance(data, list) else [] - if not markets_data: - break - - # Parse markets - for market_data in markets_data: - market = self._parse_market(market_data) - if market: - all_markets.append(market) - - offset += len(markets_data) - - # If we got fewer markets than requested, we've reached the end - if len(markets_data) < page_size: - break - - except Exception as e: - if self.verbose: - logger.error(f"Failed to fetch tagged markets: {e}") - break - - if self.verbose: - logger.info(f"Found {len(all_markets)} markets with tag {tag_id}") - - # Now parse and filter the markets - # Pattern for "Up or Down" markets (e.g., "Bitcoin Up or Down - November 2, 7AM ET") - up_down_pattern = re.compile( - r"(?PBitcoin|Ethereum|Solana|BTC|ETH|SOL|XRP)\s+Up 
or Down", re.IGNORECASE - ) - - # Pattern for strike price markets (e.g., "Will BTC be above $95,000 at 5:00 PM ET?") - strike_pattern = re.compile( - r"(?:(?PBTC|ETH|SOL|BITCOIN|ETHEREUM|SOLANA)\s+.*?" - r"(?Pabove|below|over|under|reach)\s+" - r"[\$]?(?P[\d,]+(?:\.\d+)?))|" - r"(?:[\$]?(?P[\d,]+(?:\.\d+)?)\s+.*?" - r"(?PBTC|ETH|SOL|BITCOIN|ETHEREUM|SOLANA))", - re.IGNORECASE, - ) - - for market in all_markets: - # Must be binary and open - if not market.is_binary or not market.is_open: - continue - - # Check liquidity - if market.liquidity < min_liquidity: - continue - - # Check expiry time filtering based on is_active and is_expired parameters - if market.close_time: - # Handle timezone-aware datetime - if market.close_time.tzinfo is not None: - now = datetime.now(timezone.utc) - else: - now = datetime.now() - - time_until_expiry = (market.close_time - now).total_seconds() - - # Apply is_expired filter - if is_expired: - # Only include expired markets - if time_until_expiry > 0: - continue - else: - # Exclude expired markets - if time_until_expiry <= 0: - continue - - # Apply is_active filter (only applies to non-expired markets) - if is_active and not is_expired: - # For active hourly markets, only include if expiring within 1 hour - # This ensures we get currently active hourly candles - if time_until_expiry > 3600: # 1 hour in seconds - continue - - # Try "Up or Down" pattern first - up_down_match = up_down_pattern.search(market.question) - if up_down_match: - parsed_token = self.normalize_token(up_down_match.group("token")) - - # Apply token filter - if token_symbol and parsed_token != self.normalize_token(token_symbol): - continue - - expiry = ( - market.close_time if market.close_time else datetime.now() + timedelta(hours=1) - ) - - crypto_market = CryptoHourlyMarket( - token_symbol=parsed_token, - expiry_time=expiry, - strike_price=None, - market_type="up_down", - ) - - return (market, crypto_market) - - # Try strike price pattern - strike_match = 
strike_pattern.search(market.question) - if strike_match: - parsed_token = self.normalize_token( - strike_match.group("token1") or strike_match.group("token2") or "" - ) - parsed_price_str = ( - strike_match.group("price1") or strike_match.group("price2") or "0" - ) - parsed_price = float(parsed_price_str.replace(",", "")) - - # Apply filters - if token_symbol and parsed_token != self.normalize_token(token_symbol): - continue - - expiry = ( - market.close_time if market.close_time else datetime.now() + timedelta(hours=1) - ) - - crypto_market = CryptoHourlyMarket( - token_symbol=parsed_token, - expiry_time=expiry, - strike_price=parsed_price, - market_type="strike_price", - ) - - return (market, crypto_market) - - return None - - def fetch_balance(self) -> Dict[str, float]: - """ - Fetch account balance from Polymarket using CLOB client - - Returns: - Dictionary with balance information including USDC - """ - if not self._clob_client: - raise AuthenticationError("CLOB client not initialized. 
Private key required.") - - try: - # Fetch USDC (collateral) balance - params = BalanceAllowanceParams(asset_type=AssetType.COLLATERAL) - balance_data = self._clob_client.get_balance_allowance(params=params) - - # Extract balance from response - usdc_balance = 0.0 - if isinstance(balance_data, dict) and "balance" in balance_data: - try: - # Balance is returned as a string in wei (6 decimals for USDC) - usdc_balance = float(balance_data["balance"]) / 1e6 - except (ValueError, TypeError): - usdc_balance = 0.0 - - return {"USDC": usdc_balance} - - except Exception as e: - raise ExchangeError(f"Failed to fetch balance: {str(e)}") - - def _parse_order(self, data: Dict[str, Any]) -> Order: - """Parse order data from API response""" - order_id = data.get("id") or data.get("orderID") or "" - - # Try multiple field names for size (CLOB API may use different names) - size = float( - data.get("size") - or data.get("original_size") - or data.get("amount") - or data.get("original_amount") - or 0 - ) - filled = float(data.get("filled") or data.get("matched") or data.get("matched_amount") or 0) - - return Order( - id=order_id, - market_id=data.get("market_id", ""), - outcome=data.get("outcome", ""), - side=OrderSide(data.get("side", "buy").lower()), - price=float(data.get("price", 0)), - size=size, - filled=filled, - status=self._parse_order_status(data.get("status")), - created_at=self._parse_datetime(data.get("created_at")), - updated_at=self._parse_datetime(data.get("updated_at")), - ) - - def _parse_position(self, data: Dict[str, Any]) -> Position: - """Parse position data from API response""" - return Position( - market_id=data.get("market_id", ""), - outcome=data.get("outcome", ""), - size=float(data.get("size", 0)), - average_price=float(data.get("average_price", 0)), - current_price=float(data.get("current_price", 0)), - ) - - def _parse_order_status(self, status: str) -> OrderStatus: - """Convert string status to OrderStatus enum""" - status_map = { - "pending": 
OrderStatus.PENDING, - "open": OrderStatus.OPEN, - "filled": OrderStatus.FILLED, - "partially_filled": OrderStatus.PARTIALLY_FILLED, - "cancelled": OrderStatus.CANCELLED, - "rejected": OrderStatus.REJECTED, - } - return status_map.get(status, OrderStatus.OPEN) - - def _parse_datetime(self, timestamp: Optional[Any]) -> Optional[datetime]: - """Parse datetime from various formats""" - if not timestamp: - return None - - if isinstance(timestamp, datetime): - return timestamp - - try: - if isinstance(timestamp, (int, float)): - return datetime.fromtimestamp(timestamp) - return datetime.fromisoformat(str(timestamp)) - except (ValueError, TypeError): - return None - - def get_websocket(self) -> PolymarketWebSocket: - """ - Get WebSocket instance for real-time orderbook updates. - - The WebSocket automatically updates the exchange's mid-price cache - when orderbook data is received. - - Returns: - PolymarketWebSocket instance - - Example: - ws = exchange.get_websocket() - await ws.watch_orderbook(asset_id, callback) - ws.start() - """ - if self._ws is None: - self._ws = PolymarketWebSocket( - config={"verbose": self.verbose, "auto_reconnect": True}, exchange=self - ) - return self._ws - - def get_user_websocket(self) -> PolymarketUserWebSocket: - """ - Get User WebSocket instance for real-time trade/fill notifications. - - Requires CLOB client to be initialized (private key required). - - Returns: - PolymarketUserWebSocket instance - - Example: - user_ws = exchange.get_user_websocket() - user_ws.on_trade(lambda trade: print(f"Fill: {trade.size} @ {trade.price}")) - user_ws.start() - """ - if not self._clob_client: - raise AuthenticationError( - "CLOB client not initialized. Private key required for user WebSocket." 
- ) - - if self._user_ws is None: - # Get API credentials from CLOB client - creds = self._clob_client.creds - if not creds: - raise AuthenticationError("API credentials not available") - - self._user_ws = PolymarketUserWebSocket( - api_key=creds.api_key, - api_secret=creds.api_secret, - api_passphrase=creds.api_passphrase, - verbose=self.verbose, - ) - return self._user_ws - - # ------------------------------------------------------------------------- - - # ---------- polymarket_fetcher ---------- - - def _ensure_market(self, market: Market | str) -> Market: - if isinstance(market, Market): - return market - fetched = self.fetch_market(market) - if not fetched: - raise MarketNotFound(f"Market {market} not found") - return fetched - - @staticmethod - def _extract_token_ids(market: Market) -> List[str]: - raw_ids = market.metadata.get("clobTokenIds", []) - if isinstance(raw_ids, str): - try: - raw_ids = json.loads(raw_ids) - except json.JSONDecodeError: - raw_ids = [raw_ids] - return [str(token_id) for token_id in raw_ids if token_id] - - def _lookup_token_id(self, market: Market, outcome: int | str | None) -> str: - token_ids = self._extract_token_ids(market) - if not token_ids: - raise ExchangeError("Cannot fetch price history without token IDs in metadata.") - - if outcome is None: - outcome_index = 0 - elif isinstance(outcome, int): - outcome_index = outcome - else: - try: - outcome_index = market.outcomes.index(outcome) - except ValueError as err: - raise ExchangeError(f"Outcome {outcome} not found in market {market.id}") from err - - if outcome_index < 0 or outcome_index >= len(token_ids): - raise ExchangeError( - f"Outcome index {outcome_index} out of range for market {market.id}" - ) - - return token_ids[outcome_index] - - def fetch_price_history( - self, - market: Market | str, - *, - outcome: int | str | None = None, - interval: Literal["1m", "1h", "6h", "1d", "1w", "max"] = "1m", - fidelity: int = 10, - as_dataframe: bool = False, - ) -> List[PricePoint] 
| pd.DataFrame: - if interval not in self.SUPPORTED_INTERVALS: - raise ValueError( - f"Unsupported interval '{interval}'. Pick from {self.SUPPORTED_INTERVALS}." - ) - - market_obj = self._ensure_market(market) - token_id = self._lookup_token_id(market_obj, outcome) - - params = { - "market": token_id, - "interval": interval, - "fidelity": fidelity, - } - - @self._retry_on_failure - def _fetch() -> List[Dict[str, Any]]: - resp = requests.get(self.PRICES_HISTORY_URL, params=params, timeout=self.timeout) - resp.raise_for_status() - payload = resp.json() - history = payload.get("history", []) - if not isinstance(history, list): - raise ExchangeError("Invalid response: 'history' must be a list.") - return history - - history = _fetch() - points = self._parse_history(history) - - if as_dataframe: - data = { - "timestamp": [p.timestamp for p in points], - "price": [p.price for p in points], - } - return pd.DataFrame(data).sort_values("timestamp").reset_index(drop=True) - - return points - - def _collect_paginated( - self, - fetch_page: Callable[[int, int], List[Any]], - *, - total_limit: int, - initial_offset: int = 0, - page_size: int = 500, - dedup_key: Callable[[Any], Any] | None = None, - log: bool | None = False, - ) -> List[Any]: - if total_limit <= 0: - return [] - - results: List[Any] = [] - current_offset = int(initial_offset) - total_limit = int(total_limit) - page_size = max(1, int(page_size)) - - seen: set[Any] = set() if dedup_key else set() - - while len(results) < total_limit: - remaining = total_limit - len(results) - page_limit = min(page_size, remaining) - - if log: - print("current-offset:", current_offset) - print("page_limit:", page_limit) - print("----------") - - page = fetch_page(current_offset, page_limit) - - if not page: - break - - if dedup_key: - new_items: List[Any] = [] - for item in page: - key = dedup_key(item) - if key in seen: - continue - seen.add(key) - new_items.append(item) - - if not new_items: - break - - results.extend(new_items) 
- else: - results.extend(page) - - current_offset += len(page) - - if len(page) < page_limit: - break - - if len(results) > total_limit: - results = results[:total_limit] - - return results - - def search_markets( - self, - *, - # Gamma-side - limit: int = 200, - offset: int = 0, - order: str | None = "id", - ascending: bool | None = False, - closed: bool | None = False, - tag_id: int | None = None, - ids: Sequence[int] | None = None, - slugs: Sequence[str] | None = None, - clob_token_ids: Sequence[str] | None = None, - condition_ids: Sequence[str] | None = None, - market_maker_addresses: Sequence[str] | None = None, - liquidity_num_min: float | None = None, - liquidity_num_max: float | None = None, - volume_num_min: float | None = None, - volume_num_max: float | None = None, - start_date_min: datetime | None = None, - start_date_max: datetime | None = None, - end_date_min: datetime | None = None, - end_date_max: datetime | None = None, - related_tags: bool | None = None, - cyom: bool | None = None, - uma_resolution_status: str | None = None, - game_id: str | None = None, - sports_market_types: Sequence[str] | None = None, - rewards_min_size: float | None = None, - question_ids: Sequence[str] | None = None, - include_tag: bool | None = None, - extra_params: Dict[str, Any] | None = None, - # Client-side - query: str | None = None, - keywords: Sequence[str] | None = None, - binary: bool | None = None, - min_liquidity: float = 0.0, - categories: Sequence[str] | None = None, - outcomes: Sequence[str] | None = None, - predicate: Callable[[Market], bool] | None = None, - # Log - log: bool | None = False, - ) -> List[Market]: - # ---------- 0) Pre-process ---------- - total_limit = int(limit) - if total_limit <= 0: - return [] - - initial_offset = max(0, int(offset)) - default_page_size_markets = 200 - page_size = min(default_page_size_markets, total_limit) - - def _dt(v: datetime | None) -> str | None: - return v.isoformat() if isinstance(v, datetime) else None - - def 
_lower_list(values: Sequence[str] | None) -> List[str]: - return [v.lower() for v in values] if values else [] - - query_lower = query.lower() if query else None - keyword_lowers = _lower_list(keywords) - category_lowers = _lower_list(categories) - outcome_lowers = _lower_list(outcomes) - - # ---------- 1) Gamma-side params ---------- - gamma_params: Dict[str, Any] = {} - - if order is not None: - gamma_params["order"] = order - if ascending is not None: - gamma_params["ascending"] = ascending - - if closed is not None: - gamma_params["closed"] = closed - if tag_id is not None: - gamma_params["tag_id"] = tag_id - - if ids: - gamma_params["id"] = list(ids) - if slugs: - gamma_params["slug"] = list(slugs) - if clob_token_ids: - gamma_params["clob_token_ids"] = list(clob_token_ids) - if condition_ids: - gamma_params["condition_ids"] = list(condition_ids) - if market_maker_addresses: - gamma_params["market_maker_address"] = list(market_maker_addresses) - - if liquidity_num_min is not None: - gamma_params["liquidity_num_min"] = liquidity_num_min - if liquidity_num_max is not None: - gamma_params["liquidity_num_max"] = liquidity_num_max - if volume_num_min is not None: - gamma_params["volume_num_min"] = volume_num_min - if volume_num_max is not None: - gamma_params["volume_num_max"] = volume_num_max - - if v := _dt(start_date_min): - gamma_params["start_date_min"] = v - if v := _dt(start_date_max): - gamma_params["start_date_max"] = v - if v := _dt(end_date_min): - gamma_params["end_date_min"] = v - if v := _dt(end_date_max): - gamma_params["end_date_max"] = v - - if related_tags is not None: - gamma_params["related_tags"] = related_tags - if cyom is not None: - gamma_params["cyom"] = cyom - if uma_resolution_status is not None: - gamma_params["uma_resolution_status"] = uma_resolution_status - if game_id is not None: - gamma_params["game_id"] = game_id - if sports_market_types: - gamma_params["sports_market_types"] = list(sports_market_types) - if rewards_min_size is not 
None: - gamma_params["rewards_min_size"] = rewards_min_size - if question_ids: - gamma_params["question_ids"] = list(question_ids) - if include_tag is not None: - gamma_params["include_tag"] = include_tag - if extra_params: - gamma_params.update(extra_params) - - # ---------- 2) Gamma pagination via helper ---------- - @self._retry_on_failure - def _fetch_page(offset_: int, limit_: int) -> List[Market]: - params = { - **gamma_params, - "limit": limit_, - "offset": offset_, - } - resp = requests.get( - f"{self.BASE_URL}/markets", - params=params, - timeout=self.timeout, - ) - resp.raise_for_status() - raw = resp.json() - if not isinstance(raw, list): - raise ExchangeError("Gamma /markets response must be a list.") - return [self._parse_market(m) for m in raw] - - gamma_results: List[Market] = self._collect_paginated( - _fetch_page, - total_limit=total_limit, - initial_offset=initial_offset, - page_size=page_size, - dedup_key=None, - log=log, - ) - - # ---------- 3) Client-side filtering ---------- - filtered: List[Market] = [] - - for m in gamma_results: - if binary is not None and m.is_binary != binary: - continue - if m.liquidity < min_liquidity: - continue - if outcome_lowers: - outs = [o.lower() for o in m.outcomes] - if not all(x in outs for x in outcome_lowers): - continue - if category_lowers: - cats = self._extract_categories(m) - if not cats or not any(c in cats for c in category_lowers): - continue - if query_lower or keyword_lowers: - text = self._build_search_text(m) - if query_lower and query_lower not in text: - continue - if any(k not in text for k in keyword_lowers): - continue - if predicate and not predicate(m): - continue - filtered.append(m) - - if len(filtered) > total_limit: - filtered = filtered[:total_limit] - - return filtered - - def fetch_public_trades( - self, - market: Market | str | None = None, - *, - limit: int = 100, - offset: int = 0, - event_id: int | None = None, - user: str | None = None, - side: Literal["BUY", "SELL"] | None = 
None, - taker_only: bool = True, - filter_type: Literal["CASH", "TOKENS"] | None = None, - filter_amount: float | None = None, - as_dataframe: bool = False, - log: bool = False, - ) -> List[PublicTrade] | pd.DataFrame: - total_limit = int(limit) - if total_limit <= 0: - return [] - - if offset < 0 or offset > 10000: - raise ValueError("offset must be between 0 and 10000") - - initial_offset = int(offset) - default_page_size_trades = 500 - page_size = min(default_page_size_trades, total_limit) - - # ---------- condition_id resolve ---------- - condition_id: str | None = None - if isinstance(market, Market): - condition_id = str(market.metadata.get("conditionId", market.id)) - elif isinstance(market, str): - condition_id = market - - base_params: Dict[str, Any] = { - "takerOnly": "true" if taker_only else "false", - } - - if condition_id: - base_params["market"] = condition_id - if event_id is not None: - base_params["eventId"] = event_id - if user: - base_params["user"] = user - if side: - base_params["side"] = side - - if filter_type or filter_amount is not None: - if not filter_type or filter_amount is None: - raise ValueError("filter_type and filter_amount must be provided together") - base_params["filterType"] = filter_type - base_params["filterAmount"] = filter_amount - - # ---------- pagination via helper ---------- - @self._retry_on_failure - def _fetch_page(offset_: int, limit_: int) -> List[Dict[str, Any]]: - params = { - **base_params, - "limit": limit_, - "offset": offset_, - } - - resp = requests.get( - f"{self.DATA_API_URL}/trades", - params=params, - timeout=self.timeout, - ) - resp.raise_for_status() - data = resp.json() - if not isinstance(data, list): - raise ExchangeError("Data-API /trades response must be a list.") - return data - - def _dedup_key(row: Dict[str, Any]) -> tuple[Any, ...]: - # transactionHash + timestamp + side + asset + size + price - return (row.get("transactionHash"), row.get("outcomeIndex")) - - raw_trades: List[Dict[str, Any]] 
= self._collect_paginated( - _fetch_page, - total_limit=total_limit, - initial_offset=initial_offset, - page_size=page_size, - dedup_key=_dedup_key, - log=log, - ) - - # ---------- Dict -> PublicTrade ---------- - trades: List[PublicTrade] = [] - - for row in raw_trades[:total_limit]: - ts = row.get("timestamp") - if isinstance(ts, (int, float)): - ts_dt = datetime.fromtimestamp(int(ts), tz=timezone.utc) - elif isinstance(ts, str) and ts.isdigit(): - ts_dt = datetime.fromtimestamp(int(ts), tz=timezone.utc) - else: - ts_dt = datetime.fromtimestamp(0, tz=timezone.utc) - - trades.append( - PublicTrade( - proxy_wallet=row.get("proxyWallet", ""), - side=row.get("side", ""), - asset=row.get("asset", ""), - condition_id=row.get("conditionId", ""), - size=float(row.get("size", 0) or 0), - price=float(row.get("price", 0) or 0), - timestamp=ts_dt, - title=row.get("title"), - slug=row.get("slug"), - icon=row.get("icon"), - event_slug=row.get("eventSlug"), - outcome=row.get("outcome"), - outcome_index=row.get("outcomeIndex"), - name=row.get("name"), - pseudonym=row.get("pseudonym"), - bio=row.get("bio"), - profile_image=row.get("profileImage"), - profile_image_optimized=row.get("profileImageOptimized"), - transaction_hash=row.get("transactionHash"), - ) - ) - - if not as_dataframe: - return trades - - # ---------- as_dataframe=True: Convert to DataFrame---------- - - df = pd.DataFrame( - [ - { - "timestamp": t.timestamp, - "side": t.side, - "asset": t.asset, - "condition_id": t.condition_id, - "size": t.size, - "price": t.price, - "proxy_wallet": t.proxy_wallet, - "title": t.title, - "slug": t.slug, - "event_slug": t.event_slug, - "outcome": t.outcome, - "outcome_index": t.outcome_index, - "name": t.name, - "pseudonym": t.pseudonym, - "bio": t.bio, - "profile_image": t.profile_image, - "profile_image_optimized": t.profile_image_optimized, - "transaction_hash": t.transaction_hash, - } - for t in trades - ] - ) - - return df.sort_values("timestamp").reset_index(drop=True) - - 
@staticmethod - def _extract_categories(market: Market) -> List[str]: - buckets: List[str] = [] - meta = market.metadata - - raw_cat = meta.get("category") - if isinstance(raw_cat, str): - buckets.append(raw_cat.lower()) - - for key in ("categories", "topics"): - raw = meta.get(key) - if isinstance(raw, str): - buckets.append(raw.lower()) - elif isinstance(raw, Iterable): - buckets.extend(str(item).lower() for item in raw) - - return buckets - - @staticmethod - def _build_search_text(market: Market) -> str: - meta = market.metadata - - base_fields = [ - market.question or "", - meta.get("description", ""), - ] - - extra_keys = [ - "slug", - "category", - "subtitle", - "seriesSlug", - "series", - "seriesTitle", - "seriesDescription", - "tags", - "topics", - "categories", - ] - - extras: List[str] = [] - for key in extra_keys: - value = meta.get(key) - if value is None: - continue - if isinstance(value, str): - extras.append(value) - elif isinstance(value, Iterable): - extras.extend(str(item).lower() for item in value) - else: - extras.append(str(value)) - - return " ".join(str(field) for field in (base_fields + extras)).lower() - - @staticmethod - def _parse_history(history: Iterable[Dict[str, Any]]) -> List[PricePoint]: - parsed: List[PricePoint] = [] - for row in history: - t = row.get("t") - p = row.get("p") - if t is None or p is None: - continue - parsed.append( - PricePoint( - timestamp=datetime.fromtimestamp(int(t), tz=timezone.utc), - price=float(p), - raw=row, - ) - ) - return sorted(parsed, key=lambda item: item.timestamp) - - def get_tag_by_slug(self, slug: str) -> Tag: - if not slug: - raise ValueError("slug must be a non-empty string") - - url = f"{self.BASE_URL}/tags/slug/{slug}" - - @self._retry_on_failure - def _fetch() -> dict: - resp = requests.get(url, timeout=self.timeout) - resp.raise_for_status() - data = resp.json() - if not isinstance(data, dict): - raise ExchangeError("Gamma get_tag_by_slug response must be an object.") - return data - - 
data = _fetch() - - return Tag( - id=str(data.get("id", "")), - label=data.get("label"), - slug=data.get("slug"), - force_show=data.get("forceShow"), - force_hide=data.get("forceHide"), - is_carousel=data.get("isCarousel"), - published_at=data.get("publishedAt"), - created_at=data.get("createdAt"), - updated_at=data.get("UpdatedAt") if "UpdatedAt" in data else data.get("updatedAt"), - raw=data, - ) diff --git a/dr_manhattan/exchanges/polymarket/README.md b/dr_manhattan/exchanges/polymarket/README.md new file mode 100644 index 0000000..fe9eaef --- /dev/null +++ b/dr_manhattan/exchanges/polymarket/README.md @@ -0,0 +1,257 @@ +# Polymarket Exchange + +Unified Python client for the Polymarket prediction market platform. +Built as a mixin-based package — all methods are accessible directly on the `Polymarket` class. + +```python +from dr_manhattan.exchanges import Polymarket + +pm = Polymarket() +market = pm.search_markets(query="bitcoin")[0] +pm.get_price(market) +``` + +--- + +## Architecture + +``` +polymarket/ +├── __init__.py Polymarket class (combines all mixins) +├── polymarket_core.py Constants, config, dataclasses, shared helpers +├── polymarket_gamma.py Gamma API — market discovery & metadata +├── polymarket_clob.py CLOB API — orderbook, pricing, orders, positions +├── polymarket_data.py Data API — trades, leaderboard, analytics +├── polymarket_ctf.py CTF contract — split, merge, redeem tokens +├── polymarket_ws.py WebSocket — orderbook & user streams +├── polymarket_ws_ext.py WebSocket — sports & RTDS streams +├── polymarket_builder.py Builder/operator utilities +├── polymarket_operator.py Operator management +└── polymarket_bridge.py Cross-chain bridge helpers +``` + +--- + +## Dataclasses + +Defined in `polymarket_core.py`: + +| Class | Description | +|-------|-------------| +| `PublicTrade` | A single trade from the Data API — wallet, side, asset, price, size, timestamp, market metadata | +| `PricePoint` | A price history data point — timestamp + price | 
+| `Tag` | A market/event tag — id, label, slug, visibility flags | + +Imported from `models/`: + +| Class | Description | +|-------|-------------| +| `Market` | Core market object — id (condition_id), question, outcomes, prices, metadata (gamma_id, token_ids, slug) | +| `Order` | Order object — id, status, side, price, size, timestamps | +| `Position` | Position object — market, outcome, size, avg price, P&L | + +--- + +## Market Identifiers + +Most market-related methods accept `Market | str`. +When a string is passed, it is auto-detected: + +| Format | Example | Detection | +|--------|---------|-----------| +| Condition ID | `"0x3fd189cac928..."` | Starts with `0x`, 66 chars | +| Gamma ID | `"630806"` | Digits only, < 20 chars | +| Token ID | `"104698087530604..."` | Digits only, ≥ 20 chars | +| Slug | `"will-trump-win..."` | Everything else | + +A `Market` object contains all of these internally — passing one avoids extra API calls. + +--- + +## Files + +### `polymarket_core.py` — Foundation +Constants, initialization, shared utilities. + +| Method | Input | Output | +|--------|-------|--------| +| `normalize_token` | `token: str` | `str` | +| `parse_market_identifier` | `identifier: str` | `str` | + +Internal helpers: `_resolve_condition_id`, `_resolve_gamma_id`, `_resolve_token_id`, +`_retry_on_failure`, `_collect_paginated`, `_ensure_market`. + +--- + +### `polymarket_gamma.py` — Market Discovery +Gamma API for browsing markets, events, tags, series, and sports. 
+ +| Method | Input | Output | +|--------|-------|--------| +| `fetch_markets` | `params?: Dict` | `list[Market]` | +| `fetch_market` | `market: Market \| str` | `Market` | +| `fetch_markets_by_slug` | `slug_or_url: str` | `list[Market]` | +| `search_markets` | `query?: str` + filters | `list[Market]` | +| `find_tradeable_market` | `binary?: bool` | `Market` | +| `find_crypto_hourly_market` | `token_symbol?: str` | `tuple[Market, ...]` | +| `fetch_market_tags` | `market: Market \| str` | `list[Dict]` | +| `fetch_events` | `limit?, offset?` | `list[Dict]` | +| `fetch_event` | `event_id: str` | `Dict` | +| `fetch_event_by_slug` | `slug: str` | `Dict` | +| `fetch_event_tags` | `event_id: str` | `list[Dict]` | +| `fetch_tags` | `limit?, offset?` | `list[Dict]` | +| `fetch_tag_by_id` | `tag_id: str` | `Dict` | +| `get_tag_by_slug` | `slug: str` | `Tag` | +| `fetch_series` | `limit?, offset?` | `list[Dict]` | +| `fetch_series_by_id` | `series_id: str` | `Dict` | +| `fetch_sports_market_types` | — | `list[Dict]` | +| `fetch_sports_metadata` | — | `Dict` | +| `fetch_supported_assets` | — | `list[Dict]` | +| `get_gamma_status` | — | `Dict` | + +--- + +### `polymarket_clob.py` — Orderbook & Trading +CLOB API for pricing, orderbooks, orders, positions, and price history. 
+ +| Method | Input | Output | +|--------|-------|--------| +| `get_price` | `market: Market \| str`, `outcome?` | `Dict` | +| `get_midpoint` | `market: Market \| str`, `outcome?` | `Dict` | +| `get_orderbook` | `market: Market \| str`, `outcome?` | `Dict` | +| `fetch_token_ids` | `market: Market \| str` | `list[str]` | +| `fetch_price_history` | `market: Market \| str`, `interval?` | `list[PricePoint]` | +| `calculate_spread` | `market: Market` | `float` | +| `calculate_expected_value` | `market: Market`, `outcome`, `price` | `float` | +| `get_optimal_order_size` | `market: Market`, `max_size` | `float` | +| `calculate_implied_probability` | `price: float` | `float` | +| `create_order` | `market_id, outcome, side, price, size` | `Order` 🔐 | +| `cancel_order` | `order_id` | `Order` 🔐 | +| `fetch_order` | `order_id` | `Order` 🔐 | +| `fetch_open_orders` | `market_id?` | `list[Order]` 🔐 | +| `fetch_positions` | `market_id?` | `list[Position]` 🔐 | +| `fetch_positions_for_market` | `market: Market` | `list[Position]` 🔐 | +| `fetch_balance` | — | `Dict` 🔐 | +| `get_websocket` | — | `PolymarketWebSocket` | +| `get_user_websocket` | — | `PolymarketUserWebSocket` 🔐 | +| `get_sports_websocket` | — | `PolymarketSportsWebSocket` | +| `get_rtds_websocket` | — | `PolymarketRTDSWebSocket` | + +🔐 = requires private key / wallet configuration + +--- + +### `polymarket_data.py` — Analytics & Public Data +Data API for trades, leaderboards, holdings, and portfolio analytics. 
+ +| Method | Input | Output | +|--------|-------|--------| +| `fetch_public_trades` | `market?: Market \| str`, `limit?` | `list[PublicTrade]` | +| `fetch_leaderboard` | `category?, time_period?, order_by?` | `list[Dict]` | +| `fetch_open_interest` | `market: Market \| str` | `Dict` | +| `fetch_top_holders` | `market: Market \| str`, `limit?` | `list[Dict]` | +| `fetch_user_activity` | `address: str`, `limit?` | `list[Dict]` | +| `fetch_closed_positions` | `address: str`, `limit?` | `list[Dict]` | +| `fetch_positions_data` | `address: str`, `limit?` | `list[Dict]` | +| `fetch_portfolio_value` | `address: str` | `Dict` | +| `fetch_traded_count` | `address: str` | `Dict` | +| `fetch_live_volume` | `event_id: int` | `Dict` | +| `fetch_builder_leaderboard` | `limit?, period?` | `list[Dict]` | +| `fetch_builder_volume` | `builder_id: str`, `period?` | `list[Dict]` | + +Supports pagination — pass `limit > 500` and results are auto-fetched across pages. + +--- + +### `polymarket_ctf.py` — On-chain Token Operations +CTF contract interactions for splitting, merging, and redeeming conditional tokens. + +| Method | Input | Output | +|--------|-------|--------| +| `split` | `market: Market \| str`, `amount: float` | `Dict` 🔐 | +| `merge` | `market: Market \| str`, `amount: float` | `Dict` 🔐 | +| `redeem` | `market: Market \| str` | `Dict` 🔐 | +| `redeem_all` | — | `list[Dict]` 🔐 | +| `fetch_redeemable_positions` | — | `list[Dict]` 🔐 | + +All methods require wallet (private key + funder/Safe address). + +--- + +### `polymarket_ws.py` — Core WebSockets +Real-time orderbook and user event streams. + +| Class | Description | +|-------|-------------| +| `PolymarketWebSocket` | Orderbook updates — subscribe to token_id channels | +| `PolymarketUserWebSocket` | User-specific events — orders, trades, positions 🔐 | + +--- + +### `polymarket_ws_ext.py` — Extended WebSockets +Sports and real-time data streams. 
+ +| Class | Description | +|-------|-------------| +| `PolymarketSportsWebSocket` | Live sports event updates | +| `PolymarketRTDSWebSocket` | Real-Time Data Service stream | + +--- + +### `polymarket_builder.py` — Builder Utilities +Helper methods for building complex operations. + +### `polymarket_operator.py` — Operator Management +Operator approval and management for the CLOB client. + +### `polymarket_bridge.py` — Bridge Helpers +Cross-chain deposit/withdrawal utilities. + +--- + +## Quick Examples + +```python +from dr_manhattan.exchanges import Polymarket + +pm = Polymarket() + +# Search and inspect +markets = pm.search_markets(query="bitcoin", limit=5) +market = markets[0] + +# All of these work the same: +pm.get_price(market) # Market object +pm.get_price("0x3fd189cac928...") # condition_id +pm.get_price("104698087530604...") # token_id +pm.get_price("will-bitcoin-go-up") # slug + +# Yes/No pricing +pm.get_price(market, outcome="Yes") +pm.get_price(market, outcome="No") + +# Analytics +pm.fetch_open_interest(market) +pm.fetch_top_holders(market, limit=10) +pm.fetch_public_trades(market=market, limit=100) + +# Leaderboard +pm.fetch_leaderboard(category="CRYPTO", time_period="WEEK", limit=10) + +# User analytics (by wallet address) +pm.fetch_portfolio_value("0x1234...") +pm.fetch_user_activity("0x1234...", limit=50) + +# Pagination (auto-handles pages) +trades = pm.fetch_public_trades(limit=2000) # fetches 4 pages of 500 +``` + +--- + +## Stats + +- **Total methods**: 76 +- **Public (no auth)**: 60 +- **Auth required**: 16 +- **Lines of code**: ~4,900 +- **Test coverage**: 64/64 public methods verified diff --git a/dr_manhattan/exchanges/polymarket/__init__.py b/dr_manhattan/exchanges/polymarket/__init__.py new file mode 100644 index 0000000..b487216 --- /dev/null +++ b/dr_manhattan/exchanges/polymarket/__init__.py @@ -0,0 +1,28 @@ +"""Polymarket exchange - unified API""" + +from __future__ import annotations + +from ...base.exchange import Exchange +from 
.polymarket_bridge import PolymarketBridge +from .polymarket_clob import PolymarketCLOB +from .polymarket_core import PolymarketCore +from .polymarket_core import PricePoint as PricePoint +from .polymarket_core import PublicTrade as PublicTrade +from .polymarket_core import Tag as Tag +from .polymarket_ctf import PolymarketCTF +from .polymarket_data import PolymarketData +from .polymarket_gamma import PolymarketGamma + + +class Polymarket( + PolymarketCore, + PolymarketCLOB, + PolymarketGamma, + PolymarketData, + PolymarketCTF, + PolymarketBridge, + Exchange, +): + """Polymarket exchange implementation - all APIs unified via mixins""" + + pass diff --git a/dr_manhattan/exchanges/polymarket/polymarket_bridge.py b/dr_manhattan/exchanges/polymarket/polymarket_bridge.py new file mode 100644 index 0000000..06e1d6d --- /dev/null +++ b/dr_manhattan/exchanges/polymarket/polymarket_bridge.py @@ -0,0 +1,32 @@ +from __future__ import annotations + +from typing import Dict, List + +import requests + + +class PolymarketBridge: + """Bridge API mixin: cross-chain asset transfers (read-only).""" + + BRIDGE_URL = "https://bridge.polymarket.com" + + def fetch_supported_assets(self) -> List[Dict]: + """ + Fetch supported bridge assets. + + Returns: + List of supported asset dictionaries + """ + + @self._retry_on_failure + def _fetch(): + resp = requests.get(f"{self.BRIDGE_URL}/supported-assets", timeout=self.timeout) + resp.raise_for_status() + data = resp.json() + if isinstance(data, list): + return data + if isinstance(data, dict): + return data.get("supportedAssets", []) + return [] + + return _fetch() diff --git a/dr_manhattan/exchanges/polymarket/polymarket_builder.py b/dr_manhattan/exchanges/polymarket/polymarket_builder.py new file mode 100644 index 0000000..5a86807 --- /dev/null +++ b/dr_manhattan/exchanges/polymarket/polymarket_builder.py @@ -0,0 +1,257 @@ +"""Polymarket exchange implementation using Builder profile. 
+ +This module provides a Polymarket exchange that uses Builder profile +credentials (api_key, api_secret, passphrase) instead of private keys. + +Security Benefits: +- No private key exposure to the server +- Users can revoke API credentials at any time from Polymarket +- Credentials are scoped to trading operations only +""" + +from datetime import datetime +from typing import Any, Dict, Optional + +from py_builder_signing_sdk.config import BuilderApiKeyCreds, BuilderConfig +from py_clob_client.client import ClobClient +from py_clob_client.clob_types import AssetType, BalanceAllowanceParams, OrderArgs, OrderType + +from ...base.errors import AuthenticationError, InvalidOrder +from ...models.order import Order, OrderSide, OrderStatus, OrderTimeInForce +from . import Polymarket + + +class PolymarketBuilder(Polymarket): + """Polymarket exchange using Builder profile for authentication. + + This class extends Polymarket to use Builder API credentials instead of + private keys. This is the recommended approach for remote/server deployments + where storing private keys is undesirable. 
+ + Config: + api_key: Polymarket Builder API key + api_secret: Polymarket Builder API secret + api_passphrase: Polymarket Builder passphrase + chain_id: Polygon chain ID (default: 137) + + Example: + exchange = PolymarketBuilder({ + 'api_key': 'your_api_key', + 'api_secret': 'your_api_secret', + 'api_passphrase': 'your_passphrase', + }) + """ + + def __init__(self, config: Optional[Dict[str, Any]] = None): + """Initialize Polymarket with Builder profile credentials.""" + # Don't call parent __init__ directly - it tries to use private_key + # Instead, do minimal Exchange init and our own setup + from ...base.exchange import Exchange + + Exchange.__init__(self, config) + self._ws = None + self._user_ws = None + self.private_key = None + self.funder = None + self._clob_client = None + self._address = None + + # Extract Builder credentials + self._api_key = self.config.get("api_key") + self._api_secret = self.config.get("api_secret") + self._api_passphrase = self.config.get("api_passphrase") + + if not all([self._api_key, self._api_secret, self._api_passphrase]): + raise AuthenticationError( + "Builder profile requires api_key, api_secret, and api_passphrase" + ) + + self._initialize_builder_client() + + def _initialize_builder_client(self): + """Initialize CLOB client with Builder profile credentials.""" + try: + # Create Builder credentials + builder_creds = BuilderApiKeyCreds( + key=self._api_key, + secret=self._api_secret, + passphrase=self._api_passphrase, + ) + + # Create Builder config + builder_config = BuilderConfig(local_builder_creds=builder_creds) + + if not builder_config.is_valid(): + raise AuthenticationError("Invalid Builder profile credentials") + + # Initialize CLOB client with Builder config + chain_id = self.config.get("chain_id", 137) + self._clob_client = ClobClient( + host=self.CLOB_URL, + chain_id=chain_id, + builder_config=builder_config, + ) + + # Verify Builder auth is available + if not self._clob_client.can_builder_auth(): + raise 
AuthenticationError("Builder authentication not available") + + except AuthenticationError: + raise + except Exception as e: + raise AuthenticationError(f"Failed to initialize Builder client: {e}") + + def create_order( + self, + market_id: str, + outcome: str, + side: OrderSide, + price: float, + size: float, + params: Optional[Dict[str, Any]] = None, + time_in_force: OrderTimeInForce = OrderTimeInForce.GTC, + ) -> Order: + """Create order on Polymarket CLOB using Builder profile.""" + if not self._clob_client: + raise AuthenticationError("CLOB client not initialized.") + + if not self._clob_client.can_builder_auth(): + raise AuthenticationError("Builder authentication not available.") + + token_id = params.get("token_id") if params else None + if not token_id: + raise InvalidOrder("token_id required in params") + + # Map our OrderTimeInForce to py_clob_client OrderType + order_type_map = { + OrderTimeInForce.GTC: OrderType.GTC, + OrderTimeInForce.FOK: OrderType.FOK, + OrderTimeInForce.IOC: OrderType.GTD, + } + clob_order_type = order_type_map.get(time_in_force, OrderType.GTC) + + try: + # Create and sign order using Builder + order_args = OrderArgs( + token_id=token_id, + price=float(price), + size=float(size), + side=side.value.upper(), + ) + + signed_order = self._clob_client.create_order(order_args) + result = self._clob_client.post_order(signed_order, clob_order_type) + + # Parse result + order_id = result.get("orderID", "") if isinstance(result, dict) else str(result) + status_str = result.get("status", "LIVE") if isinstance(result, dict) else "LIVE" + + status_map = { + "LIVE": OrderStatus.OPEN, + "MATCHED": OrderStatus.FILLED, + "CANCELLED": OrderStatus.CANCELLED, + } + + return Order( + id=order_id, + market_id=market_id, + outcome=outcome, + side=side, + price=price, + size=size, + filled=0, + status=status_map.get(status_str, OrderStatus.OPEN), + created_at=datetime.now(), + updated_at=datetime.now(), + time_in_force=time_in_force, + ) + + except 
Exception as e: + raise InvalidOrder(f"Order placement failed: {str(e)}") + + def cancel_order(self, order_id: str, market_id: Optional[str] = None) -> Order: + """Cancel order on Polymarket using Builder profile.""" + if not self._clob_client: + raise AuthenticationError("CLOB client not initialized.") + + if not self._clob_client.can_builder_auth(): + raise AuthenticationError("Builder authentication not available.") + + try: + result = self._clob_client.cancel(order_id) + if isinstance(result, dict): + return self._parse_order(result) + return Order( + id=order_id, + market_id=market_id or "", + outcome="", + side=OrderSide.BUY, + price=0, + size=0, + filled=0, + status=OrderStatus.CANCELLED, + created_at=datetime.now(), + updated_at=datetime.now(), + ) + except Exception as e: + raise InvalidOrder(f"Failed to cancel order {order_id}: {str(e)}") + + def fetch_balance(self) -> Dict[str, float]: + """Fetch account balance from Polymarket using Builder profile.""" + if not self._clob_client: + raise AuthenticationError("CLOB client not initialized.") + + if not self._clob_client.can_builder_auth(): + raise AuthenticationError("Builder authentication not available.") + + try: + # Fetch USDC (collateral) balance + params = BalanceAllowanceParams(asset_type=AssetType.COLLATERAL) + balance_data = self._clob_client.get_balance_allowance(params=params) + + # Extract balance from response + usdc_balance = 0.0 + if isinstance(balance_data, dict) and "balance" in balance_data: + try: + # Balance is returned as a string in wei (6 decimals for USDC) + usdc_balance = float(balance_data["balance"]) / 1e6 + except (ValueError, TypeError): + usdc_balance = 0.0 + + return {"USDC": usdc_balance} + + except Exception as e: + raise AuthenticationError(f"Failed to fetch balance: {str(e)}") + + def fetch_open_orders( + self, market_id: Optional[str] = None, params: Optional[Dict[str, Any]] = None + ) -> list[Order]: + """Fetch open orders using Builder profile.""" + if not 
self._clob_client: + raise AuthenticationError("CLOB client not initialized.") + + if not self._clob_client.can_builder_auth(): + raise AuthenticationError("Builder authentication not available.") + + try: + response = self._clob_client.get_orders() + + if isinstance(response, list): + orders = response + elif isinstance(response, dict) and "data" in response: + orders = response["data"] + else: + return [] + + if not orders: + return [] + + # Filter by market_id if provided + if market_id: + orders = [o for o in orders if o.get("market") == market_id] + + return [self._parse_order(order) for order in orders] + except Exception as e: + if self.verbose: + print(f"Warning: Failed to fetch open orders: {e}") + return [] diff --git a/dr_manhattan/exchanges/polymarket/polymarket_clob.py b/dr_manhattan/exchanges/polymarket/polymarket_clob.py new file mode 100644 index 0000000..f04253e --- /dev/null +++ b/dr_manhattan/exchanges/polymarket/polymarket_clob.py @@ -0,0 +1,725 @@ +from __future__ import annotations + +import json +import logging +import traceback +from datetime import datetime +from typing import Any, Dict, List, Literal, Optional + +import pandas as pd +import requests +from py_clob_client.clob_types import AssetType, BalanceAllowanceParams, OrderArgs, OrderType + +from ...base.errors import ( + AuthenticationError, + ExchangeError, + InvalidOrder, +) +from ...models.market import Market +from ...models.order import Order, OrderSide, OrderStatus, OrderTimeInForce +from ...models.position import Position +from .polymarket_core import PricePoint +from .polymarket_ws import PolymarketUserWebSocket, PolymarketWebSocket +from .polymarket_ws_ext import PolymarketRTDSWebSocket, PolymarketSportsWebSocket + + +class PolymarketCLOB: + """CLOB API mixin: orderbook, orders, positions, balance, price history, websockets.""" + + def fetch_token_ids(self, market: Market | str) -> list[str]: + """ + Fetch token IDs for a specific market from CLOB API + + The Gamma API 
doesn't include token IDs, so we need to fetch them + from the CLOB API when we need to trade. + + Based on actual CLOB API response structure. + + Args: + market: Market object or condition_id string + + Returns: + List of token IDs as strings + + Raises: + ExchangeError: If token IDs cannot be fetched + """ + condition_id = self._resolve_condition_id(market) + try: + # Try simplified-markets endpoint + # Response structure: {"data": [{"condition_id": ..., "tokens": [{"token_id": ..., "outcome": ...}]}]} + try: + response = requests.get(f"{self.CLOB_URL}/simplified-markets", timeout=self.timeout) + + if response.status_code == 200: + result = response.json() + + # Check if response has "data" key + markets_list = result.get("data", result if isinstance(result, list) else []) + + # Find the market with matching condition_id + for market in markets_list: + market_id = market.get("condition_id") or market.get("id") + if market_id == condition_id: + # Extract token IDs from tokens array + # Each token is an object: {"token_id": "...", "outcome": "...", "price": ...} + tokens = market.get("tokens", []) + if tokens and isinstance(tokens, list): + # Extract just the token_id strings + token_ids = [] + for token in tokens: + if isinstance(token, dict) and "token_id" in token: + token_ids.append(str(token["token_id"])) + elif isinstance(token, str): + # In case it's already a string + token_ids.append(token) + + if token_ids: + if self.verbose: + print( + f"✓ Found {len(token_ids)} token IDs via simplified-markets" + ) + for i, tid in enumerate(token_ids): + outcome = ( + tokens[i].get("outcome", f"outcome_{i}") + if isinstance(tokens[i], dict) + else f"outcome_{i}" + ) + print(f" [{i}] {outcome}: {tid}") + return token_ids + + # Fallback: check for clobTokenIds + clob_tokens = market.get("clobTokenIds") + if clob_tokens and isinstance(clob_tokens, list): + token_ids = [str(t) for t in clob_tokens] + if self.verbose: + print(f"✓ Found token IDs via clobTokenIds: 
{token_ids}") + return token_ids + except Exception as e: + if self.verbose: + print(f"simplified-markets failed: {e}") + + # Try sampling-simplified-markets endpoint + try: + response = requests.get( + f"{self.CLOB_URL}/sampling-simplified-markets", timeout=self.timeout + ) + + if response.status_code == 200: + markets_list = response.json() + if not isinstance(markets_list, list): + markets_list = markets_list.get("data", []) + + for market in markets_list: + market_id = market.get("condition_id") or market.get("id") + if market_id == condition_id: + # Extract from tokens array + tokens = market.get("tokens", []) + if tokens and isinstance(tokens, list): + token_ids = [] + for token in tokens: + if isinstance(token, dict) and "token_id" in token: + token_ids.append(str(token["token_id"])) + elif isinstance(token, str): + token_ids.append(token) + + if token_ids: + if self.verbose: + print( + f"✓ Found token IDs via sampling-simplified-markets: {len(token_ids)} tokens" + ) + return token_ids + except Exception as e: + if self.verbose: + print(f"sampling-simplified-markets failed: {e}") + + # Try markets endpoint + try: + response = requests.get(f"{self.CLOB_URL}/markets", timeout=self.timeout) + + if response.status_code == 200: + markets_list = response.json() + if not isinstance(markets_list, list): + markets_list = markets_list.get("data", []) + + for market in markets_list: + market_id = market.get("condition_id") or market.get("id") + if market_id == condition_id: + # Extract from tokens array + tokens = market.get("tokens", []) + if tokens and isinstance(tokens, list): + token_ids = [] + for token in tokens: + if isinstance(token, dict) and "token_id" in token: + token_ids.append(str(token["token_id"])) + elif isinstance(token, str): + token_ids.append(token) + + if token_ids: + if self.verbose: + print( + f"✓ Found token IDs via markets endpoint: {len(token_ids)} tokens" + ) + return token_ids + except Exception as e: + if self.verbose: + print(f"markets 
endpoint failed: {e}") + + raise ExchangeError( + f"Could not fetch token IDs for market {condition_id} from any CLOB endpoint" + ) + + except requests.RequestException as e: + raise ExchangeError(f"Network error fetching token IDs: {e}") + + def get_price( + self, market: Market | str, side: str = "buy", outcome: int | str = 0 + ) -> Dict[str, Any]: + """ + Fetch price for a single token. + + Args: + market: Market object, token_id string, or condition_id string. + If Market or condition_id, use `outcome` to select Yes(0)/No(1). + side: Order side — "buy" or "sell" (required by API) + outcome: 0/"Yes" for first token, 1/"No" for second (ignored if raw token_id) + + Returns: + Price dictionary with 'price' key + """ + token_id = self._resolve_token_id(market, outcome) + try: + response = requests.get( + f"{self.CLOB_URL}/price", + params={"token_id": token_id, "side": side}, + timeout=self.timeout, + ) + if response.status_code == 200: + return response.json() + return {} + except Exception as e: + if self.verbose: + print(f"Failed to fetch price: {e}") + return {} + + def get_midpoint(self, market: Market | str, outcome: int | str = 0) -> Dict[str, Any]: + """ + Fetch midpoint price for a token. + + Args: + market: Market object, token_id string, or condition_id string. + outcome: 0/"Yes" for first token, 1/"No" for second (ignored if raw token_id) + + Returns: + Midpoint price dictionary + """ + token_id = self._resolve_token_id(market, outcome) + try: + response = requests.get( + f"{self.CLOB_URL}/midpoint", + params={"token_id": token_id}, + timeout=self.timeout, + ) + if response.status_code == 200: + return response.json() + return {} + except Exception as e: + if self.verbose: + print(f"Failed to fetch midpoint: {e}") + return {} + + def get_orderbook(self, market: Market | str, outcome: int | str = 0) -> Dict[str, Any]: + """ + Fetch orderbook for a specific token via REST API. + + Args: + market: Market object, token_id string, or condition_id string. 
+ outcome: 0/"Yes" for first token, 1/"No" for second (ignored if raw token_id) + + Returns: + Dictionary with 'bids' and 'asks' arrays + Each entry: {'price': str, 'size': str} + + Example: + >>> orderbook = exchange.get_orderbook(token_id) + >>> best_bid = float(orderbook['bids'][0]['price']) + >>> best_ask = float(orderbook['asks'][0]['price']) + """ + token_id = self._resolve_token_id(market, outcome) + try: + response = requests.get( + f"{self.CLOB_URL}/book", params={"token_id": token_id}, timeout=self.timeout + ) + + if response.status_code == 200: + return response.json() + + return {"bids": [], "asks": []} + + except Exception as e: + if self.verbose: + print(f"Failed to fetch orderbook: {e}") + return {"bids": [], "asks": []} + + def create_order( + self, + market_id: str, + outcome: str, + side: OrderSide, + price: float, + size: float, + params: Optional[Dict[str, Any]] = None, + time_in_force: OrderTimeInForce = OrderTimeInForce.GTC, + ) -> Order: + """Create order on Polymarket CLOB""" + if not self._clob_client: + raise AuthenticationError("CLOB client not initialized. 
Private key required.") + + token_id = params.get("token_id") if params else None + if not token_id: + raise InvalidOrder("token_id required in params") + + # Map our OrderTimeInForce to py_clob_client OrderType + order_type_map = { + OrderTimeInForce.GTC: OrderType.GTC, + OrderTimeInForce.FOK: OrderType.FOK, + OrderTimeInForce.IOC: OrderType.GTD, # py_clob_client uses GTD for IOC behavior + } + clob_order_type = order_type_map.get(time_in_force, OrderType.GTC) + + try: + # Create and sign order + order_args = OrderArgs( + token_id=token_id, + price=float(price), + size=float(size), + side=side.value.upper(), + ) + + signed_order = self._clob_client.create_order(order_args) + result = self._clob_client.post_order(signed_order, clob_order_type) + + # Parse result + order_id = result.get("orderID", "") if isinstance(result, dict) else str(result) + status_str = result.get("status", "LIVE") if isinstance(result, dict) else "LIVE" + + status_map = { + "LIVE": OrderStatus.OPEN, + "MATCHED": OrderStatus.FILLED, + "CANCELLED": OrderStatus.CANCELLED, + } + + return Order( + id=order_id, + market_id=market_id, + outcome=outcome, + side=side, + price=price, + size=size, + filled=0, + status=status_map.get(status_str, OrderStatus.OPEN), + created_at=datetime.now(), + updated_at=datetime.now(), + time_in_force=time_in_force, + ) + + except Exception as e: + raise InvalidOrder(f"Order placement failed: {str(e)}") + + def cancel_order(self, order_id: str, market_id: Optional[str] = None) -> Order: + """Cancel order on Polymarket""" + if not self._clob_client: + raise AuthenticationError("CLOB client not initialized. 
Private key required.") + + try: + result = self._clob_client.cancel(order_id) + if isinstance(result, dict): + return self._parse_order(result) + return Order( + id=order_id, + market_id=market_id or "", + outcome="", + side=OrderSide.BUY, + price=0, + size=0, + filled=0, + status=OrderStatus.CANCELLED, + created_at=datetime.now(), + updated_at=datetime.now(), + ) + except Exception as e: + raise InvalidOrder(f"Failed to cancel order {order_id}: {str(e)}") + + def fetch_order(self, order_id: str, market_id: Optional[str] = None) -> Order: + """Fetch order details""" + data = self._request("GET", f"/orders/{order_id}") + return self._parse_order(data) + + def fetch_open_orders( + self, market_id: Optional[str] = None, params: Optional[Dict[str, Any]] = None + ) -> list[Order]: + """ + Fetch open orders using CLOB client + + Args: + market_id: Can be either the numeric market ID or the hex conditionId. + If numeric, we filter by exact match. If hex (0x...), we use it directly. + """ + if not self._clob_client: + raise AuthenticationError("CLOB client not initialized. Private key required.") + + try: + # Use CLOB client's get_orders method + response = self._clob_client.get_orders() + + # Response is a list directly + if isinstance(response, list): + orders = response + elif isinstance(response, dict) and "data" in response: + orders = response["data"] + else: + if self.verbose: + print(f"Debug: Unexpected response format: {type(response)}") + return [] + + if not orders: + return [] + + # Filter by market_id if provided + # Note: CLOB orders use hex conditionId (0x...) 
in the 'market' field + if market_id: + orders = [o for o in orders if o.get("market") == market_id] + + # Debug: Print first order's fields to identify size field + if orders and self.verbose: + debug_logger = logging.getLogger(__name__) + debug_logger.debug(f"Sample order fields: {list(orders[0].keys())}") + debug_logger.debug(f"Sample order data: {orders[0]}") + + # Parse orders + return [self._parse_order(order) for order in orders] + except Exception as e: + if self.verbose: + print(f"Warning: Failed to fetch open orders: {e}") + traceback.print_exc() + return [] + + def fetch_positions( + self, market_id: Optional[str] = None, params: Optional[Dict[str, Any]] = None + ) -> list[Position]: + """ + Fetch current positions from Polymarket. + + Note: On Polymarket, positions are represented by conditional token balances. + This method queries token balances for the specified market. + Since positions require market-specific token data, we can't query positions + without a market context. Returns empty list if no market_id is provided. + """ + if not self._clob_client: + raise AuthenticationError("CLOB client not initialized. Private key required.") + + # Positions require market context on Polymarket + # Without market_id, we can't determine which tokens to query + if not market_id: + return [] + + # For now, return empty positions list + # Positions will be queried on-demand when we have the market object with token IDs + # This avoids the chicken-and-egg problem of needing to fetch the market just to get positions + return [] + + def fetch_positions_for_market(self, market: Market) -> list[Position]: + """ + Fetch positions for a specific market object. + This is the recommended way to fetch positions on Polymarket. + + Args: + market: Market object with token IDs in metadata + + Returns: + List of Position objects + """ + if not self._clob_client: + raise AuthenticationError("CLOB client not initialized. 
Private key required.") + + try: + positions = [] + token_ids_raw = market.metadata.get("clobTokenIds", []) + + # Parse token IDs if they're stored as JSON string + if isinstance(token_ids_raw, str): + token_ids = json.loads(token_ids_raw) + else: + token_ids = token_ids_raw + + if not token_ids or len(token_ids) < 2: + return positions + + # Query balance for each token + for i, token_id in enumerate(token_ids): + try: + params_obj = BalanceAllowanceParams( + asset_type=AssetType.CONDITIONAL, token_id=token_id + ) + balance_data = self._clob_client.get_balance_allowance(params=params_obj) + + if isinstance(balance_data, dict) and "balance" in balance_data: + balance_raw = balance_data["balance"] + # Convert from wei (6 decimals) + size = float(balance_raw) / 1e6 if balance_raw else 0.0 + + if size > 0: + # Determine outcome from market.outcomes + outcome = ( + market.outcomes[i] + if i < len(market.outcomes) + else ("Yes" if i == 0 else "No") + ) + + # Get current price from market.prices + current_price = market.prices.get(outcome, 0.0) + + position = Position( + market_id=market.id, + outcome=outcome, + size=size, + average_price=0.0, # Not available from balance query + current_price=current_price, + ) + positions.append(position) + except Exception as e: + if self.verbose: + print(f"Failed to fetch balance for token {token_id}: {e}") + continue + + return positions + + except Exception as e: + raise ExchangeError(f"Failed to fetch positions for market: {str(e)}") + + def fetch_balance(self) -> Dict[str, float]: + """ + Fetch account balance from Polymarket using CLOB client + + Returns: + Dictionary with balance information including USDC + """ + if not self._clob_client: + raise AuthenticationError("CLOB client not initialized. 
Private key required.") + + try: + # Fetch USDC (collateral) balance + params = BalanceAllowanceParams(asset_type=AssetType.COLLATERAL) + balance_data = self._clob_client.get_balance_allowance(params=params) + + # Extract balance from response + usdc_balance = 0.0 + if isinstance(balance_data, dict) and "balance" in balance_data: + try: + # Balance is returned as a string in wei (6 decimals for USDC) + usdc_balance = float(balance_data["balance"]) / 1e6 + except (ValueError, TypeError): + usdc_balance = 0.0 + + return {"USDC": usdc_balance} + + except Exception as e: + raise ExchangeError(f"Failed to fetch balance: {str(e)}") + + def fetch_price_history( + self, + market: Market | str, + *, + outcome: int | str | None = None, + interval: Literal["1m", "1h", "6h", "1d", "1w", "max"] = "1m", + fidelity: int = 10, + as_dataframe: bool = False, + ) -> List[PricePoint] | pd.DataFrame: + if interval not in self.SUPPORTED_INTERVALS: + raise ValueError( + f"Unsupported interval '{interval}'. Pick from {self.SUPPORTED_INTERVALS}." 
+ ) + + market_obj = self._ensure_market(market) + token_id = self._lookup_token_id(market_obj, outcome) + + params = { + "market": token_id, + "interval": interval, + "fidelity": fidelity, + } + + @self._retry_on_failure + def _fetch() -> List[Dict[str, Any]]: + resp = requests.get(self.PRICES_HISTORY_URL, params=params, timeout=self.timeout) + resp.raise_for_status() + payload = resp.json() + history = payload.get("history", []) + if not isinstance(history, list): + raise ExchangeError("Invalid response: 'history' must be a list.") + return history + + history = _fetch() + points = self._parse_history(history) + + if as_dataframe: + data = { + "timestamp": [p.timestamp for p in points], + "price": [p.price for p in points], + } + return pd.DataFrame(data).sort_values("timestamp").reset_index(drop=True) + + return points + + def _parse_order(self, data: Dict[str, Any]) -> Order: + """Parse order data from API response""" + order_id = data.get("id") or data.get("orderID") or "" + + # Try multiple field names for size (CLOB API may use different names) + size = float( + data.get("size") + or data.get("original_size") + or data.get("amount") + or data.get("original_amount") + or 0 + ) + filled = float(data.get("filled") or data.get("matched") or data.get("matched_amount") or 0) + + return Order( + id=order_id, + market_id=data.get("market_id", ""), + outcome=data.get("outcome", ""), + side=OrderSide(data.get("side", "buy").lower()), + price=float(data.get("price", 0)), + size=size, + filled=filled, + status=self._parse_order_status(data.get("status")), + created_at=self._parse_datetime(data.get("created_at")), + updated_at=self._parse_datetime(data.get("updated_at")), + ) + + def _parse_position(self, data: Dict[str, Any]) -> Position: + """Parse position data from API response""" + return Position( + market_id=data.get("market_id", ""), + outcome=data.get("outcome", ""), + size=float(data.get("size", 0)), + average_price=float(data.get("average_price", 0)), + 
current_price=float(data.get("current_price", 0)), + ) + + def _parse_order_status(self, status: str) -> OrderStatus: + """Convert string status to OrderStatus enum""" + status_map = { + "pending": OrderStatus.PENDING, + "open": OrderStatus.OPEN, + "filled": OrderStatus.FILLED, + "partially_filled": OrderStatus.PARTIALLY_FILLED, + "cancelled": OrderStatus.CANCELLED, + "rejected": OrderStatus.REJECTED, + } + return status_map.get(status, OrderStatus.OPEN) + + @staticmethod + def _extract_token_ids(market: Market) -> List[str]: + raw_ids = market.metadata.get("clobTokenIds", []) + if isinstance(raw_ids, str): + try: + raw_ids = json.loads(raw_ids) + except json.JSONDecodeError: + raw_ids = [raw_ids] + return [str(token_id) for token_id in raw_ids if token_id] + + def _lookup_token_id(self, market: Market, outcome: int | str | None) -> str: + token_ids = self._extract_token_ids(market) + if not token_ids: + raise ExchangeError("Cannot fetch price history without token IDs in metadata.") + + if outcome is None: + outcome_index = 0 + elif isinstance(outcome, int): + outcome_index = outcome + else: + try: + outcome_index = market.outcomes.index(outcome) + except ValueError as err: + raise ExchangeError(f"Outcome {outcome} not found in market {market.id}") from err + + if outcome_index < 0 or outcome_index >= len(token_ids): + raise ExchangeError( + f"Outcome index {outcome_index} out of range for market {market.id}" + ) + + return token_ids[outcome_index] + + def get_websocket(self) -> PolymarketWebSocket: + """ + Get WebSocket instance for real-time orderbook updates. + + The WebSocket automatically updates the exchange's mid-price cache + when orderbook data is received. 
+ + Returns: + PolymarketWebSocket instance + + Example: + ws = exchange.get_websocket() + await ws.watch_orderbook(asset_id, callback) + ws.start() + """ + if self._ws is None: + self._ws = PolymarketWebSocket( + config={"verbose": self.verbose, "auto_reconnect": True}, exchange=self + ) + return self._ws + + def get_user_websocket(self) -> PolymarketUserWebSocket: + """ + Get User WebSocket instance for real-time trade/fill notifications. + + Requires CLOB client to be initialized (private key required). + + Returns: + PolymarketUserWebSocket instance + + Example: + user_ws = exchange.get_user_websocket() + user_ws.on_trade(lambda trade: print(f"Fill: {trade.size} @ {trade.price}")) + user_ws.start() + """ + if not self._clob_client: + raise AuthenticationError( + "CLOB client not initialized. Private key required for user WebSocket." + ) + + if self._user_ws is None: + # Get API credentials from CLOB client + creds = self._clob_client.creds + if not creds: + raise AuthenticationError("API credentials not available") + + self._user_ws = PolymarketUserWebSocket( + api_key=creds.api_key, + api_secret=creds.api_secret, + api_passphrase=creds.api_passphrase, + verbose=self.verbose, + ) + return self._user_ws + + def get_sports_websocket(self) -> PolymarketSportsWebSocket: + """ + Get a Sports WebSocket instance for real-time sports market updates. + + Returns: + PolymarketSportsWebSocket instance + """ + return PolymarketSportsWebSocket(verbose=self.verbose) + + def get_rtds_websocket(self) -> PolymarketRTDSWebSocket: + """ + Get a Real-Time Data Stream WebSocket for crypto prices and comments. 
+ + Returns: + PolymarketRTDSWebSocket instance + """ + return PolymarketRTDSWebSocket(verbose=self.verbose) diff --git a/dr_manhattan/exchanges/polymarket/polymarket_core.py b/dr_manhattan/exchanges/polymarket/polymarket_core.py new file mode 100644 index 0000000..8547f79 --- /dev/null +++ b/dr_manhattan/exchanges/polymarket/polymarket_core.py @@ -0,0 +1,412 @@ +from __future__ import annotations + +from dataclasses import dataclass +from datetime import datetime +from typing import Any, Callable, Dict, List, Optional, Sequence + +import requests +from py_clob_client.client import ClobClient + +from ...base.errors import ( + AuthenticationError, + ExchangeError, + MarketNotFound, + NetworkError, + RateLimitError, +) +from ...models.market import Market + + +@dataclass +class PublicTrade: + proxy_wallet: str + side: str + asset: str + condition_id: str + size: float + price: float + timestamp: datetime + title: str | None + slug: str | None + icon: str | None + event_slug: str | None + outcome: str | None + outcome_index: int | None + name: str | None + pseudonym: str | None + bio: str | None + profile_image: str | None + profile_image_optimized: str | None + transaction_hash: str | None + + +@dataclass +class PricePoint: + timestamp: datetime + price: float + raw: Dict[str, Any] + + +@dataclass +class Tag: + id: str + label: str | None + slug: str | None + force_show: bool | None + force_hide: bool | None + is_carousel: bool | None + published_at: str | None + created_at: str | None + updated_at: str | None + raw: dict + + +class PolymarketCore: + """Common infrastructure mixin: constants, init, HTTP, parsing helpers.""" + + BASE_URL = "https://gamma-api.polymarket.com" + CLOB_URL = "https://clob.polymarket.com" + PRICES_HISTORY_URL = f"{CLOB_URL}/prices-history" + DATA_API_URL = "https://data-api.polymarket.com" + SUPPORTED_INTERVALS: Sequence[str] = ("1m", "1h", "6h", "1d", "1w", "max") + + # CTF (Conditional Token Framework) constants + CTF_CONTRACT = 
"0x4D97DCd97eC945f40cF65F87097ACe5EA0476045" + USDC_E = "0x2791Bca1f2de4661ED88A30C99A7a9449Aa84174" + RELAYER_URL = "https://relayer-v2.polymarket.com" + ZERO_ADDRESS = "0x0000000000000000000000000000000000000000" + POLYGON_RPC_URL = "https://polygon-rpc.com" + CHAIN_ID = 137 + + # Safe ABI for nonce + SAFE_ABI = [ + { + "inputs": [], + "name": "nonce", + "outputs": [{"type": "uint256"}], + "stateMutability": "view", + "type": "function", + } + ] + + # Market type tags (Polymarket-specific) + TAG_1H = "102175" # 1-hour crypto price markets + + # Token normalization mapping + TOKEN_ALIASES = { + "BITCOIN": "BTC", + "ETHEREUM": "ETH", + "SOLANA": "SOL", + } + + @staticmethod + def normalize_token(token: str) -> str: + """Normalize token symbol to standard format (e.g., BITCOIN -> BTC)""" + token_upper = token.upper() + return PolymarketCore.TOKEN_ALIASES.get(token_upper, token_upper) + + @staticmethod + def parse_market_identifier(identifier: str) -> str: + """ + Parse market slug from URL or return slug as-is. 
+ + Supports multiple URL formats: + - https://polymarket.com/event/SLUG + - https://polymarket.com/event/SLUG?param=value + - SLUG (direct slug input) + + Args: + identifier: Market slug or full URL + + Returns: + Market slug + + Example: + >>> Polymarket.parse_market_identifier("fed-decision-in-december") + 'fed-decision-in-december' + >>> Polymarket.parse_market_identifier("https://polymarket.com/event/fed-decision-in-december") + 'fed-decision-in-december' + """ + if not identifier: + return "" + + # If it's a URL, extract the slug + if identifier.startswith("http"): + # Remove query parameters + identifier = identifier.split("?")[0] + # Extract slug from URL + # Format: https://polymarket.com/event/SLUG + parts = identifier.rstrip("/").split("/") + if "event" in parts: + idx = parts.index("event") + if idx + 1 < len(parts): + return parts[idx + 1] + # Fallback: return last part + return parts[-1] + + return identifier + + @property + def id(self) -> str: + return "polymarket" + + @property + def name(self) -> str: + return "Polymarket" + + def __init__(self, config: Optional[Dict[str, Any]] = None): + """Initialize Polymarket exchange""" + super().__init__(config) + self._ws = None + self._user_ws = None + self.private_key = self.config.get("private_key") + self.funder = self.config.get("funder") + self._clob_client = None + self._address = None + self._w3 = None + + # Builder API credentials for CTF operations (split/merge/redeem) + self.builder_api_key = self.config.get("builder_api_key") + self.builder_secret = self.config.get("builder_secret") + self.builder_passphrase = self.config.get("builder_passphrase") + + # Initialize CLOB client if private key is provided + if self.private_key: + self._initialize_clob_client() + + def _initialize_clob_client(self): + """Initialize CLOB client with authentication.""" + try: + chain_id = self.config.get("chain_id", 137) + signature_type = self.config.get("signature_type", 2) + + # Initialize authenticated client + 
self._clob_client = ClobClient( + host=self.CLOB_URL, + key=self.private_key, + chain_id=chain_id, + signature_type=signature_type, + funder=self.funder, + ) + + # Derive and set API credentials for L2 authentication + api_creds = self._clob_client.create_or_derive_api_creds() + if not api_creds: + raise AuthenticationError("Failed to derive API credentials") + + self._clob_client.set_api_creds(api_creds) + + # Verify L2 mode + if self._clob_client.mode < 2: + raise AuthenticationError( + f"Client not in L2 mode (current mode: {self._clob_client.mode})" + ) + + # Store address + try: + self._address = self._clob_client.get_address() + except Exception: + self._address = None + + except AuthenticationError: + raise + except Exception as e: + raise AuthenticationError(f"Failed to initialize CLOB client: {e}") + + def _request(self, method: str, endpoint: str, params: Optional[Dict] = None) -> Any: + """Make HTTP request to Polymarket API with retry logic""" + + @self._retry_on_failure + def _make_request(): + url = f"{self.BASE_URL}{endpoint}" + headers = {} + + if self.api_key: + headers["Authorization"] = f"Bearer {self.api_key}" + + try: + response = requests.request( + method, url, params=params, headers=headers, timeout=self.timeout + ) + + # Handle rate limiting + if response.status_code == 429: + retry_after = int(response.headers.get("Retry-After", 1)) + raise RateLimitError(f"Rate limited. 
Retry after {retry_after}s") + + response.raise_for_status() + return response.json() + except requests.Timeout as e: + raise NetworkError(f"Request timeout: {e}") + except requests.ConnectionError as e: + raise NetworkError(f"Connection error: {e}") + except requests.HTTPError as e: + if response.status_code == 404: + raise ExchangeError(f"Resource not found: {endpoint}") + elif response.status_code == 401: + raise AuthenticationError(f"Authentication failed: {e}") + elif response.status_code == 403: + raise AuthenticationError(f"Access forbidden: {e}") + else: + raise ExchangeError(f"HTTP error: {e}") + except requests.RequestException as e: + raise ExchangeError(f"Request failed: {e}") + + return _make_request() + + def _collect_paginated( + self, + fetch_page: Callable[[int, int], List[Any]], + *, + total_limit: int, + initial_offset: int = 0, + page_size: int = 500, + dedup_key: Callable[[Any], Any] | None = None, + log: bool | None = False, + ) -> List[Any]: + if total_limit <= 0: + return [] + + results: List[Any] = [] + current_offset = int(initial_offset) + total_limit = int(total_limit) + page_size = max(1, int(page_size)) + + seen: set[Any] = set() if dedup_key else set() + + while len(results) < total_limit: + remaining = total_limit - len(results) + page_limit = min(page_size, remaining) + + if log: + print("current-offset:", current_offset) + print("page_limit:", page_limit) + print("----------") + + page = fetch_page(current_offset, page_limit) + + if not page: + break + + if dedup_key: + new_items: List[Any] = [] + for item in page: + key = dedup_key(item) + if key in seen: + continue + seen.add(key) + new_items.append(item) + + if not new_items: + break + + results.extend(new_items) + else: + results.extend(page) + + current_offset += len(page) + + if len(page) < page_limit: + break + + if len(results) > total_limit: + results = results[:total_limit] + + return results + + def _parse_datetime(self, timestamp: Optional[Any]) -> Optional[datetime]: + 
"""Parse datetime from various formats""" + if not timestamp: + return None + + if isinstance(timestamp, datetime): + return timestamp + + try: + if isinstance(timestamp, (int, float)): + return datetime.fromtimestamp(timestamp) + return datetime.fromisoformat(str(timestamp)) + except (ValueError, TypeError): + return None + + def _ensure_market(self, market: Market | str) -> Market: + if isinstance(market, Market): + return market + fetched = self.fetch_market(market) + if not fetched: + raise MarketNotFound(f"Market {market} not found") + return fetched + + # ------------------------------------------------------------------ + # ID resolvers: Market | str → specific ID type + # ------------------------------------------------------------------ + + def _resolve_condition_id(self, market: Market | str) -> str: + """Extract condition_id from Market object or pass through str.""" + if isinstance(market, Market): + # Try both key formats (Gamma uses conditionId, CLOB uses condition_id) + cid = ( + market.metadata.get("conditionId") + or market.metadata.get("condition_id") + or market.id + ) + return str(cid) + return market + + def _resolve_gamma_id(self, market: Market | str) -> str: + """Extract Gamma numeric ID from Market object or pass through str. + + If Market has no Gamma ID, fetches it via Gamma API. 
+ """ + if isinstance(market, Market): + # Gamma API stores numeric id under "id" key (not always present in CLOB data) + gid = market.metadata.get("id") + if gid and str(gid).isdigit(): + return str(gid) + # Fallback: fetch from Gamma via condition_id → need to resolve + cid = self._resolve_condition_id(market) + fetched = self.fetch_market(cid) + gid = fetched.metadata.get("id") + if gid and str(gid).isdigit(): + return str(gid) + raise ExchangeError("Could not resolve Gamma numeric ID for this market") + return market + + def _resolve_token_id(self, market: Market | str, outcome: int | str = 0) -> str: + """Extract token_id for a given outcome from Market object or pass through str. + + Args: + market: Market object or raw token_id/condition_id string. + If a condition_id (0x...) is passed, token_ids are fetched via CLOB. + outcome: 0/"Yes" for first token, 1/"No" for second token. + Ignored if a raw token_id string is passed. + """ + if isinstance(market, str): + # If it looks like a condition_id, resolve to token_id + if market.startswith("0x") and len(market) == 66: + token_ids = self.fetch_token_ids(market) + idx = 0 + if isinstance(outcome, int): + idx = outcome + elif isinstance(outcome, str) and outcome.lower() in ("no", "1"): + idx = 1 + if idx >= len(token_ids): + raise ExchangeError(f"Token index {idx} out of range") + return str(token_ids[idx]) + # Otherwise assume it's already a token_id + return market + # Market object + token_ids = market.metadata.get("clobTokenIds", []) + if not token_ids: + token_ids = self._extract_token_ids(market) + if not token_ids: + raise ExchangeError("Market object has no token IDs") + idx = 0 + if isinstance(outcome, int): + idx = outcome + elif isinstance(outcome, str): + if outcome.lower() in ("no", "1"): + idx = 1 + if idx >= len(token_ids): + raise ExchangeError(f"Token index {idx} out of range (have {len(token_ids)} tokens)") + return str(token_ids[idx]) diff --git 
a/dr_manhattan/exchanges/polymarket/polymarket_ctf.py b/dr_manhattan/exchanges/polymarket/polymarket_ctf.py new file mode 100644 index 0000000..264145e --- /dev/null +++ b/dr_manhattan/exchanges/polymarket/polymarket_ctf.py @@ -0,0 +1,604 @@ +from __future__ import annotations + +import base64 +import hashlib +import hmac +import time +from typing import Any, Dict, List, Optional + +import requests +from eth_abi import encode as abi_encode +from eth_account import Account +from eth_account.messages import encode_defunct +from web3 import Web3 + +from ...base.errors import AuthenticationError, ExchangeError +from ...models.market import Market + + +class PolymarketCTF: + """CTF (Conditional Token Framework) mixin: split, merge, redeem operations.""" + + # ========================================================================= + # Web3 / Safe Helpers + # ========================================================================= + + def _get_web3(self) -> Web3: + """Get or initialize Web3 instance""" + if self._w3 is None: + rpc_url = self.config.get("rpc_url", self.POLYGON_RPC_URL) + self._w3 = Web3(Web3.HTTPProvider(rpc_url)) + return self._w3 + + def _get_eoa_address(self) -> str: + """Get EOA address from private key""" + if not self.private_key: + raise AuthenticationError("Private key required for CTF operations") + pk = self.private_key + if pk.startswith("0x"): + pk = pk[2:] + account = Account.from_key(pk) + return account.address + + def _get_safe_nonce(self) -> int: + """Get current nonce from Safe contract on-chain""" + if not self.funder: + raise AuthenticationError("Funder (Safe) address required for CTF operations") + w3 = self._get_web3() + safe = w3.eth.contract(address=Web3.to_checksum_address(self.funder), abi=self.SAFE_ABI) + return safe.functions.nonce().call() + + def _compute_safe_tx_hash( + self, + to: str, + data: bytes, + nonce: int, + ) -> bytes: + """Compute EIP-712 Safe transaction hash""" + # Safe Transaction TypeHash + safe_tx_typehash 
= Web3.keccak( + text="SafeTx(address to,uint256 value,bytes data,uint8 operation," + "uint256 safeTxGas,uint256 baseGas,uint256 gasPrice,address gasToken," + "address refundReceiver,uint256 nonce)" + ) + + # Domain separator + domain_separator_typehash = Web3.keccak( + text="EIP712Domain(uint256 chainId,address verifyingContract)" + ) + + domain_separator = Web3.keccak( + abi_encode( + ["bytes32", "uint256", "address"], + [ + domain_separator_typehash, + self.CHAIN_ID, + Web3.to_checksum_address(self.funder), + ], + ) + ) + + # Encode transaction data + data_hash = Web3.keccak(data) + + safe_tx_data = abi_encode( + [ + "bytes32", + "address", + "uint256", + "bytes32", + "uint8", + "uint256", + "uint256", + "uint256", + "address", + "address", + "uint256", + ], + [ + safe_tx_typehash, + Web3.to_checksum_address(to), + 0, # value + data_hash, + 0, # operation (Call) + 0, # safeTxGas + 0, # baseGas + 0, # gasPrice + Web3.to_checksum_address(self.ZERO_ADDRESS), # gasToken + Web3.to_checksum_address(self.ZERO_ADDRESS), # refundReceiver + nonce, + ], + ) + + safe_tx_hash = Web3.keccak(safe_tx_data) + + # Final hash + final_hash = Web3.keccak(b"\x19\x01" + domain_separator + safe_tx_hash) + + return final_hash + + def _sign_safe_transaction(self, to: str, data: str, nonce: int) -> str: + """Sign a Safe transaction and return the signature""" + if not self.private_key: + raise AuthenticationError("Private key required for signing") + + pk = self.private_key + if pk.startswith("0x"): + pk = pk[2:] + + # Convert data to bytes + data_bytes = bytes.fromhex(data[2:]) if data.startswith("0x") else bytes.fromhex(data) + + # Compute hash + tx_hash = self._compute_safe_tx_hash(to=to, data=data_bytes, nonce=nonce) + + # Sign with eth_sign style (adds prefix) + account = Account.from_key(pk) + message = encode_defunct(primitive=tx_hash) + signed = account.sign_message(message) + + # Adjust v for Safe (add 4) + v = signed.v + 4 + signature = ( + signed.r.to_bytes(32, "big") + 
signed.s.to_bytes(32, "big") + v.to_bytes(1, "big") + ) + + return "0x" + signature.hex() + + # ========================================================================= + # Builder API / Relayer Helpers + # ========================================================================= + + def _build_hmac_signature( + self, secret: str, timestamp: str, method: str, request_path: str, body: str = None + ) -> str: + """Creates HMAC signature for Builder API authentication""" + base64_secret = base64.urlsafe_b64decode(secret) + message = str(timestamp) + str(method) + str(request_path) + if body: + message += str(body).replace("'", '"') + h = hmac.new(base64_secret, bytes(message, "utf-8"), hashlib.sha256) + return base64.urlsafe_b64encode(h.digest()).decode("utf-8") + + def _get_builder_headers(self, method: str, path: str, body: dict = None) -> Dict[str, str]: + """Generate Builder API authentication headers""" + if not all([self.builder_api_key, self.builder_secret, self.builder_passphrase]): + raise AuthenticationError( + "Builder API credentials required " + "(builder_api_key, builder_secret, builder_passphrase)" + ) + + timestamp = str(int(time.time())) + + body_str = None + if body: + body_str = str(body).replace("'", '"') + + signature = self._build_hmac_signature( + self.builder_secret, timestamp, method, path, body_str + ) + + return { + "POLY_BUILDER_API_KEY": self.builder_api_key, + "POLY_BUILDER_SIGNATURE": signature, + "POLY_BUILDER_TIMESTAMP": timestamp, + "POLY_BUILDER_PASSPHRASE": self.builder_passphrase, + "Content-Type": "application/json", + } + + def _submit_to_relayer(self, to: str, data: str, nonce: int, signature: str) -> Dict[str, Any]: + """Submit transaction to Polymarket Relayer""" + path = "/submit" + + eoa_address = self._get_eoa_address() + + payload = { + "type": "SAFE", + "from": eoa_address.lower(), + "to": to.lower(), + "proxyWallet": self.funder.lower(), + "data": data, + "nonce": str(nonce), + "value": "", + "signature": signature, + 
"signatureParams": { + "gasPrice": "0", + "operation": "0", + "safeTxnGas": "0", + "baseGas": "0", + "gasToken": self.ZERO_ADDRESS, + "refundReceiver": self.ZERO_ADDRESS, + }, + } + + headers = self._get_builder_headers("POST", path, payload) + + response = requests.post( + f"{self.RELAYER_URL}{path}", + json=payload, + headers=headers, + timeout=30, + ) + + if response.status_code != 200: + raise ExchangeError(f"Relayer error: {response.status_code} - {response.text}") + + return response.json() + + def _poll_transaction( + self, transaction_id: str, max_polls: int = 20 + ) -> Optional[Dict[str, Any]]: + """Poll for transaction status""" + path = f"/transaction?id={transaction_id}" + + for _ in range(max_polls): + try: + response = requests.get(f"{self.RELAYER_URL}{path}", timeout=10) + if response.status_code == 200: + txns = response.json() + if txns and len(txns) > 0: + state = txns[0].get("state") + if state in ["STATE_MINED", "STATE_CONFIRMED", "STATE_EXECUTED"]: + return txns[0] + if state == "STATE_FAILED": + return None + except Exception: + pass + time.sleep(2) + + return None + + # ========================================================================= + # CTF Encoding Helpers + # ========================================================================= + + def _encode_split_position(self, condition_id: str, amount_wei: int) -> str: + """Encode splitPosition function call""" + # Function selector for splitPosition + selector = Web3.keccak(text="splitPosition(address,bytes32,bytes32,uint256[],uint256)")[:4] + + if condition_id.startswith("0x"): + condition_id_bytes = bytes.fromhex(condition_id[2:]) + else: + condition_id_bytes = bytes.fromhex(condition_id) + + encoded_params = abi_encode( + ["address", "bytes32", "bytes32", "uint256[]", "uint256"], + [ + self.USDC_E, + bytes.fromhex("00" * 32), # parentCollectionId = 0 + condition_id_bytes, + [1, 2], # partition for binary markets + amount_wei, + ], + ) + + return "0x" + selector.hex() + 
encoded_params.hex() + + def _encode_merge_positions(self, condition_id: str, amount_wei: int) -> str: + """Encode mergePositions function call""" + # Function selector for mergePositions + selector = Web3.keccak(text="mergePositions(address,bytes32,bytes32,uint256[],uint256)")[:4] + + if condition_id.startswith("0x"): + condition_id_bytes = bytes.fromhex(condition_id[2:]) + else: + condition_id_bytes = bytes.fromhex(condition_id) + + encoded_params = abi_encode( + ["address", "bytes32", "bytes32", "uint256[]", "uint256"], + [ + self.USDC_E, + bytes.fromhex("00" * 32), # parentCollectionId = 0 + condition_id_bytes, + [1, 2], # partition for binary markets + amount_wei, + ], + ) + + return "0x" + selector.hex() + encoded_params.hex() + + def _encode_redeem_positions(self, condition_id: str) -> str: + """Encode redeemPositions function call""" + # Function selector (verified: 0x01b7037c) + selector = bytes.fromhex("01b7037c") + + if condition_id.startswith("0x"): + condition_id_bytes = bytes.fromhex(condition_id[2:]) + else: + condition_id_bytes = bytes.fromhex(condition_id) + + encoded_params = abi_encode( + ["address", "bytes32", "bytes32", "uint256[]"], + [ + self.USDC_E, + bytes.fromhex("00" * 32), # parentCollectionId = 0 + condition_id_bytes, + [1, 2], # Both outcomes + ], + ) + + return "0x" + selector.hex() + encoded_params.hex() + + # ========================================================================= + # Public CTF Methods: Split, Merge, Redeem + # ========================================================================= + + def split( + self, + market: Market | str, + amount: float, + wait_for_confirmation: bool = True, + ) -> Dict[str, Any]: + """ + Split USDC into Yes and No conditional tokens. 
+ + Args: + market: Market object or condition_id string (hex) + amount: Amount of USDC to split (e.g., 10.0 = $10) + wait_for_confirmation: If True, wait for transaction to be mined + + Returns: + Dict with transaction details: + - tx_id: Relayer transaction ID + - tx_hash: On-chain transaction hash (if confirmed) + - status: Transaction status + - condition_id: The condition ID + - amount: Amount split + + Example: + >>> result = exchange.split("0x123...", 10.0) + >>> print(f"Split {result['amount']} USDC, tx: {result['tx_hash']}") + """ + # Validate credentials + condition_id = self._resolve_condition_id(market) + if not self.funder: + raise AuthenticationError("Funder (Safe) address required for split") + + # Convert amount to wei (USDC has 6 decimals) + amount_wei = int(amount * 1e6) + + # Get Safe nonce + nonce = self._get_safe_nonce() + + # Encode transaction data + data = self._encode_split_position(condition_id, amount_wei) + + # Sign transaction + signature = self._sign_safe_transaction(to=self.CTF_CONTRACT, data=data, nonce=nonce) + + # Submit to relayer + result = self._submit_to_relayer( + to=self.CTF_CONTRACT, data=data, nonce=nonce, signature=signature + ) + + tx_id = result.get("transactionID") + + response = { + "tx_id": tx_id, + "tx_hash": None, + "status": "submitted", + "condition_id": condition_id, + "amount": amount, + } + + # Poll for confirmation if requested + if wait_for_confirmation and tx_id: + final = self._poll_transaction(tx_id) + if final: + response["tx_hash"] = final.get("transactionHash") + response["status"] = final.get("state", "confirmed") + else: + response["status"] = "timeout_or_failed" + + return response + + def merge( + self, + market: Market | str, + amount: float, + wait_for_confirmation: bool = True, + ) -> Dict[str, Any]: + """ + Merge Yes and No conditional tokens back into USDC. 
+ + Args: + market: Market object or condition_id string (hex) + amount: Amount of token pairs to merge (e.g., 10.0 = 10 Yes + 10 No -> 10 USDC) + wait_for_confirmation: If True, wait for transaction to be mined + + Returns: + Dict with transaction details: + - tx_id: Relayer transaction ID + - tx_hash: On-chain transaction hash (if confirmed) + - status: Transaction status + - condition_id: The condition ID + - amount: Amount merged + + Example: + >>> result = exchange.merge("0x123...", 10.0) + >>> print(f"Merged {result['amount']} tokens, tx: {result['tx_hash']}") + """ + # Validate credentials + condition_id = self._resolve_condition_id(market) + if not self.funder: + raise AuthenticationError("Funder (Safe) address required for merge") + + # Convert amount to wei (USDC has 6 decimals) + amount_wei = int(amount * 1e6) + + # Get Safe nonce + nonce = self._get_safe_nonce() + + # Encode transaction data + data = self._encode_merge_positions(condition_id, amount_wei) + + # Sign transaction + signature = self._sign_safe_transaction(to=self.CTF_CONTRACT, data=data, nonce=nonce) + + # Submit to relayer + result = self._submit_to_relayer( + to=self.CTF_CONTRACT, data=data, nonce=nonce, signature=signature + ) + + tx_id = result.get("transactionID") + + response = { + "tx_id": tx_id, + "tx_hash": None, + "status": "submitted", + "condition_id": condition_id, + "amount": amount, + } + + # Poll for confirmation if requested + if wait_for_confirmation and tx_id: + final = self._poll_transaction(tx_id) + if final: + response["tx_hash"] = final.get("transactionHash") + response["status"] = final.get("state", "confirmed") + else: + response["status"] = "timeout_or_failed" + + return response + + def redeem( + self, + market: Market | str, + wait_for_confirmation: bool = True, + ) -> Dict[str, Any]: + """ + Redeem winning tokens from a resolved market. 
+ + Args: + market: Market object or condition_id string (hex) of a resolved market + wait_for_confirmation: If True, wait for transaction to be mined + + Returns: + Dict with transaction details: + - tx_id: Relayer transaction ID + - tx_hash: On-chain transaction hash (if confirmed) + - status: Transaction status + - condition_id: The condition ID + + Example: + >>> result = exchange.redeem("0x123...") + >>> print(f"Redeemed, tx: {result['tx_hash']}") + """ + # Validate credentials + condition_id = self._resolve_condition_id(market) + if not self.funder: + raise AuthenticationError("Funder (Safe) address required for redeem") + + # Get Safe nonce + nonce = self._get_safe_nonce() + + # Encode transaction data + data = self._encode_redeem_positions(condition_id) + + # Sign transaction + signature = self._sign_safe_transaction(to=self.CTF_CONTRACT, data=data, nonce=nonce) + + # Submit to relayer + result = self._submit_to_relayer( + to=self.CTF_CONTRACT, data=data, nonce=nonce, signature=signature + ) + + tx_id = result.get("transactionID") + + response = { + "tx_id": tx_id, + "tx_hash": None, + "status": "submitted", + "condition_id": condition_id, + } + + # Poll for confirmation if requested + if wait_for_confirmation and tx_id: + final = self._poll_transaction(tx_id) + if final: + response["tx_hash"] = final.get("transactionHash") + response["status"] = final.get("state", "confirmed") + else: + response["status"] = "timeout_or_failed" + + return response + + def fetch_redeemable_positions(self) -> List[Dict[str, Any]]: + """ + Fetch positions that can be redeemed (from resolved markets). + + Returns: + List of redeemable position dictionaries with fields: + - conditionId: The condition ID + - title: Market title + - outcome: Winning outcome + - size: Token amount + - currentValue: Value in USDC + + Example: + >>> positions = exchange.fetch_redeemable_positions() + >>> for pos in positions: + ... 
print(f"{pos['title']}: {pos['outcome']} - ${pos['currentValue']}") + """ + if not self.funder: + raise AuthenticationError( + "Funder (Safe) address required to fetch redeemable positions" + ) + + url = f"{self.DATA_API_URL}/positions" + params = {"user": self.funder.lower(), "redeemable": "true"} + + try: + response = requests.get(url, params=params, timeout=30) + response.raise_for_status() + data = response.json() + return data if isinstance(data, list) else [] + except Exception as e: + raise ExchangeError(f"Failed to fetch redeemable positions: {e}") + + def redeem_all( + self, + wait_for_confirmation: bool = True, + ) -> List[Dict[str, Any]]: + """ + Redeem all redeemable positions. + + Args: + wait_for_confirmation: If True, wait for each transaction to be mined + + Returns: + List of redemption results for each condition + + Example: + >>> results = exchange.redeem_all() + >>> for r in results: + ... print(f"{r['condition_id']}: {r['status']}") + """ + positions = self.fetch_redeemable_positions() + if not positions: + return [] + + # Extract unique condition IDs + condition_ids = list( + set(pos.get("conditionId") for pos in positions if pos.get("conditionId")) + ) + + results = [] + for condition_id in condition_ids: + try: + result = self.redeem( + condition_id=condition_id, + wait_for_confirmation=wait_for_confirmation, + ) + results.append(result) + except Exception as e: + results.append( + { + "condition_id": condition_id, + "status": "error", + "error": str(e), + } + ) + + return results diff --git a/dr_manhattan/exchanges/polymarket/polymarket_data.py b/dr_manhattan/exchanges/polymarket/polymarket_data.py new file mode 100644 index 0000000..87c8d9c --- /dev/null +++ b/dr_manhattan/exchanges/polymarket/polymarket_data.py @@ -0,0 +1,494 @@ +from __future__ import annotations + +from datetime import datetime, timezone +from typing import Any, Dict, List, Literal, Optional + +import pandas as pd +import requests + +from ...base.errors import 
ExchangeError +from ...models.market import Market +from .polymarket_core import PublicTrade + + +class PolymarketData: + """Data API mixin: public trades, leaderboard, activity, holders, open interest.""" + + def fetch_public_trades( + self, + market: Market | str | None = None, + *, + limit: int = 100, + offset: int = 0, + event_id: int | None = None, + user: str | None = None, + side: Literal["BUY", "SELL"] | None = None, + taker_only: bool = True, + filter_type: Literal["CASH", "TOKENS"] | None = None, + filter_amount: float | None = None, + as_dataframe: bool = False, + log: bool = False, + ) -> List[PublicTrade] | pd.DataFrame: + total_limit = int(limit) + if total_limit <= 0: + return [] + + if offset < 0 or offset > 10000: + raise ValueError("offset must be between 0 and 10000") + + initial_offset = int(offset) + default_page_size_trades = 500 + page_size = min(default_page_size_trades, total_limit) + + # ---------- condition_id resolve ---------- + condition_id: str | None = None + if isinstance(market, Market): + condition_id = str(market.metadata.get("conditionId", market.id)) + elif isinstance(market, str): + condition_id = market + + base_params: Dict[str, Any] = { + "takerOnly": "true" if taker_only else "false", + } + + if condition_id: + base_params["market"] = condition_id + if event_id is not None: + base_params["eventId"] = event_id + if user: + base_params["user"] = user + if side: + base_params["side"] = side + + if filter_type or filter_amount is not None: + if not filter_type or filter_amount is None: + raise ValueError("filter_type and filter_amount must be provided together") + base_params["filterType"] = filter_type + base_params["filterAmount"] = filter_amount + + # ---------- pagination via helper ---------- + @self._retry_on_failure + def _fetch_page(offset_: int, limit_: int) -> List[Dict[str, Any]]: + params = { + **base_params, + "limit": limit_, + "offset": offset_, + } + + resp = requests.get( + f"{self.DATA_API_URL}/trades", + 
params=params,
                timeout=self.timeout,
            )
            resp.raise_for_status()
            data = resp.json()
            if not isinstance(data, list):
                raise ExchangeError("Data-API /trades response must be a list.")
            return data

        def _dedup_key(row: Dict[str, Any]) -> tuple[Any, ...]:
            # Dedupe key is (transactionHash, outcomeIndex) only.
            # NOTE(review): the previous comment listed timestamp/side/asset/
            # size/price as well; confirm two distinct fills can never share
            # both fields before relying on this for strict de-duplication.
            return (row.get("transactionHash"), row.get("outcomeIndex"))

        raw_trades: List[Dict[str, Any]] = self._collect_paginated(
            _fetch_page,
            total_limit=total_limit,
            initial_offset=initial_offset,
            page_size=page_size,
            dedup_key=_dedup_key,
            log=log,
        )

        # ---------- Dict -> PublicTrade ----------
        trades: List[PublicTrade] = []

        for row in raw_trades[:total_limit]:
            # Timestamp may arrive as an epoch number or a numeric string;
            # anything else falls back to the Unix epoch (1970-01-01 UTC).
            ts = row.get("timestamp")
            if isinstance(ts, (int, float)):
                ts_dt = datetime.fromtimestamp(int(ts), tz=timezone.utc)
            elif isinstance(ts, str) and ts.isdigit():
                ts_dt = datetime.fromtimestamp(int(ts), tz=timezone.utc)
            else:
                ts_dt = datetime.fromtimestamp(0, tz=timezone.utc)

            trades.append(
                PublicTrade(
                    proxy_wallet=row.get("proxyWallet", ""),
                    side=row.get("side", ""),
                    asset=row.get("asset", ""),
                    condition_id=row.get("conditionId", ""),
                    # `or 0` guards against explicit null values in the payload.
                    size=float(row.get("size", 0) or 0),
                    price=float(row.get("price", 0) or 0),
                    timestamp=ts_dt,
                    title=row.get("title"),
                    slug=row.get("slug"),
                    icon=row.get("icon"),
                    event_slug=row.get("eventSlug"),
                    outcome=row.get("outcome"),
                    outcome_index=row.get("outcomeIndex"),
                    name=row.get("name"),
                    pseudonym=row.get("pseudonym"),
                    bio=row.get("bio"),
                    profile_image=row.get("profileImage"),
                    profile_image_optimized=row.get("profileImageOptimized"),
                    transaction_hash=row.get("transactionHash"),
                )
            )

        if not as_dataframe:
            return trades

        # ---------- as_dataframe=True: Convert to DataFrame----------

        df = pd.DataFrame(
            [
                {
                    "timestamp": t.timestamp,
                    "side": t.side,
                    "asset": t.asset,
                    "condition_id": t.condition_id,
                    "size": t.size,
                    "price": t.price,
                    "proxy_wallet":
t.proxy_wallet, + "title": t.title, + "slug": t.slug, + "event_slug": t.event_slug, + "outcome": t.outcome, + "outcome_index": t.outcome_index, + "name": t.name, + "pseudonym": t.pseudonym, + "bio": t.bio, + "profile_image": t.profile_image, + "profile_image_optimized": t.profile_image_optimized, + "transaction_hash": t.transaction_hash, + } + for t in trades + ] + ) + + return df.sort_values("timestamp").reset_index(drop=True) + + # ========================================================================= + # New Data API methods + # ========================================================================= + + def fetch_leaderboard( + self, + limit: int = 25, + offset: int = 0, + order_by: Literal["PNL", "VOL"] = "PNL", + time_period: Literal["DAY", "WEEK", "MONTH", "ALL"] = "DAY", + category: Literal[ + "OVERALL", + "POLITICS", + "SPORTS", + "CRYPTO", + "CULTURE", + "MENTIONS", + "WEATHER", + "ECONOMICS", + "TECH", + "FINANCE", + ] = "OVERALL", + user: Optional[str] = None, + ) -> List[Dict]: + """ + Fetch the trader leaderboard rankings from the Data API. 
+ + Args: + limit: Max number of traders to return (1-50, default 25) + offset: Starting index for pagination (0-1000) + order_by: Sort criteria — "PNL" or "VOL" + time_period: Time window — "DAY", "WEEK", "MONTH", or "ALL" + category: Market category filter + user: Filter to a single user by wallet address + + Returns: + List of leaderboard entry dicts with keys: + rank, proxyWallet, userName, vol, pnl, profileImage, xUsername, verifiedBadge + """ + + @self._retry_on_failure + def _fetch(): + params: Dict[str, Any] = { + "limit": min(limit, 50), + "offset": offset, + "orderBy": order_by, + "timePeriod": time_period, + "category": category, + } + if user: + params["user"] = user + resp = requests.get( + f"{self.DATA_API_URL}/v1/leaderboard", + params=params, + timeout=self.timeout, + ) + resp.raise_for_status() + data = resp.json() + return data if isinstance(data, list) else [] + + return _fetch() + + def fetch_user_activity(self, address: str, limit: int = 100, offset: int = 0) -> List[Dict]: + """ + Fetch user activity from the Data API. + + Args: + address: User wallet address + limit: Maximum number of entries to return + offset: Pagination offset + + Returns: + List of activity entry dictionaries + """ + + @self._retry_on_failure + def _fetch(): + params = {"user": address, "limit": limit, "offset": offset} + resp = requests.get( + f"{self.DATA_API_URL}/activity", + params=params, + timeout=self.timeout, + ) + resp.raise_for_status() + data = resp.json() + return data if isinstance(data, list) else [] + + return _fetch() + + def fetch_top_holders( + self, market: Market | str, limit: int = 100, offset: int = 0 + ) -> List[Dict]: + """ + Fetch top token holders for a market from the Data API. 
+ + Args: + market: Market object or condition_id string + limit: Maximum number of entries to return + offset: Pagination offset + + Returns: + List of holder dictionaries + """ + condition_id = self._resolve_condition_id(market) + + @self._retry_on_failure + def _fetch(): + params = {"market": condition_id, "limit": limit, "offset": offset} + resp = requests.get( + f"{self.DATA_API_URL}/holders", + params=params, + timeout=self.timeout, + ) + resp.raise_for_status() + data = resp.json() + return data if isinstance(data, list) else [] + + return _fetch() + + def fetch_open_interest(self, market: Market | str) -> Dict: + """ + Fetch open interest for a market from the Data API. + + Args: + market: Market object or condition_id string + + Returns: + Open interest dictionary + """ + condition_id = self._resolve_condition_id(market) + + @self._retry_on_failure + def _fetch(): + params = {"market": condition_id} + resp = requests.get( + f"{self.DATA_API_URL}/oi", + params=params, + timeout=self.timeout, + ) + resp.raise_for_status() + return resp.json() + + return _fetch() + + def fetch_closed_positions(self, address: str, limit: int = 100, offset: int = 0) -> List[Dict]: + """ + Fetch closed positions for a user from the Data API. + + Args: + address: User wallet address + limit: Maximum number of entries to return + offset: Pagination offset + + Returns: + List of closed position dictionaries + """ + + @self._retry_on_failure + def _fetch(): + params = {"user": address, "limit": limit, "offset": offset} + resp = requests.get( + f"{self.DATA_API_URL}/closed-positions", + params=params, + timeout=self.timeout, + ) + resp.raise_for_status() + data = resp.json() + return data if isinstance(data, list) else [] + + return _fetch() + + def fetch_positions_data(self, address: str, limit: int = 100, offset: int = 0) -> List[Dict]: + """ + Fetch current positions for a user from the Data API. 
+ + Args: + address: User wallet address + limit: Maximum number of entries to return + offset: Pagination offset + + Returns: + List of position dictionaries + """ + + @self._retry_on_failure + def _fetch(): + params = {"user": address, "limit": limit, "offset": offset} + resp = requests.get( + f"{self.DATA_API_URL}/positions", + params=params, + timeout=self.timeout, + ) + resp.raise_for_status() + data = resp.json() + return data if isinstance(data, list) else [] + + return _fetch() + + def fetch_portfolio_value(self, address: str) -> Dict: + """ + Fetch total value of a user's positions. + + Args: + address: User wallet address + + Returns: + Portfolio value dictionary + """ + + @self._retry_on_failure + def _fetch(): + params = {"user": address} + resp = requests.get( + f"{self.DATA_API_URL}/value", + params=params, + timeout=self.timeout, + ) + resp.raise_for_status() + return resp.json() + + return _fetch() + + def fetch_live_volume(self, event_id: int) -> Dict: + """ + Fetch live volume for an event. + + Args: + event_id: The event ID (numeric) + + Returns: + Live volume dictionary + """ + + @self._retry_on_failure + def _fetch(): + params = {"id": event_id} + resp = requests.get( + f"{self.DATA_API_URL}/live-volume", + params=params, + timeout=self.timeout, + ) + resp.raise_for_status() + return resp.json() + + return _fetch() + + def fetch_traded_count(self, address: str) -> Dict: + """ + Fetch total markets a user has traded. + + Args: + address: User wallet address + + Returns: + Traded count dictionary + """ + + @self._retry_on_failure + def _fetch(): + params = {"user": address} + resp = requests.get( + f"{self.DATA_API_URL}/traded", + params=params, + timeout=self.timeout, + ) + resp.raise_for_status() + return resp.json() + + return _fetch() + + def fetch_builder_leaderboard( + self, limit: int = 25, offset: int = 0, period: str = "DAY" + ) -> List[Dict]: + """ + Fetch aggregated builder leaderboard. 
+ + Args: + limit: Maximum number of entries to return + offset: Pagination offset + period: Time period ("DAY", "WEEK", "MONTH", "ALL") + + Returns: + List of builder leaderboard entries + """ + + @self._retry_on_failure + def _fetch(): + params = {"limit": limit, "offset": offset, "period": period} + resp = requests.get( + f"{self.DATA_API_URL}/v1/builders/leaderboard", + params=params, + timeout=self.timeout, + ) + resp.raise_for_status() + data = resp.json() + return data if isinstance(data, list) else [] + + return _fetch() + + def fetch_builder_volume(self, builder_id: str, period: str = "DAY") -> List[Dict]: + """ + Fetch daily builder volume time series. + + Args: + builder_id: The builder ID + period: Time period ("DAY", "WEEK", "MONTH", "ALL") + + Returns: + List of volume data points + """ + + @self._retry_on_failure + def _fetch(): + params = {"builderId": builder_id, "period": period} + resp = requests.get( + f"{self.DATA_API_URL}/v1/builders/volume", + params=params, + timeout=self.timeout, + ) + resp.raise_for_status() + data = resp.json() + return data if isinstance(data, list) else [] + + return _fetch() diff --git a/dr_manhattan/exchanges/polymarket/polymarket_gamma.py b/dr_manhattan/exchanges/polymarket/polymarket_gamma.py new file mode 100644 index 0000000..8f2ac7a --- /dev/null +++ b/dr_manhattan/exchanges/polymarket/polymarket_gamma.py @@ -0,0 +1,1190 @@ +from __future__ import annotations + +import json +import re +from datetime import datetime, timedelta, timezone +from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence + +import requests + +from ...base.errors import ( + ExchangeError, + MarketNotFound, + NetworkError, +) +from ...models import CryptoHourlyMarket +from ...models.market import Market +from ...utils import setup_logger +from .polymarket_core import PricePoint, Tag + + +class PolymarketGamma: + """Gamma API mixin: market discovery, search, tags, crypto hourly markets.""" + + def fetch_markets(self, params: 
Optional[Dict[str, Any]] = None) -> list[Market]: + """ + Fetch all markets from Polymarket + + Uses CLOB API instead of Gamma API because CLOB includes token IDs + which are required for trading. + """ + + @self._retry_on_failure + def _fetch(): + # Fetch from CLOB API /sampling-markets (includes token IDs and live markets) + try: + response = requests.get(f"{self.CLOB_URL}/sampling-markets", timeout=self.timeout) + + if response.status_code == 200: + result = response.json() + markets_data = result.get("data", result if isinstance(result, list) else []) + + markets = [] + for item in markets_data: + market = self._parse_sampling_market(item) + if market: + markets.append(market) + + # Apply filters if provided + query_params = params or {} + if query_params.get("active") or (not query_params.get("closed", True)): + markets = [m for m in markets if m.is_open] + + # Apply limit if provided + limit = query_params.get("limit") + if limit: + markets = markets[:limit] + + if self.verbose: + print(f"✓ Fetched {len(markets)} markets from CLOB API (sampling-markets)") + + return markets + + except Exception as e: + if self.verbose: + print(f"CLOB API fetch failed: {e}, falling back to Gamma API") + + # Fallback to Gamma API (but won't have token IDs) + query_params = params or {} + if "active" not in query_params and "closed" not in query_params: + query_params = {"active": True, "closed": False, **query_params} + + data = self._request("GET", "/markets", query_params) + markets = [] + for item in data: + market = self._parse_market(item) + markets.append(market) + return markets + + return _fetch() + + def fetch_market(self, market: Market | str) -> Market: + """Fetch specific market by ID with retry logic. + + Args: + market: Market object, Gamma numeric ID, condition_id (0x...), + token_id (long numeric), or slug string. 
+ """ + if isinstance(market, Market): + market_id = market.metadata.get("id", market.id) + else: + market_id = market + + logger = setup_logger(__name__) + + def _warn_multiple(results, identifier): + if len(results) > 1: + logger.warning( + f"Multiple markets ({len(results)}) matched '{identifier}'. " + f"Returning first: id={results[0].get('id')}, " + f"question='{results[0].get('question', '')[:50]}'" + ) + + @self._retry_on_failure + def _fetch(): + identifier = str(market_id) + + # Gamma numeric ID → direct lookup + if identifier.isdigit() and len(identifier) < 20: + try: + data = self._request("GET", f"/markets/{identifier}") + return self._parse_market(data) + except ExchangeError: + raise MarketNotFound(f"Market {identifier} not found") + + # Condition ID (0x...) → get token_ids from CLOB, then query Gamma by clob_token_ids + if identifier.startswith("0x"): + try: + token_ids = self.fetch_token_ids(identifier) + if token_ids: + gamma_resp = requests.get( + f"{self.BASE_URL}/markets", + params={"clob_token_ids": str(token_ids[0])}, + timeout=self.timeout, + ) + if gamma_resp.status_code == 200: + results = gamma_resp.json() + if results: + _warn_multiple(results, identifier) + return self._parse_market(results[0]) + except Exception: + pass + raise MarketNotFound(f"Market {identifier} not found") + + # Long numeric string → token_id → query Gamma by clob_token_ids + if identifier.isdigit() and len(identifier) >= 20: + try: + resp = requests.get( + f"{self.BASE_URL}/markets", + params={"clob_token_ids": identifier}, + timeout=self.timeout, + ) + if resp.status_code == 200: + results = resp.json() + if results: + _warn_multiple(results, identifier) + return self._parse_market(results[0]) + except Exception: + pass + raise MarketNotFound(f"Market {identifier} not found") + + # Slug → query by slug + try: + resp = requests.get( + f"{self.BASE_URL}/markets", + params={"slug": identifier}, + timeout=self.timeout, + ) + if resp.status_code == 200: + results = 
resp.json() + if results: + _warn_multiple(results, identifier) + return self._parse_market(results[0]) + except Exception: + pass + raise MarketNotFound(f"Market {identifier} not found") + + return _fetch() + + def fetch_markets_by_slug(self, slug_or_url: str) -> List[Market]: + """ + Fetch all markets from an event by slug or URL. + + For events with multiple markets (e.g., "which day will X happen"), + this returns all markets in the event. + + Args: + slug_or_url: Event slug or full Polymarket URL + + Returns: + List of Market objects with token IDs populated + """ + slug = self.parse_market_identifier(slug_or_url) + + if not slug: + raise ValueError("Empty slug provided") + + try: + response = requests.get(f"{self.BASE_URL}/events?slug={slug}", timeout=self.timeout) + except requests.Timeout as e: + raise NetworkError(f"Request timeout: {e}") + except requests.ConnectionError as e: + raise NetworkError(f"Connection error: {e}") + except requests.RequestException as e: + raise NetworkError(f"Request failed: {e}") + + if response.status_code == 404: + raise MarketNotFound(f"Event not found: {slug}") + elif response.status_code != 200: + raise ExchangeError(f"Failed to fetch event: HTTP {response.status_code}") + + event_data = response.json() + if not event_data or len(event_data) == 0: + raise MarketNotFound(f"Event not found: {slug}") + + event = event_data[0] + markets_data = event.get("markets", []) + + if not markets_data: + raise MarketNotFound(f"No markets found in event: {slug}") + + markets = [] + for market_data in markets_data: + market = self._parse_market(market_data) + + # Compose readable_id: [event_slug, id] + market.metadata["readable_id"] = [slug, market.id] + + # Get token IDs from market data + clob_token_ids = market_data.get("clobTokenIds", []) + if isinstance(clob_token_ids, str): + try: + clob_token_ids = json.loads(clob_token_ids) + except json.JSONDecodeError: + clob_token_ids = [] + + if clob_token_ids: + 
market.metadata["clobTokenIds"] = clob_token_ids + + markets.append(market) + + return markets + + def search_markets( + self, + *, + # Gamma-side + limit: int = 200, + offset: int = 0, + order: str | None = "id", + ascending: bool | None = False, + closed: bool | None = False, + tag_id: int | None = None, + ids: Sequence[int] | None = None, + slugs: Sequence[str] | None = None, + clob_token_ids: Sequence[str] | None = None, + condition_ids: Sequence[str] | None = None, + market_maker_addresses: Sequence[str] | None = None, + liquidity_num_min: float | None = None, + liquidity_num_max: float | None = None, + volume_num_min: float | None = None, + volume_num_max: float | None = None, + start_date_min: datetime | None = None, + start_date_max: datetime | None = None, + end_date_min: datetime | None = None, + end_date_max: datetime | None = None, + related_tags: bool | None = None, + cyom: bool | None = None, + uma_resolution_status: str | None = None, + game_id: str | None = None, + sports_market_types: Sequence[str] | None = None, + rewards_min_size: float | None = None, + question_ids: Sequence[str] | None = None, + include_tag: bool | None = None, + extra_params: Dict[str, Any] | None = None, + # Client-side + query: str | None = None, + keywords: Sequence[str] | None = None, + binary: bool | None = None, + min_liquidity: float = 0.0, + categories: Sequence[str] | None = None, + outcomes: Sequence[str] | None = None, + predicate: Callable[[Market], bool] | None = None, + # Log + log: bool | None = False, + ) -> List[Market]: + # ---------- 0) Pre-process ---------- + total_limit = int(limit) + if total_limit <= 0: + return [] + + initial_offset = max(0, int(offset)) + default_page_size_markets = 200 + page_size = min(default_page_size_markets, total_limit) + + def _dt(v: datetime | None) -> str | None: + return v.isoformat() if isinstance(v, datetime) else None + + def _lower_list(values: Sequence[str] | None) -> List[str]: + return [v.lower() for v in values] if 
values else [] + + query_lower = query.lower() if query else None + keyword_lowers = _lower_list(keywords) + category_lowers = _lower_list(categories) + outcome_lowers = _lower_list(outcomes) + + # ---------- 1) Gamma-side params ---------- + gamma_params: Dict[str, Any] = {} + + if order is not None: + gamma_params["order"] = order + if ascending is not None: + gamma_params["ascending"] = ascending + + if closed is not None: + gamma_params["closed"] = closed + if tag_id is not None: + gamma_params["tag_id"] = tag_id + + if ids: + gamma_params["id"] = list(ids) + if slugs: + gamma_params["slug"] = list(slugs) + if clob_token_ids: + gamma_params["clob_token_ids"] = list(clob_token_ids) + if condition_ids: + gamma_params["condition_ids"] = list(condition_ids) + if market_maker_addresses: + gamma_params["market_maker_address"] = list(market_maker_addresses) + + if liquidity_num_min is not None: + gamma_params["liquidity_num_min"] = liquidity_num_min + if liquidity_num_max is not None: + gamma_params["liquidity_num_max"] = liquidity_num_max + if volume_num_min is not None: + gamma_params["volume_num_min"] = volume_num_min + if volume_num_max is not None: + gamma_params["volume_num_max"] = volume_num_max + + if v := _dt(start_date_min): + gamma_params["start_date_min"] = v + if v := _dt(start_date_max): + gamma_params["start_date_max"] = v + if v := _dt(end_date_min): + gamma_params["end_date_min"] = v + if v := _dt(end_date_max): + gamma_params["end_date_max"] = v + + if related_tags is not None: + gamma_params["related_tags"] = related_tags + if cyom is not None: + gamma_params["cyom"] = cyom + if uma_resolution_status is not None: + gamma_params["uma_resolution_status"] = uma_resolution_status + if game_id is not None: + gamma_params["game_id"] = game_id + if sports_market_types: + gamma_params["sports_market_types"] = list(sports_market_types) + if rewards_min_size is not None: + gamma_params["rewards_min_size"] = rewards_min_size + if question_ids: + 
gamma_params["question_ids"] = list(question_ids) + if include_tag is not None: + gamma_params["include_tag"] = include_tag + if extra_params: + gamma_params.update(extra_params) + + # ---------- 2) Gamma pagination via helper ---------- + @self._retry_on_failure + def _fetch_page(offset_: int, limit_: int) -> List[Market]: + params = { + **gamma_params, + "limit": limit_, + "offset": offset_, + } + resp = requests.get( + f"{self.BASE_URL}/markets", + params=params, + timeout=self.timeout, + ) + resp.raise_for_status() + raw = resp.json() + if not isinstance(raw, list): + raise ExchangeError("Gamma /markets response must be a list.") + return [self._parse_market(m) for m in raw] + + gamma_results: List[Market] = self._collect_paginated( + _fetch_page, + total_limit=total_limit, + initial_offset=initial_offset, + page_size=page_size, + dedup_key=None, + log=log, + ) + + # ---------- 3) Client-side filtering ---------- + filtered: List[Market] = [] + + for m in gamma_results: + if binary is not None and m.is_binary != binary: + continue + if m.liquidity < min_liquidity: + continue + if outcome_lowers: + outs = [o.lower() for o in m.outcomes] + if not all(x in outs for x in outcome_lowers): + continue + if category_lowers: + cats = self._extract_categories(m) + if not cats or not any(c in cats for c in category_lowers): + continue + if query_lower or keyword_lowers: + text = self._build_search_text(m) + if query_lower and query_lower not in text: + continue + if any(k not in text for k in keyword_lowers): + continue + if predicate and not predicate(m): + continue + filtered.append(m) + + if len(filtered) > total_limit: + filtered = filtered[:total_limit] + + return filtered + + def get_tag_by_slug(self, slug: str) -> Tag: + if not slug: + raise ValueError("slug must be a non-empty string") + + url = f"{self.BASE_URL}/tags/slug/{slug}" + + @self._retry_on_failure + def _fetch() -> dict: + resp = requests.get(url, timeout=self.timeout) + resp.raise_for_status() + data = 
resp.json() + if not isinstance(data, dict): + raise ExchangeError("Gamma get_tag_by_slug response must be an object.") + return data + + data = _fetch() + + return Tag( + id=str(data.get("id", "")), + label=data.get("label"), + slug=data.get("slug"), + force_show=data.get("forceShow"), + force_hide=data.get("forceHide"), + is_carousel=data.get("isCarousel"), + published_at=data.get("publishedAt"), + created_at=data.get("createdAt"), + updated_at=data.get("UpdatedAt") if "UpdatedAt" in data else data.get("updatedAt"), + raw=data, + ) + + def find_crypto_hourly_market( + self, + token_symbol: Optional[str] = None, + min_liquidity: float = 0.0, + limit: int = 100, + is_active: bool = True, + is_expired: bool = False, + params: Optional[Dict[str, Any]] = None, + ) -> Optional[tuple[Market, Any]]: + """ + Find crypto hourly markets on Polymarket using tag-based filtering. + + Polymarket uses TAG_1H for 1-hour crypto price markets, which is more + efficient than pattern matching on all markets. + + Args: + token_symbol: Filter by token (e.g., "BTC", "ETH", "SOL") + min_liquidity: Minimum liquidity required + limit: Maximum markets to fetch + is_active: If True, only return markets currently in progress (expiring within 1 hour) + is_expired: If True, only return expired markets. If False, exclude expired markets. 
+ params: Additional parameters (can include 'tag_id' to override default tag) + + Returns: + Tuple of (Market, CryptoHourlyMarket) or None + """ + logger = setup_logger(__name__) + + # Use tag-based filtering for efficiency + tag_id = (params or {}).get("tag_id", self.TAG_1H) + + if self.verbose: + logger.info(f"Searching for crypto hourly markets with tag: {tag_id}") + + all_markets = [] + offset = 0 + page_size = 100 + + while len(all_markets) < limit: + # Use gamma-api with tag filtering + url = f"{self.BASE_URL}/markets" + query_params = { + "active": "true", + "closed": "false", + "limit": min(page_size, limit - len(all_markets)), + "offset": offset, + "order": "volume", + "ascending": "false", + } + + if tag_id: + query_params["tag_id"] = tag_id + + try: + response = requests.get(url, params=query_params, timeout=10) + response.raise_for_status() + data = response.json() + + markets_data = data if isinstance(data, list) else [] + if not markets_data: + break + + # Parse markets + for market_data in markets_data: + market = self._parse_market(market_data) + if market: + all_markets.append(market) + + offset += len(markets_data) + + # If we got fewer markets than requested, we've reached the end + if len(markets_data) < page_size: + break + + except Exception as e: + if self.verbose: + logger.error(f"Failed to fetch tagged markets: {e}") + break + + if self.verbose: + logger.info(f"Found {len(all_markets)} markets with tag {tag_id}") + + # Now parse and filter the markets + # Pattern for "Up or Down" markets (e.g., "Bitcoin Up or Down - November 2, 7AM ET") + up_down_pattern = re.compile( + r"(?PBitcoin|Ethereum|Solana|BTC|ETH|SOL|XRP)\s+Up or Down", re.IGNORECASE + ) + + # Pattern for strike price markets (e.g., "Will BTC be above $95,000 at 5:00 PM ET?") + strike_pattern = re.compile( + r"(?:(?PBTC|ETH|SOL|BITCOIN|ETHEREUM|SOLANA)\s+.*?" + r"(?Pabove|below|over|under|reach)\s+" + r"[\$]?(?P[\d,]+(?:\.\d+)?))|" + r"(?:[\$]?(?P[\d,]+(?:\.\d+)?)\s+.*?" 
+ r"(?PBTC|ETH|SOL|BITCOIN|ETHEREUM|SOLANA))", + re.IGNORECASE, + ) + + for market in all_markets: + # Must be binary and open + if not market.is_binary or not market.is_open: + continue + + # Check liquidity + if market.liquidity < min_liquidity: + continue + + # Check expiry time filtering based on is_active and is_expired parameters + if market.close_time: + # Handle timezone-aware datetime + if market.close_time.tzinfo is not None: + now = datetime.now(timezone.utc) + else: + now = datetime.now() + + time_until_expiry = (market.close_time - now).total_seconds() + + # Apply is_expired filter + if is_expired: + # Only include expired markets + if time_until_expiry > 0: + continue + else: + # Exclude expired markets + if time_until_expiry <= 0: + continue + + # Apply is_active filter (only applies to non-expired markets) + if is_active and not is_expired: + # For active hourly markets, only include if expiring within 1 hour + # This ensures we get currently active hourly candles + if time_until_expiry > 3600: # 1 hour in seconds + continue + + # Try "Up or Down" pattern first + up_down_match = up_down_pattern.search(market.question) + if up_down_match: + parsed_token = self.normalize_token(up_down_match.group("token")) + + # Apply token filter + if token_symbol and parsed_token != self.normalize_token(token_symbol): + continue + + expiry = ( + market.close_time if market.close_time else datetime.now() + timedelta(hours=1) + ) + + crypto_market = CryptoHourlyMarket( + token_symbol=parsed_token, + expiry_time=expiry, + strike_price=None, + market_type="up_down", + ) + + return (market, crypto_market) + + # Try strike price pattern + strike_match = strike_pattern.search(market.question) + if strike_match: + parsed_token = self.normalize_token( + strike_match.group("token1") or strike_match.group("token2") or "" + ) + parsed_price_str = ( + strike_match.group("price1") or strike_match.group("price2") or "0" + ) + parsed_price = float(parsed_price_str.replace(",", "")) 
+ + # Apply filters + if token_symbol and parsed_token != self.normalize_token(token_symbol): + continue + + expiry = ( + market.close_time if market.close_time else datetime.now() + timedelta(hours=1) + ) + + crypto_market = CryptoHourlyMarket( + token_symbol=parsed_token, + expiry_time=expiry, + strike_price=parsed_price, + market_type="strike_price", + ) + + return (market, crypto_market) + + return None + + def _parse_sampling_market(self, data: Dict[str, Any]) -> Optional[Market]: + """Parse market data from CLOB sampling-markets API response""" + try: + # sampling-markets includes more fields than simplified-markets + condition_id = data.get("condition_id") + if not condition_id: + return None + + # Extract question and description + question = data.get("question", "") + + # Extract tick size (minimum price increment) + # The API returns minimum_tick_size (e.g., 0.01 or 0.001) + # Note: minimum_order_size is different - it's the min shares per order + # Default to 0.01 (standard Polymarket tick size) if not provided + minimum_tick_size = data.get("minimum_tick_size", 0.01) + + # Extract tokens - sampling-markets has them in "tokens" array + tokens_data = data.get("tokens", []) + token_ids = [] + outcomes = [] + prices = {} + + for token in tokens_data: + if isinstance(token, dict): + token_id = token.get("token_id") + outcome = token.get("outcome", "") + price = token.get("price") + + if token_id: + token_ids.append(str(token_id)) + if outcome: + outcomes.append(outcome) + if outcome and price is not None: + try: + prices[outcome] = float(price) + except (ValueError, TypeError): + pass + + # Build metadata with token IDs + metadata = { + **data, + "clobTokenIds": token_ids, + "condition_id": condition_id, + "minimum_tick_size": minimum_tick_size, + } + + return Market( + id=condition_id, + question=question, + outcomes=outcomes if outcomes else ["Yes", "No"], + close_time=None, # Can parse if needed + volume=0, # Not in sampling-markets + liquidity=0, # Not 
in sampling-markets + prices=prices, + metadata=metadata, + tick_size=minimum_tick_size, + description=data.get("description", ""), + ) + except Exception as e: + if self.verbose: + print(f"Error parsing sampling market: {e}") + return None + + def _parse_clob_market(self, data: Dict[str, Any]) -> Optional[Market]: + """Parse market data from CLOB API response""" + try: + # CLOB API structure + condition_id = data.get("condition_id") + if not condition_id: + return None + + # Extract tokens (already have token_id, outcome, price, winner) + tokens = data.get("tokens", []) + token_ids = [] + outcomes = [] + prices = {} + + for token in tokens: + if isinstance(token, dict): + token_id = token.get("token_id") + outcome = token.get("outcome", "") + price = token.get("price") + + if token_id: + token_ids.append(str(token_id)) + if outcome: + outcomes.append(outcome) + if outcome and price is not None: + try: + prices[outcome] = float(price) + except (ValueError, TypeError): + pass + + # Build metadata with token IDs already included + # Default to 0.01 (standard Polymarket tick size) if not provided + minimum_tick_size = data.get("minimum_tick_size", 0.01) + metadata = { + **data, + "clobTokenIds": token_ids, + "condition_id": condition_id, + "minimum_tick_size": minimum_tick_size, + } + + return Market( + id=condition_id, + question="", # CLOB API doesn't include question text + outcomes=outcomes if outcomes else ["Yes", "No"], + close_time=None, # CLOB API doesn't include end date + volume=0, # CLOB API doesn't include volume + liquidity=0, # CLOB API doesn't include liquidity + prices=prices, + metadata=metadata, + tick_size=minimum_tick_size, + description=data.get("description", ""), + ) + except Exception as e: + if self.verbose: + print(f"Error parsing CLOB market: {e}") + return None + + def _parse_market(self, data: Dict[str, Any]) -> Market: + """Parse market data from API response""" + # Parse outcomes - can be JSON string or list + outcomes_raw = 
data.get("outcomes", []) + if isinstance(outcomes_raw, str): + try: + outcomes = json.loads(outcomes_raw) + except (json.JSONDecodeError, TypeError): + outcomes = [] + else: + outcomes = outcomes_raw + + # Parse outcome prices - can be JSON string, list, or None + prices_raw = data.get("outcomePrices") + prices_list = [] + + if prices_raw is not None: + if isinstance(prices_raw, str): + try: + prices_list = json.loads(prices_raw) + except (json.JSONDecodeError, TypeError): + prices_list = [] + else: + prices_list = prices_raw + + # Create prices dictionary mapping outcomes to prices + prices = {} + if len(outcomes) == len(prices_list) and prices_list: + for outcome, price in zip(outcomes, prices_list): + try: + price_val = float(price) + # Only add non-zero prices + if price_val > 0: + prices[outcome] = price_val + except (ValueError, TypeError): + pass + + # Fallback: use bestBid/bestAsk if available and no prices found + if not prices and len(outcomes) == 2: + best_bid = data.get("bestBid") + best_ask = data.get("bestAsk") + if best_bid is not None and best_ask is not None: + try: + bid = float(best_bid) + ask = float(best_ask) + if 0 < bid < 1 and 0 < ask <= 1: + # For binary: Yes price ~ask, No price ~(1-ask) + prices[outcomes[0]] = ask + prices[outcomes[1]] = 1.0 - bid + except (ValueError, TypeError): + pass + + # Parse close time - check both endDate and closed status + close_time = self._parse_datetime(data.get("endDate")) + + # Use volumeNum if available, fallback to volume + volume = float(data.get("volumeNum", data.get("volume", 0))) + liquidity = float(data.get("liquidityNum", data.get("liquidity", 0))) + + # Try to extract token IDs from various possible fields + # Gamma API sometimes includes these in the response + metadata = dict(data) + + # Set match_id from groupItemTitle for cross-exchange matching + if "groupItemTitle" in data: + metadata["match_id"] = data["groupItemTitle"] + + if "tokens" in data and data["tokens"]: + metadata["clobTokenIds"] 
= data["tokens"] + elif "clobTokenIds" not in metadata and "tokenID" in data: + # Single token ID - might be a simplified response + metadata["clobTokenIds"] = [data["tokenID"]] + + # Ensure clobTokenIds is always a list, not a JSON string + if "clobTokenIds" in metadata and isinstance(metadata["clobTokenIds"], str): + try: + metadata["clobTokenIds"] = json.loads(metadata["clobTokenIds"]) + except (json.JSONDecodeError, TypeError): + # If parsing fails, remove it - will be fetched separately + del metadata["clobTokenIds"] + + # Extract tick size - default to 0.01 (standard Polymarket tick size) + # Gamma API may not include this field; CLOB API always does + minimum_tick_size = data.get("minimum_tick_size", 0.01) + metadata["minimum_tick_size"] = minimum_tick_size + + return Market( + id=data.get("id", ""), + question=data.get("question", ""), + outcomes=outcomes, + close_time=close_time, + volume=volume, + liquidity=liquidity, + prices=prices, + metadata=metadata, + tick_size=minimum_tick_size, + description=data.get("description", ""), + ) + + @staticmethod + def _extract_categories(market: Market) -> List[str]: + buckets: List[str] = [] + meta = market.metadata + + raw_cat = meta.get("category") + if isinstance(raw_cat, str): + buckets.append(raw_cat.lower()) + + for key in ("categories", "topics"): + raw = meta.get(key) + if isinstance(raw, str): + buckets.append(raw.lower()) + elif isinstance(raw, Iterable): + buckets.extend(str(item).lower() for item in raw) + + return buckets + + @staticmethod + def _build_search_text(market: Market) -> str: + meta = market.metadata + + base_fields = [ + market.question or "", + meta.get("description", ""), + ] + + extra_keys = [ + "slug", + "category", + "subtitle", + "seriesSlug", + "series", + "seriesTitle", + "seriesDescription", + "tags", + "topics", + "categories", + ] + + extras: List[str] = [] + for key in extra_keys: + value = meta.get(key) + if value is None: + continue + if isinstance(value, str): + 
extras.append(value) + elif isinstance(value, Iterable): + extras.extend(str(item).lower() for item in value) + else: + extras.append(str(value)) + + return " ".join(str(field) for field in (base_fields + extras)).lower() + + @staticmethod + def _parse_history(history: Iterable[Dict[str, Any]]) -> List[PricePoint]: + parsed: List[PricePoint] = [] + for row in history: + t = row.get("t") + p = row.get("p") + if t is None or p is None: + continue + parsed.append( + PricePoint( + timestamp=datetime.fromtimestamp(int(t), tz=timezone.utc), + price=float(p), + raw=row, + ) + ) + return sorted(parsed, key=lambda item: item.timestamp) + + # ========================================================================= + # New Gamma API methods + # ========================================================================= + + def fetch_events( + self, + limit: int = 100, + offset: int = 0, + slug: Optional[str] = None, + id: Optional[str] = None, + ) -> List[Dict]: + """ + Fetch events from the Gamma API. + + Args: + limit: Maximum number of events to return + offset: Pagination offset + slug: Filter by event slug + id: Filter by event ID + + Returns: + List of event dictionaries + """ + + @self._retry_on_failure + def _fetch(): + params: Dict[str, Any] = {"limit": limit, "offset": offset} + if slug: + params["slug"] = slug + if id: + params["id"] = id + resp = requests.get(f"{self.BASE_URL}/events", params=params, timeout=self.timeout) + resp.raise_for_status() + data = resp.json() + return data if isinstance(data, list) else [] + + return _fetch() + + def fetch_event(self, event_id: str) -> Dict: + """ + Fetch a single event by ID from the Gamma API. 
+ + Args: + event_id: The event ID + + Returns: + Event dictionary + """ + + @self._retry_on_failure + def _fetch(): + resp = requests.get(f"{self.BASE_URL}/events/{event_id}", timeout=self.timeout) + resp.raise_for_status() + return resp.json() + + return _fetch() + + def fetch_event_by_slug(self, slug: str) -> Dict: + """ + Fetch an event by slug from the Gamma API. + + Args: + slug: The event slug + + Returns: + Event dictionary (first match) + """ + + @self._retry_on_failure + def _fetch(): + resp = requests.get( + f"{self.BASE_URL}/events", + params={"slug": slug}, + timeout=self.timeout, + ) + resp.raise_for_status() + data = resp.json() + if isinstance(data, list) and data: + return data[0] + raise ExchangeError(f"Event not found: {slug}") + + return _fetch() + + def fetch_series(self, limit: int = 100, offset: int = 0) -> List[Dict]: + """ + Fetch series from the Gamma API. + + Args: + limit: Maximum number of series to return + offset: Pagination offset + + Returns: + List of series dictionaries + """ + + @self._retry_on_failure + def _fetch(): + resp = requests.get( + f"{self.BASE_URL}/series", + params={"limit": limit, "offset": offset}, + timeout=self.timeout, + ) + resp.raise_for_status() + data = resp.json() + return data if isinstance(data, list) else [] + + return _fetch() + + def fetch_series_by_id(self, series_id: str) -> Dict: + """ + Fetch a single series by ID from the Gamma API. + + Args: + series_id: The series ID + + Returns: + Series dictionary + """ + + @self._retry_on_failure + def _fetch(): + resp = requests.get(f"{self.BASE_URL}/series/{series_id}", timeout=self.timeout) + resp.raise_for_status() + return resp.json() + + return _fetch() + + def get_gamma_status(self) -> Dict: + """ + Check Gamma API health. + + Returns: + Status dictionary with at least 'status_code' and 'ok' keys. + If the response body is valid JSON, its contents are merged in. 
+ """ + + @self._retry_on_failure + def _fetch(): + resp = requests.get(f"{self.BASE_URL}/status", timeout=self.timeout) + resp.raise_for_status() + result: Dict[str, Any] = {"status_code": resp.status_code, "ok": resp.ok} + try: + body = resp.json() + if isinstance(body, dict): + result.update(body) + except Exception: + result["body"] = resp.text + return result + + return _fetch() + + def fetch_tags(self, limit: int = 100, offset: int = 0) -> List[Dict]: + """ + Fetch tag list from the Gamma API. + + Args: + limit: Maximum number of tags to return + offset: Pagination offset + + Returns: + List of tag dictionaries + """ + + @self._retry_on_failure + def _fetch(): + resp = requests.get( + f"{self.BASE_URL}/tags", + params={"limit": limit, "offset": offset}, + timeout=self.timeout, + ) + resp.raise_for_status() + data = resp.json() + return data if isinstance(data, list) else [] + + return _fetch() + + def fetch_tag_by_id(self, tag_id: str) -> Dict: + """ + Fetch a tag by ID from the Gamma API. + + Args: + tag_id: The tag ID + + Returns: + Tag dictionary + """ + + @self._retry_on_failure + def _fetch(): + resp = requests.get(f"{self.BASE_URL}/tags/{tag_id}", timeout=self.timeout) + resp.raise_for_status() + return resp.json() + + return _fetch() + + def fetch_market_tags(self, market: Market | str) -> List[Dict]: + """ + Fetch tags for a market from the Gamma API. + + Args: + market: Market object or Gamma numeric ID string + + Returns: + List of tag dictionaries + """ + market_id = self._resolve_gamma_id(market) + + @self._retry_on_failure + def _fetch(): + resp = requests.get(f"{self.BASE_URL}/markets/{market_id}/tags", timeout=self.timeout) + resp.raise_for_status() + data = resp.json() + return data if isinstance(data, list) else [] + + return _fetch() + + def fetch_event_tags(self, event_id: str) -> List[Dict]: + """ + Fetch tags for an event from the Gamma API. 
+ + Args: + event_id: The event ID + + Returns: + List of tag dictionaries + """ + + @self._retry_on_failure + def _fetch(): + resp = requests.get(f"{self.BASE_URL}/events/{event_id}/tags", timeout=self.timeout) + resp.raise_for_status() + data = resp.json() + return data if isinstance(data, list) else [] + + return _fetch() + + def fetch_sports_market_types(self) -> List[Dict]: + """ + Fetch valid sports market types from the Gamma API. + + Returns: + List of sports market type dictionaries + """ + + @self._retry_on_failure + def _fetch(): + resp = requests.get(f"{self.BASE_URL}/sports/market-types", timeout=self.timeout) + resp.raise_for_status() + data = resp.json() + return data if isinstance(data, list) else [] + + return _fetch() + + def fetch_sports_metadata(self) -> Dict: + """ + Fetch sports metadata from the Gamma API. + + Returns: + Sports metadata dictionary + """ + + @self._retry_on_failure + def _fetch(): + resp = requests.get(f"{self.BASE_URL}/sports", timeout=self.timeout) + resp.raise_for_status() + return resp.json() + + return _fetch() diff --git a/dr_manhattan/exchanges/polymarket/polymarket_operator.py b/dr_manhattan/exchanges/polymarket/polymarket_operator.py new file mode 100644 index 0000000..c1da697 --- /dev/null +++ b/dr_manhattan/exchanges/polymarket/polymarket_operator.py @@ -0,0 +1,305 @@ +"""Polymarket exchange implementation using Operator pattern. + +This module provides a Polymarket exchange where the server acts as an operator, +trading on behalf of users who have approved the server's address. 
+ +Security Model: +- Server has its own private key (stored securely on server) +- Users approve the server address as an operator on-chain +- Server signs orders with its own key, specifying user's address as funder +- Users can revoke approval anytime via Polymarket contract +""" + +import os +from datetime import datetime +from typing import Any, Dict, Optional + +from py_clob_client.client import ClobClient +from py_clob_client.clob_types import AssetType, BalanceAllowanceParams, OrderArgs, OrderType + +from ...base.errors import AuthenticationError, ExchangeError, InvalidOrder +from ...models.order import Order, OrderSide, OrderStatus, OrderTimeInForce +from ...models.position import Position +from . import Polymarket + + +class PolymarketOperator(Polymarket): + """Polymarket exchange using Operator pattern for server-wide trading. + + The server acts as an operator, signing orders on behalf of users who have + approved the server's address. This allows centralized trading without + users exposing their private keys. + + Server Config (from environment): + POLYMARKET_OPERATOR_KEY: Server's private key for signing + POLYMARKET_OPERATOR_ADDRESS: Server's address (derived from key) + + Per-Request Config: + user_address: The user's wallet address to trade for + + Prerequisites: + Users must approve the server address as operator on Polymarket: + 1. Go to Polymarket + 2. Call approveOperator(server_address) on the CTF Exchange contract + + Example: + # Server initialization (once at startup) + operator = PolymarketOperator({ + 'user_address': '0xUserWalletAddress...', + }) + + # Create order on behalf of user + order = operator.create_order(...) + """ + + def __init__(self, config: Optional[Dict[str, Any]] = None): + """Initialize Polymarket Operator. 
+ + Args: + config: Must contain 'user_address' - the wallet to trade for + """ + from ...base.exchange import Exchange + + Exchange.__init__(self, config) + self._ws = None + self._user_ws = None + self._clob_client = None + self._address = None + + # Server's operator credentials from environment + self._operator_key = os.getenv("POLYMARKET_OPERATOR_KEY") + if not self._operator_key: + raise AuthenticationError( + "POLYMARKET_OPERATOR_KEY environment variable is required for operator mode" + ) + + # User's address to trade for (from per-request config) + self._user_address = self.config.get("user_address") + if not self._user_address: + raise AuthenticationError( + "user_address is required - provide the wallet address to trade for" + ) + + # These are set for compatibility with parent class + self.private_key = self._operator_key + self.funder = self._user_address # User's address as funder + + self._initialize_operator_client() + + def _initialize_operator_client(self): + """Initialize CLOB client in operator mode.""" + try: + chain_id = self.config.get("chain_id", 137) + # signature_type 0 = EOA (standard wallet) + signature_type = self.config.get("signature_type", 0) + + # Initialize with operator's key, user's address as funder + self._clob_client = ClobClient( + host=self.CLOB_URL, + key=self._operator_key, + chain_id=chain_id, + signature_type=signature_type, + funder=self._user_address, # Trade for this user + ) + + # Derive and set API credentials + api_creds = self._clob_client.create_or_derive_api_creds() + if not api_creds: + raise AuthenticationError("Failed to derive API credentials") + + self._clob_client.set_api_creds(api_creds) + + # Verify L2 mode + if self._clob_client.mode < 2: + raise AuthenticationError( + f"Client not in L2 mode (current mode: {self._clob_client.mode})" + ) + + # Store operator address + try: + self._address = self._clob_client.get_address() + except Exception: + self._address = None + + except AuthenticationError: + raise 
+ except Exception as e: + raise AuthenticationError(f"Failed to initialize operator client: {e}") + + @property + def operator_address(self) -> Optional[str]: + """Get the server's operator address.""" + return self._address + + @property + def user_address(self) -> str: + """Get the user's address this instance trades for.""" + return self._user_address + + def create_order( + self, + market_id: str, + outcome: str, + side: OrderSide, + price: float, + size: float, + params: Optional[Dict[str, Any]] = None, + time_in_force: OrderTimeInForce = OrderTimeInForce.GTC, + ) -> Order: + """Create order on behalf of user. + + The order is signed by the operator but executes for the user's account. + User must have approved the operator address. + """ + if not self._clob_client: + raise AuthenticationError("CLOB client not initialized.") + + token_id = params.get("token_id") if params else None + if not token_id: + raise InvalidOrder("token_id required in params") + + order_type_map = { + OrderTimeInForce.GTC: OrderType.GTC, + OrderTimeInForce.FOK: OrderType.FOK, + OrderTimeInForce.IOC: OrderType.GTD, + } + clob_order_type = order_type_map.get(time_in_force, OrderType.GTC) + + try: + order_args = OrderArgs( + token_id=token_id, + price=float(price), + size=float(size), + side=side.value.upper(), + ) + + signed_order = self._clob_client.create_order(order_args) + result = self._clob_client.post_order(signed_order, clob_order_type) + + order_id = result.get("orderID", "") if isinstance(result, dict) else str(result) + status_str = result.get("status", "LIVE") if isinstance(result, dict) else "LIVE" + + status_map = { + "LIVE": OrderStatus.OPEN, + "MATCHED": OrderStatus.FILLED, + "CANCELLED": OrderStatus.CANCELLED, + } + + return Order( + id=order_id, + market_id=market_id, + outcome=outcome, + side=side, + price=price, + size=size, + filled=0, + status=status_map.get(status_str, OrderStatus.OPEN), + created_at=datetime.now(), + updated_at=datetime.now(), + 
time_in_force=time_in_force, + ) + + except Exception as e: + error_msg = str(e) + if "not approved" in error_msg.lower() or "operator" in error_msg.lower(): + raise InvalidOrder( + f"User {self._user_address} has not approved operator. " + f"Please approve the operator address first." + ) + raise InvalidOrder(f"Order placement failed: {error_msg}") + + def cancel_order(self, order_id: str, market_id: Optional[str] = None) -> Order: + """Cancel order on behalf of user.""" + if not self._clob_client: + raise AuthenticationError("CLOB client not initialized.") + + try: + result = self._clob_client.cancel(order_id) + if isinstance(result, dict): + return self._parse_order(result) + return Order( + id=order_id, + market_id=market_id or "", + outcome="", + side=OrderSide.BUY, + price=0, + size=0, + filled=0, + status=OrderStatus.CANCELLED, + created_at=datetime.now(), + updated_at=datetime.now(), + ) + except Exception as e: + raise InvalidOrder(f"Failed to cancel order {order_id}: {str(e)}") + + def fetch_balance(self) -> Dict[str, float]: + """Fetch user's balance (not operator's).""" + if not self._clob_client: + raise AuthenticationError("CLOB client not initialized.") + + try: + params = BalanceAllowanceParams(asset_type=AssetType.COLLATERAL) + balance_data = self._clob_client.get_balance_allowance(params=params) + + usdc_balance = 0.0 + if isinstance(balance_data, dict) and "balance" in balance_data: + try: + usdc_balance = float(balance_data["balance"]) / 1e6 + except (ValueError, TypeError): + usdc_balance = 0.0 + + return {"USDC": usdc_balance} + + except Exception as e: + raise ExchangeError(f"Failed to fetch balance: {str(e)}") + + def fetch_open_orders( + self, market_id: Optional[str] = None, params: Optional[Dict[str, Any]] = None + ) -> list[Order]: + """Fetch user's open orders.""" + if not self._clob_client: + raise AuthenticationError("CLOB client not initialized.") + + try: + response = self._clob_client.get_orders() + + if isinstance(response, list): 
+ orders = response + elif isinstance(response, dict) and "data" in response: + orders = response["data"] + else: + return [] + + if not orders: + return [] + + if market_id: + orders = [o for o in orders if o.get("market") == market_id] + + return [self._parse_order(order) for order in orders] + except Exception as e: + if self.verbose: + print(f"Warning: Failed to fetch open orders: {e}") + return [] + + def fetch_positions( + self, market_id: Optional[str] = None, params: Optional[Dict[str, Any]] = None + ) -> list[Position]: + """Fetch user's positions.""" + if not self._clob_client: + raise AuthenticationError("CLOB client not initialized.") + + if not market_id: + return [] + + return [] + + def check_operator_approval(self) -> bool: + """Check if user has approved the operator. + + Returns: + True if user has approved operator, False otherwise + """ + # This would require checking the CTF Exchange contract + # For now, we'll rely on order placement errors to detect this + return True # Assume approved, error on order if not diff --git a/dr_manhattan/exchanges/polymarket_ws.py b/dr_manhattan/exchanges/polymarket/polymarket_ws.py similarity index 99% rename from dr_manhattan/exchanges/polymarket_ws.py rename to dr_manhattan/exchanges/polymarket/polymarket_ws.py index 8b2a5c5..b334e52 100644 --- a/dr_manhattan/exchanges/polymarket_ws.py +++ b/dr_manhattan/exchanges/polymarket/polymarket_ws.py @@ -11,8 +11,8 @@ import websockets import websockets.exceptions -from ..base.websocket import OrderBookWebSocket -from ..models.orderbook import OrderbookManager +from ...base.websocket import OrderBookWebSocket +from ...models.orderbook import OrderbookManager logger = logging.getLogger(__name__) diff --git a/dr_manhattan/exchanges/polymarket/polymarket_ws_ext.py b/dr_manhattan/exchanges/polymarket/polymarket_ws_ext.py new file mode 100644 index 0000000..6f8e857 --- /dev/null +++ b/dr_manhattan/exchanges/polymarket/polymarket_ws_ext.py @@ -0,0 +1,231 @@ +from __future__ 
import annotations + +import asyncio +import json +import logging +import threading +from typing import Any, Callable, Dict, List, Optional + +import websockets +import websockets.exceptions + +logger = logging.getLogger(__name__) + + +class PolymarketSportsWebSocket: + """ + Sports market real-time WebSocket. + + Connects to the Polymarket CLOB WebSocket for sports market updates. + Follows the same pattern as PolymarketWebSocket from polymarket_ws.py. + """ + + WS_URL = "wss://ws-subscriptions-clob.polymarket.com/ws/market" + + def __init__(self, verbose: bool = False): + self.verbose = verbose + self.ws = None + self._thread: Optional[threading.Thread] = None + self._running = False + self._loop: Optional[asyncio.AbstractEventLoop] = None + self._callbacks: Dict[str, List[Callable]] = {} + self._subscribed_markets: List[str] = [] + + def on_update(self, callback: Callable[[Dict[str, Any]], None]) -> None: + """Register a callback for market updates.""" + self._callbacks.setdefault("update", []).append(callback) + + def on_error(self, callback: Callable[[Exception], None]) -> None: + """Register a callback for errors.""" + self._callbacks.setdefault("error", []).append(callback) + + def subscribe(self, market_ids: List[str]) -> None: + """ + Subscribe to sports market updates. 
+ + Args: + market_ids: List of asset IDs (token IDs) to subscribe to + """ + self._subscribed_markets.extend(market_ids) + if self.ws and self._running: + asyncio.run_coroutine_threadsafe(self._send_subscribe(market_ids), self._loop) + + async def _send_subscribe(self, market_ids: List[str]) -> None: + """Send subscription message over WebSocket.""" + msg = { + "auth": {}, + "markets": [], + "assets_ids": market_ids, + "type": "market", + } + await self.ws.send(json.dumps(msg)) + if self.verbose: + logger.info(f"Subscribed to sports markets: {market_ids}") + + async def _listen(self) -> None: + """Main WebSocket listen loop.""" + while self._running: + try: + async with websockets.connect(self.WS_URL) as ws: + self.ws = ws + if self.verbose: + logger.info("Sports WebSocket connected") + + # Subscribe to any pending markets + if self._subscribed_markets: + await self._send_subscribe(self._subscribed_markets) + + async for message in ws: + try: + data = json.loads(message) + for cb in self._callbacks.get("update", []): + cb(data) + except json.JSONDecodeError: + if self.verbose: + logger.warning(f"Invalid JSON: {message[:100]}") + + except websockets.exceptions.ConnectionClosed as e: + if self.verbose: + logger.warning(f"Sports WebSocket closed: {e}") + if self._running: + await asyncio.sleep(2) + except Exception as e: + for cb in self._callbacks.get("error", []): + cb(e) + if self._running: + await asyncio.sleep(5) + + def start(self) -> None: + """Start the WebSocket in a background thread.""" + if self._running: + return + self._running = True + self._loop = asyncio.new_event_loop() + self._thread = threading.Thread(target=self._run_loop, daemon=True, name="sports-ws") + self._thread.start() + + def _run_loop(self) -> None: + asyncio.set_event_loop(self._loop) + self._loop.run_until_complete(self._listen()) + + def stop(self) -> None: + """Stop the WebSocket.""" + self._running = False + if self.ws: + asyncio.run_coroutine_threadsafe(self.ws.close(), self._loop) 
+ if self._thread: + self._thread.join(timeout=5) + + +class PolymarketRTDSWebSocket: + """ + Real-Time Data Stream WebSocket for crypto prices and comments. + + Follows the same pattern as PolymarketWebSocket from polymarket_ws.py. + """ + + WS_URL = "wss://ws-subscriptions-clob.polymarket.com/ws/market" + + def __init__(self, verbose: bool = False): + self.verbose = verbose + self.ws = None + self._thread: Optional[threading.Thread] = None + self._running = False + self._loop: Optional[asyncio.AbstractEventLoop] = None + self._callbacks: Dict[str, List[Callable]] = {} + self._subscribed_assets: List[str] = [] + + def on_price(self, callback: Callable[[Dict[str, Any]], None]) -> None: + """Register a callback for price updates.""" + self._callbacks.setdefault("price", []).append(callback) + + def on_comment(self, callback: Callable[[Dict[str, Any]], None]) -> None: + """Register a callback for comment updates.""" + self._callbacks.setdefault("comment", []).append(callback) + + def on_error(self, callback: Callable[[Exception], None]) -> None: + """Register a callback for errors.""" + self._callbacks.setdefault("error", []).append(callback) + + def subscribe(self, asset_ids: List[str]) -> None: + """ + Subscribe to real-time data for assets. 
+ + Args: + asset_ids: List of asset IDs (token IDs) to subscribe to + """ + self._subscribed_assets.extend(asset_ids) + if self.ws and self._running: + asyncio.run_coroutine_threadsafe(self._send_subscribe(asset_ids), self._loop) + + async def _send_subscribe(self, asset_ids: List[str]) -> None: + """Send subscription message over WebSocket.""" + msg = { + "auth": {}, + "markets": [], + "assets_ids": asset_ids, + "type": "market", + } + await self.ws.send(json.dumps(msg)) + if self.verbose: + logger.info(f"Subscribed to RTDS assets: {asset_ids}") + + async def _listen(self) -> None: + """Main WebSocket listen loop.""" + while self._running: + try: + async with websockets.connect(self.WS_URL) as ws: + self.ws = ws + if self.verbose: + logger.info("RTDS WebSocket connected") + + if self._subscribed_assets: + await self._send_subscribe(self._subscribed_assets) + + async for message in ws: + try: + data = json.loads(message) + # Route to appropriate callbacks based on message type + msg_type = data.get("type", "") + if msg_type == "comment": + for cb in self._callbacks.get("comment", []): + cb(data) + else: + # Default to price callback + for cb in self._callbacks.get("price", []): + cb(data) + except json.JSONDecodeError: + if self.verbose: + logger.warning(f"Invalid JSON: {message[:100]}") + + except websockets.exceptions.ConnectionClosed as e: + if self.verbose: + logger.warning(f"RTDS WebSocket closed: {e}") + if self._running: + await asyncio.sleep(2) + except Exception as e: + for cb in self._callbacks.get("error", []): + cb(e) + if self._running: + await asyncio.sleep(5) + + def start(self) -> None: + """Start the WebSocket in a background thread.""" + if self._running: + return + self._running = True + self._loop = asyncio.new_event_loop() + self._thread = threading.Thread(target=self._run_loop, daemon=True, name="rtds-ws") + self._thread.start() + + def _run_loop(self) -> None: + asyncio.set_event_loop(self._loop) + 
self._loop.run_until_complete(self._listen()) + + def stop(self) -> None: + """Stop the WebSocket.""" + self._running = False + if self.ws: + asyncio.run_coroutine_threadsafe(self.ws.close(), self._loop) + if self._thread: + self._thread.join(timeout=5) diff --git a/dr_manhattan/exchanges/polymarket_builder.py b/dr_manhattan/exchanges/polymarket_builder.py new file mode 100644 index 0000000..08bb101 --- /dev/null +++ b/dr_manhattan/exchanges/polymarket_builder.py @@ -0,0 +1,9 @@ +"""Backward-compatible import for PolymarketBuilder. + +This module preserves the legacy import path: + dr_manhattan.exchanges.polymarket_builder +""" + +from .polymarket.polymarket_builder import PolymarketBuilder + +__all__ = ["PolymarketBuilder"] diff --git a/dr_manhattan/exchanges/polymarket_operator.py b/dr_manhattan/exchanges/polymarket_operator.py new file mode 100644 index 0000000..e483789 --- /dev/null +++ b/dr_manhattan/exchanges/polymarket_operator.py @@ -0,0 +1,9 @@ +"""Backward-compatible import for PolymarketOperator. + +This module preserves the legacy import path: + dr_manhattan.exchanges.polymarket_operator +""" + +from .polymarket.polymarket_operator import PolymarketOperator + +__all__ = ["PolymarketOperator"] diff --git a/dr_manhattan/mcp/server_sse.py b/dr_manhattan/mcp/server_sse.py new file mode 100644 index 0000000..b9cb01e --- /dev/null +++ b/dr_manhattan/mcp/server_sse.py @@ -0,0 +1,383 @@ +""" +Dr. Manhattan MCP Server - SSE Transport for Remote Access + +HTTP-based MCP server using Server-Sent Events (SSE) transport. +Allows remote Claude Desktop/Code connections without local installation. 
+ +Usage: + python -m dr_manhattan.mcp.server_sse + +Environment: + PORT: Server port (default: 8080) + LOG_LEVEL: Logging level (default: INFO) + +Security: + - Write operations only supported for Polymarket (via Builder profile) + - Other exchanges are read-only (no private keys on server) + - Polymarket credentials: API key, secret, passphrase (no private key) + - Sensitive headers never logged + - HTTPS required in production (handled by Railway/hosting) +""" + +import asyncio +import contextvars +import json +import logging +import os +import signal +import sys +from pathlib import Path +from typing import Any, Dict, List, Optional + +# ============================================================================= +# CRITICAL: Logger patching MUST happen BEFORE importing dr_manhattan modules +# ============================================================================= + + +def _mcp_setup_logger(name: str = None, level: int = logging.INFO): + """MCP-compatible logger that outputs to stderr without colors.""" + logger = logging.getLogger(name) + logger.setLevel(level) + logger.handlers = [] + + handler = logging.StreamHandler(sys.stderr) + handler.setFormatter(logging.Formatter("[%(asctime)s] %(message)s", datefmt="%H:%M:%S")) + logger.addHandler(handler) + logger.propagate = False + + return logger + + +# Configure root logging to use stderr BEFORE any imports +log_level = getattr(logging, os.getenv("LOG_LEVEL", "INFO").upper(), logging.INFO) +logging.basicConfig( + level=log_level, + format="[%(asctime)s] %(message)s", + datefmt="%H:%M:%S", + stream=sys.stderr, + force=True, +) + +# Patch the logger module BEFORE importing dr_manhattan.utils +import dr_manhattan.utils.logger as logger_module # noqa: E402 + +logger_module.setup_logger = _mcp_setup_logger +logger_module.default_logger = _mcp_setup_logger("dr_manhattan") + +import dr_manhattan.utils # noqa: E402 + +dr_manhattan.utils.setup_logger = _mcp_setup_logger + +# Third-party imports after patching +from 
dotenv import load_dotenv # noqa: E402 +from mcp.server import Server # noqa: E402 +from mcp.server.sse import SseServerTransport # noqa: E402 +from mcp.types import TextContent, Tool # noqa: E402 +from starlette.applications import Starlette # noqa: E402 +from starlette.middleware import Middleware # noqa: E402 +from starlette.middleware.cors import CORSMiddleware # noqa: E402 +from starlette.requests import Request # noqa: E402 +from starlette.responses import JSONResponse, Response # noqa: E402 +from starlette.routing import Route # noqa: E402 + +# Load environment variables +env_path = Path(__file__).parent.parent.parent / ".env" +load_dotenv(env_path) + + +def fix_all_loggers(): + """Remove ALL handlers and configure only root logger with stderr.""" + root_logger = logging.getLogger() + for handler in root_logger.handlers[:]: + root_logger.removeHandler(handler) + + for name in logging.Logger.manager.loggerDict: + logger_obj = logging.getLogger(name) + if not isinstance(logger_obj, logging.Logger): + continue + for handler in logger_obj.handlers[:]: + logger_obj.removeHandler(handler) + logger_obj.propagate = True + + stderr_handler = logging.StreamHandler(sys.stderr) + stderr_handler.setFormatter(logging.Formatter("[%(asctime)s] %(message)s", datefmt="%H:%M:%S")) + root_logger.addHandler(stderr_handler) + root_logger.setLevel(log_level) + + +# Import modules after logger monkey-patching +from .session import ( # noqa: E402 + ExchangeSessionManager, + StrategySessionManager, + set_context_credentials_getter, +) +from .tools import TOOL_DISPATCH, get_tool_definitions # noqa: E402 +from .utils import ( # noqa: E402 + check_rate_limit, + get_credentials_from_headers, + sanitize_headers_for_logging, + translate_error, + validate_write_operation, +) + +# Fix loggers immediately after imports +fix_all_loggers() + +# Get logger for this module +logger = logging.getLogger(__name__) + +# Context variable to store current request credentials +_request_credentials: 
contextvars.ContextVar[Optional[Dict[str, Any]]] = contextvars.ContextVar( + "request_credentials", default=None +) + + +def get_current_credentials() -> Optional[Dict[str, Any]]: + """Get credentials from current request context.""" + return _request_credentials.get() + + +# Register the credentials getter with exchange manager +set_context_credentials_getter(get_current_credentials) + +# Initialize MCP server +mcp_app = Server("dr-manhattan") + +# SSE transport +sse_transport = SseServerTransport("/messages/") + +# Session managers +exchange_manager = ExchangeSessionManager() +strategy_manager = StrategySessionManager() + + +# ============================================================================= +# Tool Registration (shared with server.py via tools.definitions) +# ============================================================================= + + +@mcp_app.list_tools() +async def list_tools() -> List[Tool]: + """List all available MCP tools.""" + return get_tool_definitions() + + +@mcp_app.call_tool() +async def call_tool(name: str, arguments: Any) -> List[TextContent]: + """Handle tool execution with rate limiting and write operation validation.""" + try: + if not check_rate_limit(): + raise ValueError("Rate limit exceeded. 
Please wait before making more requests.") + + if name not in TOOL_DISPATCH: + raise ValueError(f"Unknown tool: {name}") + + # Validate write operations - only Polymarket allowed via Builder profile + exchange = arguments.get("exchange") if isinstance(arguments, dict) else None + is_allowed, error_msg = validate_write_operation(name, exchange) + if not is_allowed: + raise ValueError(error_msg) + + handler, requires_args = TOOL_DISPATCH[name] + result = handler(**arguments) if requires_args else handler() + + return [TextContent(type="text", text=json.dumps(result, indent=2))] + + except Exception as e: + mcp_error = translate_error(e, {"tool": name, "arguments": arguments}) + error_response = {"error": mcp_error.to_dict()} + return [TextContent(type="text", text=json.dumps(error_response, indent=2))] + + +# ============================================================================= +# HTTP Handlers +# ============================================================================= + + +async def handle_sse(request: Request) -> Response: + """Handle SSE connection for MCP.""" + # Extract and log headers (sanitized) + headers = dict(request.headers) + logger.info(f"SSE connection from {request.client.host if request.client else 'unknown'}") + logger.debug(f"Headers (sanitized): {sanitize_headers_for_logging(headers)}") + + # Extract credentials from headers and store in context + credentials = get_credentials_from_headers(headers) + token = _request_credentials.set(credentials) + + try: + async with sse_transport.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await mcp_app.run(streams[0], streams[1], mcp_app.create_initialization_options()) + finally: + _request_credentials.reset(token) + + return Response() + + +async def handle_messages(request: Request) -> Response: + """Handle POST messages for SSE transport.""" + # Extract credentials for this request + headers = dict(request.headers) + credentials = 
get_credentials_from_headers(headers) + token = _request_credentials.set(credentials) + + try: + return await sse_transport.handle_post_message( + request.scope, request.receive, request._send + ) + finally: + _request_credentials.reset(token) + + +async def health_check(request: Request) -> JSONResponse: + """Health check endpoint.""" + return JSONResponse( + { + "status": "healthy", + "service": "dr-manhattan-mcp", + "transport": "sse", + "version": "0.0.2", + } + ) + + +async def root(request: Request) -> JSONResponse: + """Root endpoint with usage info.""" + return JSONResponse( + { + "service": "Dr. Manhattan MCP Server", + "transport": "SSE", + "endpoints": { + "/sse": "MCP SSE connection endpoint", + "/messages/": "MCP message handling", + "/health": "Health check", + }, + "security": { + "write_operations": "Polymarket only (via Builder profile)", + "other_exchanges": "Read-only (fetch_markets, fetch_orderbook, etc.)", + }, + "usage": { + "read_only": { + "url": "https:///sse", + "note": "No headers needed for read-only access", + }, + "polymarket_trading": { + "url": "https:///sse", + "headers": { + "X-Polymarket-Api-Key": "", + "X-Polymarket-Api-Secret": "", + "X-Polymarket-Passphrase": "", + }, + "note": "Get credentials from Polymarket Builder profile", + }, + }, + } + ) + + +# ============================================================================= +# Starlette App +# ============================================================================= + +# CORS configuration - restrict origins for security +# MCP clients (Claude Desktop/Code) typically don't send Origin headers, +# so we allow specific known origins and handle no-origin requests +_cors_origins_env = os.getenv("CORS_ALLOWED_ORIGINS", "") +ALLOWED_ORIGINS: List[str] = [o.strip() for o in _cors_origins_env.split(",") if o.strip()] +if not ALLOWED_ORIGINS: + # Default: known MCP client origins + ALLOWED_ORIGINS = [ + "https://claude.ai", + "https://console.anthropic.com", + ] + +middleware = 
[ + Middleware( + CORSMiddleware, + allow_origins=ALLOWED_ORIGINS, + allow_methods=["GET", "POST", "OPTIONS"], + allow_headers=["*"], + allow_credentials=True, + ) +] + +routes = [ + Route("/", endpoint=root, methods=["GET"]), + Route("/health", endpoint=health_check, methods=["GET"]), + Route("/sse", endpoint=handle_sse, methods=["GET"]), + Route("/messages/", endpoint=handle_messages, methods=["POST"]), +] + +app = Starlette(routes=routes, middleware=middleware) + + +# ============================================================================= +# Cleanup and Main +# ============================================================================= + +_shutdown_requested = False + + +def cleanup_handler(signum, frame): + """Handle shutdown signal.""" + global _shutdown_requested + _shutdown_requested = True + sys.stderr.write("[SIGNAL] Shutdown requested, cleaning up...\n") + sys.stderr.flush() + + +async def cleanup(): + """Cleanup resources on shutdown.""" + logger.info("Shutting down MCP SSE server...") + await asyncio.to_thread(strategy_manager.cleanup) + await asyncio.to_thread(exchange_manager.cleanup) + logger.info("Cleanup complete") + + +def _validate_env() -> tuple[str, int]: + """Validate and return environment configuration.""" + host = os.getenv("HOST", "0.0.0.0") + port_str = os.getenv("PORT", "8080") + + # Validate port + try: + port = int(port_str) + if not (1 <= port <= 65535): + raise ValueError(f"Port must be 1-65535, got {port}") + except ValueError as e: + logger.error(f"Invalid PORT: {e}") + raise SystemExit(1) + + # Validate log level + log_level_str = os.getenv("LOG_LEVEL", "INFO").upper() + if log_level_str not in ("DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"): + logger.warning(f"Invalid LOG_LEVEL '{log_level_str}', using INFO") + + return host, port + + +def run_sse(): + """Run the SSE server.""" + import uvicorn + + signal.signal(signal.SIGINT, cleanup_handler) + signal.signal(signal.SIGTERM, cleanup_handler) + + host, port = 
_validate_env() + + logger.info(f"Starting Dr. Manhattan MCP SSE Server on {host}:{port}") + logger.info(f"CORS allowed origins: {ALLOWED_ORIGINS}") + + uvicorn.run( + app, + host=host, + port=port, + log_level="info", + access_log=True, + ) + + +if __name__ == "__main__": + run_sse() diff --git a/dr_manhattan/mcp/session/__init__.py b/dr_manhattan/mcp/session/__init__.py index e966ff8..fb7f61c 100644 --- a/dr_manhattan/mcp/session/__init__.py +++ b/dr_manhattan/mcp/session/__init__.py @@ -1,6 +1,6 @@ """Session management for MCP server.""" -from .exchange_manager import ExchangeSessionManager +from .exchange_manager import ExchangeSessionManager, set_context_credentials_getter from .models import SessionStatus, StrategySession from .strategy_manager import StrategySessionManager @@ -9,4 +9,5 @@ "StrategySessionManager", "StrategySession", "SessionStatus", + "set_context_credentials_getter", ] diff --git a/dr_manhattan/mcp/session/exchange_manager.py b/dr_manhattan/mcp/session/exchange_manager.py index 8c47ce8..cf5726a 100644 --- a/dr_manhattan/mcp/session/exchange_manager.py +++ b/dr_manhattan/mcp/session/exchange_manager.py @@ -4,13 +4,42 @@ import threading from concurrent.futures import ThreadPoolExecutor from concurrent.futures import TimeoutError as FutureTimeoutError -from typing import Any, Dict, Optional +from typing import Any, Callable, Dict, Optional from dr_manhattan.base import Exchange, ExchangeClient, create_exchange from dr_manhattan.utils import setup_logger logger = setup_logger(__name__) +# Callback to get credentials from request context (set by SSE server) +_context_credentials_getter: Optional[Callable[[], Optional[Dict[str, Any]]]] = None + + +def set_context_credentials_getter(getter: Optional[Callable[[], Optional[Dict[str, Any]]]]): + """ + Set the callback function for getting credentials from request context. + + Used by SSE server to provide per-request credentials. 
+ + Args: + getter: Function that returns credentials dict or None + """ + global _context_credentials_getter + _context_credentials_getter = getter + + +def get_context_credentials() -> Optional[Dict[str, Any]]: + """ + Get credentials from current request context if available. + + Returns: + Credentials dict or None if not in SSE context + """ + if _context_credentials_getter is not None: + return _context_credentials_getter() + return None + + # Lock for credential operations (thread-safe access to MCP_CREDENTIALS) _CREDENTIALS_LOCK = threading.Lock() @@ -204,12 +233,92 @@ def __init__(self): """No-op: initialization done in __new__ to prevent race conditions.""" pass + def _create_exchange_with_credentials( + self, exchange_name: str, config_dict: Dict[str, Any] + ) -> Exchange: + """ + Create exchange instance with specific credentials. + + Internal method - does not cache the instance. + + Args: + exchange_name: Exchange name + config_dict: Credentials dictionary + + Returns: + Exchange instance + """ + from ...exchanges.limitless import Limitless + from ...exchanges.opinion import Opinion + from ...exchanges.polymarket import Polymarket + from ...exchanges.polymarket_builder import PolymarketBuilder + from ...exchanges.polymarket_operator import PolymarketOperator + + # For Polymarket, determine which mode to use: + # 1. Operator mode (preferred): user provides wallet address, server signs + # 2. Builder profile: user provides api_key, api_secret, api_passphrase + # 3. 
Direct mode: user provides private_key (local server only) + if exchange_name.lower() == "polymarket": + has_user_address = config_dict.get("user_address") + has_builder_creds = all( + config_dict.get(k) for k in ("api_key", "api_secret", "api_passphrase") + ) + has_private_key = config_dict.get("private_key") + + # Priority 1: Operator mode (user_address provided, server signs) + if has_user_address and not has_private_key: + logger.info(f"Using PolymarketOperator for {exchange_name} (Operator mode)") + config_dict["verbose"] = DEFAULT_VERBOSE + return _run_with_timeout( + PolymarketOperator, + args=(config_dict,), + timeout=EXCHANGE_INIT_TIMEOUT, + description=f"{exchange_name} Operator initialization", + ) + + # Priority 2: Builder profile (api credentials provided) + if has_builder_creds and not has_private_key: + logger.info(f"Using PolymarketBuilder for {exchange_name} (Builder profile)") + config_dict["verbose"] = DEFAULT_VERBOSE + return _run_with_timeout( + PolymarketBuilder, + args=(config_dict,), + timeout=EXCHANGE_INIT_TIMEOUT, + description=f"{exchange_name} Builder initialization", + ) + + exchange_classes = { + "polymarket": Polymarket, + "opinion": Opinion, + "limitless": Limitless, + } + + exchange_class = exchange_classes.get(exchange_name.lower()) + if not exchange_class: + raise ValueError(f"Unknown exchange: {exchange_name}") + + # Ensure verbose is False for MCP + config_dict["verbose"] = DEFAULT_VERBOSE + + logger.info(f"Initializing {exchange_name} with provided credentials...") + exchange = _run_with_timeout( + exchange_class, + args=(config_dict,), + timeout=EXCHANGE_INIT_TIMEOUT, + description=f"{exchange_name} initialization", + ) + logger.info(f"{exchange_name} initialized successfully") + return exchange + def get_exchange( self, exchange_name: str, use_env: bool = True, validate: bool = True ) -> Exchange: """ Get or create exchange instance. 
+ Checks for context credentials first (SSE mode), then falls back + to environment credentials (local mode). + Args: exchange_name: Exchange name (polymarket, opinion, limitless) use_env: Load credentials from environment @@ -221,6 +330,48 @@ def get_exchange( Raises: ValueError: If exchange unknown or credentials invalid """ + # Check for context credentials (SSE mode - per-request credentials) + context_creds = get_context_credentials() + if context_creds: + exchange_creds = context_creds.get(exchange_name.lower()) + if exchange_creds: + # Validate required credentials (transport-agnostic messages) + if exchange_name.lower() == "polymarket": + # SSE mode supports two authentication methods: + # 1. Operator mode: user provides wallet address + signature + # 2. Builder profile: user provides api_key, api_secret, api_passphrase + has_user_address = exchange_creds.get("user_address") + has_builder_creds = all( + exchange_creds.get(k) for k in ("api_key", "api_secret", "api_passphrase") + ) + has_private_key = exchange_creds.get("private_key") + + if not has_user_address and not has_builder_creds and not has_private_key: + raise ValueError( + f"Missing credentials for {exchange_name}. " + "Please authenticate at dr-manhattan.io/approve" + ) + + # Validate signature for operator mode (security check) + if has_user_address and not has_private_key and not has_builder_creds: + from ..utils.security import validate_operator_credentials + + is_valid, error = validate_operator_credentials(exchange_creds) + if not is_valid: + raise ValueError(error) + elif exchange_name.lower() in ("limitless", "opinion"): + # Other exchanges still require private_key (not supported in SSE write mode) + if not exchange_creds.get("private_key"): + raise ValueError( + f"Missing private_key credential for {exchange_name}. " + "Please provide your private key." 
+ ) + + logger.info(f"Using context credentials for {exchange_name} (SSE mode)") + # Create exchange without caching (each user has different credentials) + return self._create_exchange_with_credentials(exchange_name, exchange_creds) + + # Fall back to cached exchange with environment credentials (local mode) with self._instance_lock: if exchange_name not in self._exchanges: logger.info(f"Creating new exchange instance: {exchange_name}") @@ -241,30 +392,7 @@ def get_exchange( "Please set it in your .env file or environment." ) logger.info(f"Using MCP credentials for {exchange_name}") - # Create exchange directly with dict config (MCP-specific) - from ...exchanges.limitless import Limitless - from ...exchanges.opinion import Opinion - from ...exchanges.polymarket import Polymarket - - exchange_classes = { - "polymarket": Polymarket, - "opinion": Opinion, - "limitless": Limitless, - } - - exchange_class = exchange_classes.get(exchange_name.lower()) - if not exchange_class: - raise ValueError(f"Unknown exchange: {exchange_name}") - - # Initialize with timeout to avoid blocking - logger.info(f"Initializing {exchange_name} (this may take a moment)...") - exchange = _run_with_timeout( - exchange_class, - args=(config_dict,), - timeout=EXCHANGE_INIT_TIMEOUT, - description=f"{exchange_name} initialization", - ) - logger.info(f"{exchange_name} initialized successfully") + exchange = self._create_exchange_with_credentials(exchange_name, config_dict) else: exchange = create_exchange(exchange_name, use_env=use_env, validate=validate) diff --git a/dr_manhattan/mcp/tools/__init__.py b/dr_manhattan/mcp/tools/__init__.py index 4290c00..e86c690 100644 --- a/dr_manhattan/mcp/tools/__init__.py +++ b/dr_manhattan/mcp/tools/__init__.py @@ -1,3 +1,65 @@ """MCP Tools for dr-manhattan.""" -# Tools will be registered via decorators in each module +from . 
import account_tools, exchange_tools, market_tools, strategy_tools, trading_tools
+
+# Lazy imports for MCP-specific definitions (requires mcp package)
+# These are only imported when explicitly accessed to avoid breaking
+# tests that don't have the mcp package installed
+_definitions_loaded = False
+_TOOL_DISPATCH = None
+_get_tool_definitions = None
+
+
+def get_tool_definitions():
+    """Get tool definitions (lazy import)."""
+    global _definitions_loaded, _get_tool_definitions
+    if _get_tool_definitions is None:
+        from .definitions import get_tool_definitions as _gtd
+
+        _get_tool_definitions = _gtd
+        _definitions_loaded = True
+    return _get_tool_definitions()
+
+
+def _get_dispatch():
+    """Get tool dispatch table (lazy import)."""
+    global _definitions_loaded, _TOOL_DISPATCH
+    if _TOOL_DISPATCH is None:
+        from .definitions import TOOL_DISPATCH
+
+        _TOOL_DISPATCH = TOOL_DISPATCH
+        _definitions_loaded = True
+    return _TOOL_DISPATCH
+
+
+# For backwards compatibility, expose TOOL_DISPATCH as a property-like access
+class _ToolDispatchProxy:
+    """Proxy for lazy loading TOOL_DISPATCH."""
+
+    def __getitem__(self, key):
+        return _get_dispatch()[key]
+
+    def __contains__(self, key):
+        return key in _get_dispatch()
+
+    def keys(self):
+        return _get_dispatch().keys()
+
+    def items(self):
+        return _get_dispatch().items()
+
+    def values(self):
+        return _get_dispatch().values()
+
+
+TOOL_DISPATCH = _ToolDispatchProxy()
+
+__all__ = [
+    "account_tools",
+    "exchange_tools",
+    "market_tools",
+    "strategy_tools",
+    "trading_tools",
+    "get_tool_definitions",
+    "TOOL_DISPATCH",
+]
diff --git a/dr_manhattan/mcp/tools/definitions.py b/dr_manhattan/mcp/tools/definitions.py
new file mode 100644
index 0000000..611a106
--- /dev/null
+++ b/dr_manhattan/mcp/tools/definitions.py
@@ -0,0 +1,420 @@
+"""Shared tool definitions for MCP servers.
+
+This module contains tool definitions and dispatch tables used by both
+stdio (server.py) and SSE (server_sse.py) transports.
+ +Consolidating definitions here avoids code duplication and ensures +consistency between transport implementations. +""" + +from typing import Callable, Dict, List, Tuple + +from mcp.types import Tool + +from . import account_tools, exchange_tools, market_tools, strategy_tools, trading_tools + + +def get_tool_definitions() -> List[Tool]: + """ + Get all MCP tool definitions. + + Returns: + List of Tool objects for MCP registration + """ + return [ + # ================================================================= + # Exchange tools (3) + # ================================================================= + Tool( + name="list_exchanges", + description="List all available prediction market exchanges", + inputSchema={"type": "object", "properties": {}}, + ), + Tool( + name="get_exchange_info", + description="Get exchange metadata and capabilities", + inputSchema={ + "type": "object", + "properties": { + "exchange": { + "type": "string", + "description": "Exchange name (polymarket, opinion, limitless)", + } + }, + "required": ["exchange"], + }, + ), + Tool( + name="validate_credentials", + description="Validate exchange credentials without trading", + inputSchema={ + "type": "object", + "properties": {"exchange": {"type": "string", "description": "Exchange name"}}, + "required": ["exchange"], + }, + ), + # ================================================================= + # Market tools (11) + # ================================================================= + Tool( + name="fetch_markets", + description="Fetch ALL markets with pagination (slow, 100+ results). 
Use search_markets instead to find specific markets by name.", + inputSchema={ + "type": "object", + "properties": { + "exchange": {"type": "string", "description": "Exchange name"}, + "limit": { + "type": "integer", + "description": "Max markets to return (default: 100, max: 500)", + "default": 100, + }, + "offset": { + "type": "integer", + "description": "Pagination offset (default: 0)", + "default": 0, + }, + "params": {"type": "object", "description": "Optional filters"}, + }, + "required": ["exchange"], + }, + ), + Tool( + name="search_markets", + description="RECOMMENDED: Search markets by keyword (fast). Use this first when user asks about specific topics.", + inputSchema={ + "type": "object", + "properties": { + "exchange": {"type": "string", "description": "Exchange name"}, + "query": {"type": "string", "description": "Search keyword"}, + "limit": { + "type": "integer", + "description": "Max results (default: 20)", + "default": 20, + }, + }, + "required": ["exchange", "query"], + }, + ), + Tool( + name="fetch_market", + description="Fetch a specific market by ID", + inputSchema={ + "type": "object", + "properties": { + "exchange": {"type": "string"}, + "market_id": {"type": "string", "description": "Market identifier"}, + }, + "required": ["exchange", "market_id"], + }, + ), + Tool( + name="fetch_markets_by_slug", + description="Fetch markets by slug or URL (Polymarket, Limitless)", + inputSchema={ + "type": "object", + "properties": { + "exchange": {"type": "string"}, + "slug": {"type": "string", "description": "Market slug or full URL"}, + }, + "required": ["exchange", "slug"], + }, + ), + Tool( + name="get_orderbook", + description="Get orderbook for a token", + inputSchema={ + "type": "object", + "properties": { + "exchange": {"type": "string"}, + "token_id": {"type": "string", "description": "Token ID"}, + }, + "required": ["exchange", "token_id"], + }, + ), + Tool( + name="get_best_bid_ask", + description="Get best bid and ask prices", + 
inputSchema={ + "type": "object", + "properties": { + "exchange": {"type": "string"}, + "token_id": {"type": "string"}, + }, + "required": ["exchange", "token_id"], + }, + ), + Tool( + name="fetch_token_ids", + description="Get token IDs for a market", + inputSchema={ + "type": "object", + "properties": { + "exchange": {"type": "string"}, + "market_id": {"type": "string"}, + }, + "required": ["exchange", "market_id"], + }, + ), + Tool( + name="find_tradeable_market", + description="Find a tradeable market for an outcome", + inputSchema={ + "type": "object", + "properties": { + "exchange": {"type": "string"}, + "market_id": {"type": "string"}, + "outcome": {"type": "string"}, + }, + "required": ["exchange", "market_id", "outcome"], + }, + ), + Tool( + name="find_crypto_hourly_market", + description="Find hourly crypto prediction markets", + inputSchema={ + "type": "object", + "properties": { + "exchange": {"type": "string"}, + "symbol": {"type": "string", "description": "Crypto symbol (BTC, ETH)"}, + }, + "required": ["exchange", "symbol"], + }, + ), + Tool( + name="parse_market_identifier", + description="Parse market slug from URL", + inputSchema={ + "type": "object", + "properties": {"identifier": {"type": "string"}}, + "required": ["identifier"], + }, + ), + Tool( + name="get_tag_by_slug", + description="Get Polymarket tag information", + inputSchema={ + "type": "object", + "properties": {"slug": {"type": "string"}}, + "required": ["slug"], + }, + ), + # ================================================================= + # Trading tools (5) + # ================================================================= + Tool( + name="create_order", + description="Create a new order (requires credentials)", + inputSchema={ + "type": "object", + "properties": { + "exchange": {"type": "string"}, + "market_id": {"type": "string"}, + "outcome": {"type": "string", "description": "Outcome (Yes, No, etc.)"}, + "side": {"type": "string", "enum": ["buy", "sell"]}, + "price": 
{"type": "number", "minimum": 0, "maximum": 1}, + "size": {"type": "number", "minimum": 0}, + "params": {"type": "object"}, + }, + "required": ["exchange", "market_id", "outcome", "side", "price", "size"], + }, + ), + Tool( + name="cancel_order", + description="Cancel an existing order", + inputSchema={ + "type": "object", + "properties": { + "exchange": {"type": "string"}, + "order_id": {"type": "string"}, + "market_id": {"type": "string"}, + }, + "required": ["exchange", "order_id"], + }, + ), + Tool( + name="cancel_all_orders", + description="Cancel all open orders", + inputSchema={ + "type": "object", + "properties": { + "exchange": {"type": "string"}, + "market_id": {"type": "string"}, + }, + "required": ["exchange"], + }, + ), + Tool( + name="fetch_open_orders", + description="Fetch open orders", + inputSchema={ + "type": "object", + "properties": { + "exchange": {"type": "string"}, + "market_id": {"type": "string"}, + }, + "required": ["exchange"], + }, + ), + Tool( + name="fetch_order", + description="Fetch a specific order by ID", + inputSchema={ + "type": "object", + "properties": { + "exchange": {"type": "string"}, + "order_id": {"type": "string"}, + }, + "required": ["exchange", "order_id"], + }, + ), + # ================================================================= + # Account tools (4) + # ================================================================= + Tool( + name="fetch_balance", + description="Fetch account balance", + inputSchema={ + "type": "object", + "properties": {"exchange": {"type": "string"}}, + "required": ["exchange"], + }, + ), + Tool( + name="fetch_positions", + description="Fetch all positions", + inputSchema={ + "type": "object", + "properties": {"exchange": {"type": "string"}}, + "required": ["exchange"], + }, + ), + Tool( + name="calculate_nav", + description="Calculate Net Asset Value", + inputSchema={ + "type": "object", + "properties": {"exchange": {"type": "string"}}, + "required": ["exchange"], + }, + ), + Tool( + 
name="fetch_positions_for_market", + description="Fetch positions for a specific market", + inputSchema={ + "type": "object", + "properties": { + "exchange": {"type": "string"}, + "market_id": {"type": "string"}, + }, + "required": ["exchange", "market_id"], + }, + ), + # ================================================================= + # Strategy tools (7) + # ================================================================= + Tool( + name="create_strategy_session", + description="Create a new strategy session", + inputSchema={ + "type": "object", + "properties": { + "exchange": {"type": "string"}, + "strategy_name": {"type": "string"}, + "market_id": {"type": "string"}, + "params": {"type": "object"}, + }, + "required": ["exchange", "strategy_name", "market_id"], + }, + ), + Tool( + name="get_strategy_status", + description="Get strategy session status", + inputSchema={ + "type": "object", + "properties": {"session_id": {"type": "string"}}, + "required": ["session_id"], + }, + ), + Tool( + name="stop_strategy", + description="Stop a strategy session", + inputSchema={ + "type": "object", + "properties": {"session_id": {"type": "string"}}, + "required": ["session_id"], + }, + ), + Tool( + name="list_strategy_sessions", + description="List all strategy sessions", + inputSchema={"type": "object", "properties": {}}, + ), + Tool( + name="pause_strategy", + description="Pause a strategy session", + inputSchema={ + "type": "object", + "properties": {"session_id": {"type": "string"}}, + "required": ["session_id"], + }, + ), + Tool( + name="resume_strategy", + description="Resume a paused strategy session", + inputSchema={ + "type": "object", + "properties": {"session_id": {"type": "string"}}, + "required": ["session_id"], + }, + ), + Tool( + name="get_strategy_metrics", + description="Get strategy performance metrics", + inputSchema={ + "type": "object", + "properties": {"session_id": {"type": "string"}}, + "required": ["session_id"], + }, + ), + ] + + +# Tool dispatch 
table +# Format: tool_name -> (handler_function, requires_arguments) +TOOL_DISPATCH: Dict[str, Tuple[Callable, bool]] = { + # Exchange tools + "list_exchanges": (exchange_tools.list_exchanges, False), + "get_exchange_info": (exchange_tools.get_exchange_info, True), + "validate_credentials": (exchange_tools.validate_credentials, True), + # Market tools + "fetch_markets": (market_tools.fetch_markets, True), + "search_markets": (market_tools.search_markets, True), + "fetch_market": (market_tools.fetch_market, True), + "fetch_markets_by_slug": (market_tools.fetch_markets_by_slug, True), + "get_orderbook": (market_tools.get_orderbook, True), + "get_best_bid_ask": (market_tools.get_best_bid_ask, True), + "fetch_token_ids": (market_tools.fetch_token_ids, True), + "find_tradeable_market": (market_tools.find_tradeable_market, True), + "find_crypto_hourly_market": (market_tools.find_crypto_hourly_market, True), + "parse_market_identifier": (market_tools.parse_market_identifier, True), + "get_tag_by_slug": (market_tools.get_tag_by_slug, True), + # Trading tools + "create_order": (trading_tools.create_order, True), + "cancel_order": (trading_tools.cancel_order, True), + "cancel_all_orders": (trading_tools.cancel_all_orders, True), + "fetch_open_orders": (trading_tools.fetch_open_orders, True), + "fetch_order": (trading_tools.fetch_order, True), + # Account tools + "fetch_balance": (account_tools.fetch_balance, True), + "fetch_positions": (account_tools.fetch_positions, True), + "calculate_nav": (account_tools.calculate_nav, True), + "fetch_positions_for_market": (account_tools.fetch_positions_for_market, True), + # Strategy tools + "create_strategy_session": (strategy_tools.create_strategy_session, True), + "get_strategy_status": (strategy_tools.get_strategy_status, True), + "stop_strategy": (strategy_tools.stop_strategy, True), + "list_strategy_sessions": (strategy_tools.list_strategy_sessions, False), + "pause_strategy": (strategy_tools.pause_strategy, True), + 
"resume_strategy": (strategy_tools.resume_strategy, True), + "get_strategy_metrics": (strategy_tools.get_strategy_metrics, True), +} diff --git a/dr_manhattan/mcp/utils/__init__.py b/dr_manhattan/mcp/utils/__init__.py index f370691..765f1d0 100644 --- a/dr_manhattan/mcp/utils/__init__.py +++ b/dr_manhattan/mcp/utils/__init__.py @@ -2,6 +2,18 @@ from .errors import McpError, translate_error from .rate_limiter import RateLimiter, check_rate_limit, get_rate_limiter +from .security import ( + SENSITIVE_HEADERS, + SSE_WRITE_ENABLED_EXCHANGES, + WRITE_OPERATIONS, + get_credentials_from_headers, + has_any_credentials, + is_write_operation, + sanitize_error_message, + sanitize_headers_for_logging, + validate_credentials_present, + validate_write_operation, +) from .serializers import serialize_model from .validation import ( SUPPORTED_EXCHANGES, @@ -25,6 +37,18 @@ "RateLimiter", "check_rate_limit", "get_rate_limiter", + # Security + "SENSITIVE_HEADERS", + "SSE_WRITE_ENABLED_EXCHANGES", + "WRITE_OPERATIONS", + "get_credentials_from_headers", + "has_any_credentials", + "is_write_operation", + "sanitize_error_message", + "sanitize_headers_for_logging", + "validate_credentials_present", + "validate_write_operation", + # Validation "SUPPORTED_EXCHANGES", "validate_exchange", "validate_market_id", diff --git a/dr_manhattan/mcp/utils/security.py b/dr_manhattan/mcp/utils/security.py new file mode 100644 index 0000000..d95c5aa --- /dev/null +++ b/dr_manhattan/mcp/utils/security.py @@ -0,0 +1,354 @@ +"""Security utilities for MCP server. + +Provides functions for handling sensitive data safely in remote MCP environments. 
+""" + +import re +import time +from typing import Any, Dict, List, Optional + +from eth_account.messages import encode_defunct +from web3 import Web3 + +# Sensitive header names that should never be logged +SENSITIVE_HEADERS: List[str] = [ + # Polymarket (Builder profile - no private key needed) + "x-polymarket-api-key", + "x-polymarket-api-secret", + "x-polymarket-passphrase", + # Operator mode authentication + "x-polymarket-auth-signature", + # Generic + "authorization", + "x-api-key", +] + +# Header to credential mapping for each exchange +# SSE server supports Polymarket via: +# 1. Operator mode: user provides wallet address + signature, server signs on behalf +# 2. Builder profile: user provides api_key, api_secret, api_passphrase +HEADER_CREDENTIAL_MAP: Dict[str, Dict[str, str]] = { + "polymarket": { + # Operator mode (preferred for SSE) - requires signature for security + "x-polymarket-wallet-address": "user_address", + "x-polymarket-auth-signature": "auth_signature", + "x-polymarket-auth-timestamp": "auth_timestamp", + "x-polymarket-auth-expiry": "auth_expiry", + # Builder profile (alternative) + "x-polymarket-api-key": "api_key", + "x-polymarket-api-secret": "api_secret", + "x-polymarket-passphrase": "api_passphrase", + }, +} + +# Authentication message prefix (must match frontend) +AUTH_MESSAGE_PREFIX = "I authorize Dr. Manhattan to trade on Polymarket on my behalf." 
+ +# Default signature validity (24 hours) - can be overridden by user +DEFAULT_SIGNATURE_VALIDITY_SECONDS = 86400 + +# Maximum allowed expiry (90 days) - security limit +MAX_SIGNATURE_VALIDITY_SECONDS = 7776000 + +# Allowed expiry options (must match frontend) +ALLOWED_EXPIRY_OPTIONS = [86400, 604800, 2592000, 7776000] # 24h, 7d, 30d, 90d + +# Write operations that modify state (require credentials) +WRITE_OPERATIONS: List[str] = [ + "create_order", + "cancel_order", + "cancel_all_orders", + "create_strategy_session", + "stop_strategy", + "pause_strategy", + "resume_strategy", +] + +# Exchanges that support write operations via SSE (Builder profile) +SSE_WRITE_ENABLED_EXCHANGES: List[str] = ["polymarket"] + +# Patterns that look like private keys or sensitive data +SENSITIVE_PATTERNS = [ + re.compile(r"0x[a-fA-F0-9]{64}"), # Ethereum private key + re.compile(r"[a-fA-F0-9]{64}"), # Raw hex key + re.compile(r"-----BEGIN.*PRIVATE KEY-----"), # RSA/EC private key +] + + +def is_sensitive_header(header_name: str) -> bool: + """Check if a header name is sensitive.""" + return header_name.lower() in SENSITIVE_HEADERS + + +def sanitize_headers_for_logging(headers: Dict[str, str]) -> Dict[str, str]: + """ + Sanitize headers for safe logging. + + Replaces sensitive header values with fully masked placeholders. + Does NOT expose any characters to prevent brute force hints. + + Args: + headers: Original headers dict + + Returns: + Headers dict with sensitive values fully masked + """ + sanitized = {} + for key, value in headers.items(): + if is_sensitive_header(key): + # Fully mask - do not expose any characters (security best practice) + sanitized[key] = "[REDACTED]" if value else "[EMPTY]" + else: + sanitized[key] = value + return sanitized + + +def sanitize_error_message(message: str) -> str: + """ + Remove sensitive data from error messages. 
+
+    Args:
+        message: Original error message
+
+    Returns:
+        Message with sensitive patterns replaced
+    """
+    result = message
+    for pattern in SENSITIVE_PATTERNS:
+        result = pattern.sub("[REDACTED]", result)
+    return result
+
+
+def get_credentials_from_headers(headers: Dict[str, str]) -> Dict[str, Dict[str, Any]]:
+    """
+    Extract exchange credentials from HTTP headers.
+
+    Headers are expected in format: X-{Exchange}-{Credential}
+    e.g., X-Polymarket-Api-Key, X-Polymarket-Wallet-Address
+
+    Args:
+        headers: HTTP headers dict (case-insensitive keys)
+
+    Returns:
+        Credentials dict keyed by exchange name
+    """
+    # Normalize header keys to lowercase
+    normalized_headers = {k.lower(): v for k, v in headers.items()}
+
+    credentials: Dict[str, Dict[str, Any]] = {}
+
+    for exchange, header_map in HEADER_CREDENTIAL_MAP.items():
+        exchange_creds: Dict[str, Any] = {}
+
+        for header_name, cred_key in header_map.items():
+            value = normalized_headers.get(header_name)
+            if value:
+                # Handle type conversion for specific fields
+                if cred_key == "signature_type":
+                    try:
+                        exchange_creds[cred_key] = int(value)
+                    except ValueError:
+                        exchange_creds[cred_key] = 0  # Default EOA
+                else:
+                    exchange_creds[cred_key] = value
+
+        # Only include exchange if it has at least one credential
+        if exchange_creds:
+            credentials[exchange] = exchange_creds
+
+    return credentials
+
+
+def validate_credentials_present(
+    credentials: Dict[str, Any], exchange: str
+) -> tuple[bool, Optional[str]]:
+    """
+    Validate that required credentials are present for an exchange.
+
+    Returns transport-agnostic error messages. The transport layer (SSE, stdio)
+    should add transport-specific hints if needed.
+ + Args: + credentials: Credentials dict for the exchange + exchange: Exchange name + + Returns: + Tuple of (is_valid, error_message) + """ + # SSE server only supports Polymarket via Builder profile + required_fields = { + "polymarket": ["api_key", "api_secret", "api_passphrase"], + } + + required = required_fields.get(exchange.lower(), []) + missing = [field for field in required if not credentials.get(field)] + + if missing: + # Transport-agnostic message (no HTTP header references) + return False, f"Missing required credentials for {exchange}: {', '.join(missing)}" + + return True, None + + +def is_write_operation(tool_name: str) -> bool: + """Check if a tool is a write operation.""" + return tool_name in WRITE_OPERATIONS + + +def is_write_allowed_for_exchange(exchange: str) -> bool: + """Check if write operations are allowed for an exchange via SSE.""" + return exchange.lower() in SSE_WRITE_ENABLED_EXCHANGES + + +def validate_write_operation(tool_name: str, exchange: Optional[str]) -> tuple[bool, Optional[str]]: + """ + Validate that a write operation is allowed. + + SSE server only allows write operations for Polymarket (via Builder profile). + Other exchanges are read-only for security (no private keys on server). + + Args: + tool_name: The MCP tool being called + exchange: The target exchange (if applicable) + + Returns: + Tuple of (is_allowed, error_message) + """ + if not is_write_operation(tool_name): + return True, None + + if not exchange: + return False, f"Write operation '{tool_name}' requires an exchange parameter" + + if not is_write_allowed_for_exchange(exchange): + return ( + False, + f"Write operations are not supported for '{exchange}' via remote server. " + f"Only Polymarket is supported (via Builder profile). " + f"For other exchanges, use the local MCP server.", + ) + + return True, None + + +def get_header_hint_for_credential(exchange: str, credential: str) -> Optional[str]: + """ + Get the HTTP header name hint for a credential. 
+ + This is a helper for SSE transport to provide user-friendly error messages. + + Args: + exchange: Exchange name + credential: Credential field name (e.g., 'private_key') + + Returns: + Header name (e.g., 'X-Polymarket-Private-Key') or None + """ + header_map = HEADER_CREDENTIAL_MAP.get(exchange.lower(), {}) + for header, cred_key in header_map.items(): + if cred_key == credential: + # Convert to title case for display (x-polymarket-private-key -> X-Polymarket-Private-Key) + return "-".join(word.title() for word in header.split("-")) + return None + + +def has_any_credentials(headers: Dict[str, str]) -> bool: + """Check if headers contain any exchange credentials.""" + normalized = {k.lower() for k in headers.keys()} + return any(h in normalized for h in SENSITIVE_HEADERS if h != "authorization") + + +def verify_wallet_signature( + wallet_address: str, signature: str, timestamp: str, expiry: Optional[str] = None +) -> tuple[bool, Optional[str]]: + """ + Verify that a signature proves ownership of a wallet address. + + The user must sign a message containing their wallet address, timestamp, and expiry. + This prevents replay attacks and proves wallet ownership. + + Args: + wallet_address: The claimed wallet address + signature: The signature of the auth message + timestamp: Unix timestamp when the message was signed + expiry: Expiry duration in seconds (optional, defaults to 24 hours) + + Returns: + Tuple of (is_valid, error_message) + """ + try: + # Parse and validate timestamp + ts = int(timestamp) + current_time = int(time.time()) + + # Parse and validate expiry + if expiry: + try: + expiry_seconds = int(expiry) + # Validate expiry is one of the allowed options + if expiry_seconds not in ALLOWED_EXPIRY_OPTIONS: + return False, f"Invalid expiry duration. Allowed: {ALLOWED_EXPIRY_OPTIONS}" + # Cap at maximum for security + expiry_seconds = min(expiry_seconds, MAX_SIGNATURE_VALIDITY_SECONDS) + except ValueError: + return False, "Invalid expiry format." 
+ else: + expiry_seconds = DEFAULT_SIGNATURE_VALIDITY_SECONDS + + # Check if signature has expired + if current_time - ts > expiry_seconds: + return False, "Signature has expired. Please re-authenticate." + + # Check if timestamp is in the future (clock skew tolerance: 5 minutes) + if ts > current_time + 300: + return False, "Invalid timestamp (in future)." + + # Reconstruct the message that was signed (must match frontend format) + if expiry: + message = f"{AUTH_MESSAGE_PREFIX}\n\nWallet: {wallet_address}\nTimestamp: {timestamp}\nExpiry: {expiry}" + else: + # Legacy format without expiry (for backwards compatibility) + message = f"{AUTH_MESSAGE_PREFIX}\n\nWallet: {wallet_address}\nTimestamp: {timestamp}" + + # Verify the signature + w3 = Web3() + message_hash = encode_defunct(text=message) + recovered_address = w3.eth.account.recover_message(message_hash, signature=signature) + + # Compare addresses (case-insensitive) + if recovered_address.lower() != wallet_address.lower(): + return False, "Signature does not match wallet address." + + return True, None + + except ValueError as e: + return False, f"Invalid timestamp format: {e}" + except Exception as e: + return False, f"Signature verification failed: {e}" + + +def validate_operator_credentials(credentials: Dict[str, Any]) -> tuple[bool, Optional[str]]: + """ + Validate operator mode credentials (wallet address + signature). + + Args: + credentials: Credentials dict containing user_address, auth_signature, auth_timestamp, auth_expiry + + Returns: + Tuple of (is_valid, error_message) + """ + user_address = credentials.get("user_address") + signature = credentials.get("auth_signature") + timestamp = credentials.get("auth_timestamp") + expiry = credentials.get("auth_expiry") + + if not user_address: + return False, "Missing wallet address." + + if not signature or not timestamp: + return ( + False, + "Missing authentication signature. 
Please authenticate at dr-manhattan.io/approve", + ) + + return verify_wallet_signature(user_address, signature, timestamp, expiry) diff --git a/pyproject.toml b/pyproject.toml index cfe8d79..a6bc920 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,11 +38,11 @@ build-backend = "hatchling.build" [tool.black] line-length = 100 -target-version = ["py310"] +target-version = ["py311"] [tool.ruff] line-length = 100 -target-version = "py310" +target-version = "py311" [tool.ruff.lint] select = ["E", "F", "I", "N", "W"] @@ -72,10 +72,13 @@ packages = ["dr_manhattan"] [project.optional-dependencies] mcp = [ "mcp>=0.9.0", + "starlette>=0.36.0", + "uvicorn>=0.27.0", ] [project.scripts] dr-manhattan-mcp = "dr_manhattan.mcp.server:run" +dr-manhattan-mcp-sse = "dr_manhattan.mcp.server_sse:run_sse" [dependency-groups] dev = [ diff --git a/tests/mcp/test_server_sse.py b/tests/mcp/test_server_sse.py new file mode 100644 index 0000000..5ef0a34 --- /dev/null +++ b/tests/mcp/test_server_sse.py @@ -0,0 +1,372 @@ +"""Tests for MCP SSE server. + +Tests cover: +- Credential extraction from headers (Polymarket Builder profile only) +- Health check endpoint +- Credential masking in logs +- Credential validation +- Write operation restrictions + +Note: Tests that require the 'mcp' package are skipped if not installed. 
+""" + +# isort: skip_file +from unittest.mock import patch + +import pytest + +# Check if mcp package is available +try: + import mcp # noqa: F401 + + HAS_MCP = True +except ImportError: + HAS_MCP = False + + +class TestCredentialExtraction: + """Tests for extracting credentials from HTTP headers.""" + + def test_extract_polymarket_credentials(self): + """Test extraction of Polymarket Builder profile credentials.""" + from dr_manhattan.mcp.utils.security import get_credentials_from_headers + + headers = { + "X-Polymarket-Api-Key": "api_key_123", + "X-Polymarket-Api-Secret": "api_secret_456", + "X-Polymarket-Passphrase": "passphrase_789", + } + + credentials = get_credentials_from_headers(headers) + + assert "polymarket" in credentials + assert credentials["polymarket"]["api_key"] == "api_key_123" + assert credentials["polymarket"]["api_secret"] == "api_secret_456" + assert credentials["polymarket"]["api_passphrase"] == "passphrase_789" + + def test_case_insensitive_headers(self): + """Test that header extraction is case-insensitive.""" + from dr_manhattan.mcp.utils.security import get_credentials_from_headers + + headers = { + "x-polymarket-api-key": "key", + "X-POLYMARKET-API-SECRET": "secret", + "X-Polymarket-Passphrase": "pass", + } + + credentials = get_credentials_from_headers(headers) + + assert credentials["polymarket"]["api_key"] == "key" + assert credentials["polymarket"]["api_secret"] == "secret" + assert credentials["polymarket"]["api_passphrase"] == "pass" + + def test_empty_headers(self): + """Test extraction with no credential headers.""" + from dr_manhattan.mcp.utils.security import get_credentials_from_headers + + headers = {"Content-Type": "application/json", "Accept": "*/*"} + + credentials = get_credentials_from_headers(headers) + + assert credentials == {} + + def test_partial_credentials(self): + """Test extraction with only some Polymarket headers.""" + from dr_manhattan.mcp.utils.security import get_credentials_from_headers + + headers = 
{"X-Polymarket-Api-Key": "key_only"} + + credentials = get_credentials_from_headers(headers) + + # Should still extract partial credentials + assert "polymarket" in credentials + assert credentials["polymarket"]["api_key"] == "key_only" + assert "api_secret" not in credentials["polymarket"] + + +class TestCredentialMasking: + """Tests for credential masking in logs.""" + + def test_sensitive_headers_fully_masked(self): + """Test that sensitive headers are fully masked (no partial exposure).""" + from dr_manhattan.mcp.utils.security import sanitize_headers_for_logging + + headers = { + "X-Polymarket-Api-Key": "api_key_1234567890", + "Content-Type": "application/json", + } + + sanitized = sanitize_headers_for_logging(headers) + + # Should be fully redacted, not showing first/last chars + assert sanitized["X-Polymarket-Api-Key"] == "[REDACTED]" + assert sanitized["Content-Type"] == "application/json" + + def test_empty_sensitive_header_marked(self): + """Test that empty sensitive headers are marked as empty.""" + from dr_manhattan.mcp.utils.security import sanitize_headers_for_logging + + headers = {"X-Polymarket-Api-Key": "", "Content-Type": "application/json"} + + sanitized = sanitize_headers_for_logging(headers) + + assert sanitized["X-Polymarket-Api-Key"] == "[EMPTY]" + + def test_all_sensitive_headers_masked(self): + """Test that all known sensitive headers are masked.""" + from dr_manhattan.mcp.utils.security import ( + SENSITIVE_HEADERS, + sanitize_headers_for_logging, + ) + + headers = {h: "secret_value_123" for h in SENSITIVE_HEADERS} + headers["safe-header"] = "visible" + + sanitized = sanitize_headers_for_logging(headers) + + for header in SENSITIVE_HEADERS: + assert sanitized[header] == "[REDACTED]" + assert sanitized["safe-header"] == "visible" + + +class TestCredentialValidation: + """Tests for credential validation.""" + + def test_validate_polymarket_credentials_valid(self): + """Test validation passes with all required Polymarket credentials.""" + 
from dr_manhattan.mcp.utils.security import validate_credentials_present + + credentials = { + "api_key": "key", + "api_secret": "secret", + "api_passphrase": "pass", + } + + is_valid, error = validate_credentials_present(credentials, "polymarket") + + assert is_valid is True + assert error is None + + def test_validate_polymarket_credentials_missing_key(self): + """Test validation fails when api_key is missing.""" + from dr_manhattan.mcp.utils.security import validate_credentials_present + + credentials = {"api_secret": "secret", "api_passphrase": "pass"} + + is_valid, error = validate_credentials_present(credentials, "polymarket") + + assert is_valid is False + assert "api_key" in error + + def test_validate_polymarket_credentials_missing_secret(self): + """Test validation fails when api_secret is missing.""" + from dr_manhattan.mcp.utils.security import validate_credentials_present + + credentials = {"api_key": "key", "api_passphrase": "pass"} + + is_valid, error = validate_credentials_present(credentials, "polymarket") + + assert is_valid is False + assert "api_secret" in error + + def test_validate_unknown_exchange(self): + """Test validation for unknown exchange (no required fields).""" + from dr_manhattan.mcp.utils.security import validate_credentials_present + + credentials = {"some_key": "value"} + + # Unknown exchanges have no requirements in SSE mode + is_valid, error = validate_credentials_present(credentials, "limitless") + + assert is_valid is True + assert error is None + + def test_error_message_transport_agnostic(self): + """Test that error messages don't contain HTTP-specific references.""" + from dr_manhattan.mcp.utils.security import validate_credentials_present + + credentials = {} + + is_valid, error = validate_credentials_present(credentials, "polymarket") + + assert is_valid is False + # Should NOT contain HTTP header names like X-Polymarket-Api-Key + assert "X-" not in error + assert "header" not in error.lower() + + +class 
TestWriteOperationValidation: + """Tests for write operation restrictions.""" + + def test_write_operation_allowed_for_polymarket(self): + """Test that write operations are allowed for Polymarket.""" + from dr_manhattan.mcp.utils.security import validate_write_operation + + is_allowed, error = validate_write_operation("create_order", "polymarket") + + assert is_allowed is True + assert error is None + + def test_write_operation_blocked_for_other_exchanges(self): + """Test that write operations are blocked for non-Polymarket exchanges.""" + from dr_manhattan.mcp.utils.security import validate_write_operation + + for exchange in ["limitless", "opinion", "kalshi", "predictfun"]: + is_allowed, error = validate_write_operation("create_order", exchange) + + assert is_allowed is False + assert "not supported" in error + assert "Builder profile" in error + + def test_read_operation_allowed_for_all_exchanges(self): + """Test that read operations are allowed for all exchanges.""" + from dr_manhattan.mcp.utils.security import validate_write_operation + + for exchange in ["polymarket", "limitless", "opinion", "kalshi"]: + is_allowed, error = validate_write_operation("fetch_markets", exchange) + + assert is_allowed is True + assert error is None + + def test_all_write_operations_blocked_for_other_exchanges(self): + """Test that all write operations are blocked for non-Polymarket.""" + from dr_manhattan.mcp.utils.security import WRITE_OPERATIONS, validate_write_operation + + for op in WRITE_OPERATIONS: + is_allowed, error = validate_write_operation(op, "limitless") + + assert is_allowed is False + assert error is not None + + def test_write_operation_without_exchange(self): + """Test write operation without exchange parameter.""" + from dr_manhattan.mcp.utils.security import validate_write_operation + + is_allowed, error = validate_write_operation("create_order", None) + + assert is_allowed is False + assert "requires an exchange" in error + + +@pytest.mark.skipif(not HAS_MCP, 
reason="MCP package not installed") +class TestHealthCheck: + """Tests for health check endpoint (requires mcp package).""" + + def test_health_check_returns_healthy(self): + """Test that health check returns healthy status.""" + from starlette.testclient import TestClient + + from dr_manhattan.mcp.server_sse import app + + client = TestClient(app) + response = client.get("/health") + + assert response.status_code == 200 + data = response.json() + assert data["status"] == "healthy" + assert data["service"] == "dr-manhattan-mcp" + assert data["transport"] == "sse" + + +@pytest.mark.skipif(not HAS_MCP, reason="MCP package not installed") +class TestRootEndpoint: + """Tests for root endpoint (requires mcp package).""" + + def test_root_returns_usage_info(self): + """Test that root endpoint returns usage information.""" + from starlette.testclient import TestClient + + from dr_manhattan.mcp.server_sse import app + + client = TestClient(app) + response = client.get("/") + + assert response.status_code == 200 + data = response.json() + assert "service" in data + assert "endpoints" in data + assert "/sse" in data["endpoints"] + assert "/health" in data["endpoints"] + + def test_root_shows_security_model(self): + """Test that root endpoint shows security model.""" + from starlette.testclient import TestClient + + from dr_manhattan.mcp.server_sse import app + + client = TestClient(app) + response = client.get("/") + + data = response.json() + assert "security" in data + assert "Polymarket" in data["security"]["write_operations"] + + +@pytest.mark.skipif(not HAS_MCP, reason="MCP package not installed") +class TestEnvironmentValidation: + """Tests for environment variable validation (requires mcp package).""" + + def test_invalid_port_raises_error(self): + """Test that invalid PORT causes error.""" + from dr_manhattan.mcp.server_sse import _validate_env + + with patch.dict("os.environ", {"PORT": "invalid"}, clear=False): + with pytest.raises(SystemExit): + _validate_env() + + 
def test_port_out_of_range_raises_error(self): + """Test that PORT outside valid range causes error.""" + from dr_manhattan.mcp.server_sse import _validate_env + + with patch.dict("os.environ", {"PORT": "99999"}, clear=False): + with pytest.raises(SystemExit): + _validate_env() + + def test_valid_port_returns_config(self): + """Test that valid PORT returns correct config.""" + from dr_manhattan.mcp.server_sse import _validate_env + + with patch.dict("os.environ", {"PORT": "3000", "HOST": "127.0.0.1"}, clear=False): + host, port = _validate_env() + + assert host == "127.0.0.1" + assert port == 3000 + + +@pytest.mark.skipif(not HAS_MCP, reason="MCP package not installed") +class TestToolDefinitions: + """Tests for shared tool definitions (requires mcp package).""" + + def test_tool_definitions_not_empty(self): + """Test that tool definitions are loaded.""" + from dr_manhattan.mcp.tools import get_tool_definitions + + tools = get_tool_definitions() + + assert len(tools) > 0 + + def test_tool_dispatch_matches_definitions(self): + """Test that dispatch table has entry for each tool.""" + from dr_manhattan.mcp.tools import TOOL_DISPATCH, get_tool_definitions + + tools = get_tool_definitions() + tool_names = {t.name for t in tools} + + assert set(TOOL_DISPATCH.keys()) == tool_names + + def test_required_tools_present(self): + """Test that essential tools are defined.""" + from dr_manhattan.mcp.tools import get_tool_definitions + + tools = get_tool_definitions() + tool_names = {t.name for t in tools} + + required = [ + "list_exchanges", + "search_markets", + "fetch_balance", + "create_order", + ] + + for name in required: + assert name in tool_names, f"Missing required tool: {name}" diff --git a/tests/test_polymarket.py b/tests/test_polymarket.py index 68bd6bf..bf77e4a 100644 --- a/tests/test_polymarket.py +++ b/tests/test_polymarket.py @@ -3,7 +3,6 @@ from unittest.mock import Mock, patch import pytest -from requests.exceptions import HTTPError from 
dr_manhattan.base.errors import AuthenticationError, MarketNotFound from dr_manhattan.exchanges.polymarket import Polymarket @@ -74,36 +73,41 @@ def test_fetch_markets(mock_get): assert markets[0].prices == {"Yes": 0.6, "No": 0.4} -@patch("requests.request") -def test_fetch_market(mock_request): +@patch.object(Polymarket, "fetch_token_ids", return_value=["token1", "token2"]) +@patch("requests.get") +def test_fetch_market(mock_get, mock_fetch_token_ids): """Test fetching a specific market""" mock_response = Mock() - mock_response.json.return_value = { - "id": "0xmarket123", - "question": "Test question?", - "outcomes": '["Yes", "No"]', - "outcomePrices": '["0.5", "0.5"]', - "clobTokenIds": '["token1", "token2"]', - "active": True, - "closed": False, - "minimum_tick_size": 0.01, - } - mock_response.raise_for_status = Mock() - mock_request.return_value = mock_response + mock_response.status_code = 200 + mock_response.json.return_value = [ + { + "id": "0xmarket123", + "question": "Test question?", + "outcomes": '["Yes", "No"]', + "outcomePrices": '["0.5", "0.5"]', + "clobTokenIds": '["token1", "token2"]', + "active": True, + "closed": False, + "minimum_tick_size": 0.01, + } + ] + mock_get.return_value = mock_response exchange = Polymarket() market = exchange.fetch_market("0xmarket123") assert market.id == "0xmarket123" assert market.question == "Test question?" 
+ mock_fetch_token_ids.assert_called_once_with("0xmarket123") -@patch("requests.request") -def test_fetch_market_not_found(mock_request): +@patch("requests.get") +def test_fetch_market_not_found(mock_get): """Test fetching non-existent market""" mock_response = Mock() - mock_response.raise_for_status.side_effect = HTTPError("404 Not Found") - mock_request.return_value = mock_response + mock_response.status_code = 404 + mock_response.json.return_value = [] + mock_get.return_value = mock_response exchange = Polymarket() diff --git a/tests/test_polymarket_builder.py b/tests/test_polymarket_builder.py new file mode 100644 index 0000000..5ad0822 --- /dev/null +++ b/tests/test_polymarket_builder.py @@ -0,0 +1,85 @@ +"""Tests for PolymarketBuilder exchange class.""" + +import pytest + +from dr_manhattan.base.errors import AuthenticationError +from dr_manhattan.exchanges.polymarket_builder import PolymarketBuilder + + +class TestPolymarketBuilderInit: + """Tests for PolymarketBuilder initialization.""" + + def test_requires_all_credentials(self): + """Test that all three credentials are required.""" + with pytest.raises( + AuthenticationError, match="requires api_key, api_secret, and api_passphrase" + ): + PolymarketBuilder({"api_key": "test"}) + + with pytest.raises( + AuthenticationError, match="requires api_key, api_secret, and api_passphrase" + ): + PolymarketBuilder({"api_key": "test", "api_secret": "test"}) + + def test_initializes_with_all_credentials(self): + """Test that it initializes with all credentials.""" + exchange = PolymarketBuilder( + { + "api_key": "test_key", + "api_secret": "test_secret", + "api_passphrase": "test_pass", + } + ) + + assert exchange.id == "polymarket" + assert exchange.name == "Polymarket" + assert exchange._clob_client is not None + assert exchange._clob_client.can_builder_auth() + + def test_no_private_key_stored(self): + """Test that no private key is stored.""" + exchange = PolymarketBuilder( + { + "api_key": "test_key", + 
"api_secret": "test_secret", + "api_passphrase": "test_pass", + } + ) + + assert exchange.private_key is None + assert exchange.funder is None + + +class TestPolymarketBuilderMethods: + """Tests for PolymarketBuilder methods.""" + + @pytest.fixture + def builder_exchange(self): + """Create a PolymarketBuilder instance for testing.""" + return PolymarketBuilder( + { + "api_key": "test_key", + "api_secret": "test_secret", + "api_passphrase": "test_pass", + } + ) + + def test_inherits_from_polymarket(self, builder_exchange): + """Test that PolymarketBuilder inherits from Polymarket.""" + from dr_manhattan.exchanges.polymarket import Polymarket + + assert isinstance(builder_exchange, Polymarket) + + def test_has_read_methods(self, builder_exchange): + """Test that read methods are inherited.""" + assert hasattr(builder_exchange, "fetch_markets") + assert hasattr(builder_exchange, "fetch_market") + assert hasattr(builder_exchange, "get_orderbook") + assert hasattr(builder_exchange, "search_markets") + + def test_has_write_methods(self, builder_exchange): + """Test that write methods are available.""" + assert hasattr(builder_exchange, "create_order") + assert hasattr(builder_exchange, "cancel_order") + assert hasattr(builder_exchange, "fetch_balance") + assert hasattr(builder_exchange, "fetch_open_orders") diff --git a/tests/test_polymarket_operator.py b/tests/test_polymarket_operator.py new file mode 100644 index 0000000..628310b --- /dev/null +++ b/tests/test_polymarket_operator.py @@ -0,0 +1,75 @@ +"""Tests for PolymarketOperator exchange class.""" + +import os +from unittest.mock import patch + +import pytest + +from dr_manhattan.base.errors import AuthenticationError +from dr_manhattan.exchanges.polymarket_operator import PolymarketOperator + + +class TestPolymarketOperatorInit: + """Tests for PolymarketOperator initialization.""" + + def test_requires_operator_key_env_var(self): + """Test that POLYMARKET_OPERATOR_KEY is required.""" + with patch.dict(os.environ, 
{}, clear=True): + with pytest.raises( + AuthenticationError, + match="POLYMARKET_OPERATOR_KEY environment variable is required", + ): + PolymarketOperator({"user_address": "0x1234"}) + + def test_requires_user_address(self): + """Test that user_address is required in config.""" + with patch.dict(os.environ, {"POLYMARKET_OPERATOR_KEY": "0x" + "a" * 64}): + with pytest.raises(AuthenticationError, match="user_address is required"): + PolymarketOperator({}) + + def test_requires_both_operator_key_and_user_address(self): + """Test that both operator key and user address are needed.""" + # Missing operator key + with patch.dict(os.environ, {}, clear=True): + with pytest.raises(AuthenticationError): + PolymarketOperator({"user_address": "0x1234"}) + + # Missing user address + with patch.dict(os.environ, {"POLYMARKET_OPERATOR_KEY": "0x" + "a" * 64}): + with pytest.raises(AuthenticationError): + PolymarketOperator({}) + + +class TestPolymarketOperatorProperties: + """Tests for PolymarketOperator properties.""" + + def test_user_address_property(self): + """Test user_address property returns the configured address.""" + test_address = "0x1234567890abcdef1234567890abcdef12345678" + with patch.dict(os.environ, {"POLYMARKET_OPERATOR_KEY": "0x" + "a" * 64}): + with patch.object(PolymarketOperator, "_initialize_operator_client", return_value=None): + operator = object.__new__(PolymarketOperator) + operator._user_address = test_address + assert operator.user_address == test_address + + def test_inherits_from_polymarket(self): + """Test that PolymarketOperator inherits from Polymarket.""" + from dr_manhattan.exchanges.polymarket import Polymarket + + assert issubclass(PolymarketOperator, Polymarket) + + +class TestPolymarketOperatorMethods: + """Tests for PolymarketOperator methods.""" + + def test_has_trading_methods(self): + """Test that trading methods are defined.""" + assert hasattr(PolymarketOperator, "create_order") + assert hasattr(PolymarketOperator, "cancel_order") + 
assert hasattr(PolymarketOperator, "fetch_balance") + assert hasattr(PolymarketOperator, "fetch_open_orders") + assert hasattr(PolymarketOperator, "fetch_positions") + + def test_has_operator_specific_methods(self): + """Test that operator-specific methods exist.""" + assert hasattr(PolymarketOperator, "check_operator_approval") diff --git a/uv.lock b/uv.lock index f1a824b..2c41444 100644 --- a/uv.lock +++ b/uv.lock @@ -884,6 +884,8 @@ dependencies = [ [package.optional-dependencies] mcp = [ { name = "mcp" }, + { name = "starlette" }, + { name = "uvicorn" }, ] [package.dev-dependencies] @@ -912,6 +914,8 @@ requires-dist = [ { name = "python-socketio", extras = ["asyncio-client"], specifier = ">=5.11.0" }, { name = "requests", specifier = ">=2.31.0" }, { name = "rich", specifier = ">=14.2.0" }, + { name = "starlette", marker = "extra == 'mcp'", specifier = ">=0.36.0" }, + { name = "uvicorn", marker = "extra == 'mcp'", specifier = ">=0.27.0" }, { name = "websockets", specifier = ">=15.0.1" }, ] provides-extras = ["mcp"] diff --git a/website/.gitignore b/website/.gitignore new file mode 100644 index 0000000..a14702c --- /dev/null +++ b/website/.gitignore @@ -0,0 +1,34 @@ +# dependencies (bun install) +node_modules + +# output +out +dist +*.tgz + +# code coverage +coverage +*.lcov + +# logs +logs +_.log +report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json + +# dotenv environment variable files +.env +.env.development.local +.env.test.local +.env.production.local +.env.local + +# caches +.eslintcache +.cache +*.tsbuildinfo + +# IntelliJ based IDEs +.idea + +# Finder (MacOS) folder config +.DS_Store diff --git a/website/CLAUDE.md b/website/CLAUDE.md new file mode 100644 index 0000000..b8100b7 --- /dev/null +++ b/website/CLAUDE.md @@ -0,0 +1,111 @@ +--- +description: Use Bun instead of Node.js, npm, pnpm, or vite. +globs: "*.ts, *.tsx, *.html, *.css, *.js, *.jsx, package.json" +alwaysApply: false +--- + +Default to using Bun instead of Node.js. 
+ +- Use `bun ` instead of `node ` or `ts-node ` +- Use `bun test` instead of `jest` or `vitest` +- Use `bun build ` instead of `webpack` or `esbuild` +- Use `bun install` instead of `npm install` or `yarn install` or `pnpm install` +- Use `bun run + + +``` + +With the following `frontend.tsx`: + +```tsx#frontend.tsx +import React from "react"; + +// import .css files directly and it works +import './index.css'; + +import { createRoot } from "react-dom/client"; + +const root = createRoot(document.body); + +export default function Frontend() { + return

Hello, world!

; +} + +root.render(); +``` + +Then, run index.ts + +```sh +bun --hot ./index.ts +``` + +For more information, read the Bun API docs in `node_modules/bun-types/docs/**.md`. diff --git a/website/bun.lock b/website/bun.lock new file mode 100644 index 0000000..0ac87cd --- /dev/null +++ b/website/bun.lock @@ -0,0 +1,378 @@ +{ + "lockfileVersion": 1, + "workspaces": { + "": { + "name": "website", + "dependencies": { + "@rainbow-me/rainbowkit": "^2.2.10", + "@tanstack/react-query": "^5.90.20", + "@vitejs/plugin-react": "^5.1.2", + "react": "^19.2.3", + "react-dom": "^19.2.3", + "react-router-dom": "^7.13.0", + "viem": "2.43.2", + "vite": "^7.3.1", + "wagmi": "^3.4.1", + }, + "devDependencies": { + "@types/bun": "latest", + "@types/react": "^19.2.9", + "@types/react-dom": "^19.2.3", + }, + "peerDependencies": { + "typescript": "^5", + }, + }, + }, + "packages": { + "@adraffy/ens-normalize": ["@adraffy/ens-normalize@1.11.1", "", {}, "sha512-nhCBV3quEgesuf7c7KYfperqSS14T8bYuvJ8PcLJp6znkZpFc0AuW4qBtr8eKVyPPe/8RSr7sglCWPU5eaxwKQ=="], + + "@babel/code-frame": ["@babel/code-frame@7.28.6", "", { "dependencies": { "@babel/helper-validator-identifier": "^7.28.5", "js-tokens": "^4.0.0", "picocolors": "^1.1.1" } }, "sha512-JYgintcMjRiCvS8mMECzaEn+m3PfoQiyqukOMCCVQtoJGYJw8j/8LBJEiqkHLkfwCcs74E3pbAUFNg7d9VNJ+Q=="], + + "@babel/compat-data": ["@babel/compat-data@7.28.6", "", {}, "sha512-2lfu57JtzctfIrcGMz992hyLlByuzgIk58+hhGCxjKZ3rWI82NnVLjXcaTqkI2NvlcvOskZaiZ5kjUALo3Lpxg=="], + + "@babel/core": ["@babel/core@7.28.6", "", { "dependencies": { "@babel/code-frame": "^7.28.6", "@babel/generator": "^7.28.6", "@babel/helper-compilation-targets": "^7.28.6", "@babel/helper-module-transforms": "^7.28.6", "@babel/helpers": "^7.28.6", "@babel/parser": "^7.28.6", "@babel/template": "^7.28.6", "@babel/traverse": "^7.28.6", "@babel/types": "^7.28.6", "@jridgewell/remapping": "^2.3.5", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", "json5": "^2.2.3", "semver": "^6.3.1" 
} }, "sha512-H3mcG6ZDLTlYfaSNi0iOKkigqMFvkTKlGUYlD8GW7nNOYRrevuA46iTypPyv+06V3fEmvvazfntkBU34L0azAw=="], + + "@babel/generator": ["@babel/generator@7.28.6", "", { "dependencies": { "@babel/parser": "^7.28.6", "@babel/types": "^7.28.6", "@jridgewell/gen-mapping": "^0.3.12", "@jridgewell/trace-mapping": "^0.3.28", "jsesc": "^3.0.2" } }, "sha512-lOoVRwADj8hjf7al89tvQ2a1lf53Z+7tiXMgpZJL3maQPDxh0DgLMN62B2MKUOFcoodBHLMbDM6WAbKgNy5Suw=="], + + "@babel/helper-compilation-targets": ["@babel/helper-compilation-targets@7.28.6", "", { "dependencies": { "@babel/compat-data": "^7.28.6", "@babel/helper-validator-option": "^7.27.1", "browserslist": "^4.24.0", "lru-cache": "^5.1.1", "semver": "^6.3.1" } }, "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA=="], + + "@babel/helper-globals": ["@babel/helper-globals@7.28.0", "", {}, "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw=="], + + "@babel/helper-module-imports": ["@babel/helper-module-imports@7.28.6", "", { "dependencies": { "@babel/traverse": "^7.28.6", "@babel/types": "^7.28.6" } }, "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw=="], + + "@babel/helper-module-transforms": ["@babel/helper-module-transforms@7.28.6", "", { "dependencies": { "@babel/helper-module-imports": "^7.28.6", "@babel/helper-validator-identifier": "^7.28.5", "@babel/traverse": "^7.28.6" }, "peerDependencies": { "@babel/core": "^7.0.0" } }, "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA=="], + + "@babel/helper-plugin-utils": ["@babel/helper-plugin-utils@7.28.6", "", {}, "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug=="], + + "@babel/helper-string-parser": ["@babel/helper-string-parser@7.27.1", "", {}, "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA=="], + + 
"@babel/helper-validator-identifier": ["@babel/helper-validator-identifier@7.28.5", "", {}, "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q=="], + + "@babel/helper-validator-option": ["@babel/helper-validator-option@7.27.1", "", {}, "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg=="], + + "@babel/helpers": ["@babel/helpers@7.28.6", "", { "dependencies": { "@babel/template": "^7.28.6", "@babel/types": "^7.28.6" } }, "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw=="], + + "@babel/parser": ["@babel/parser@7.28.6", "", { "dependencies": { "@babel/types": "^7.28.6" }, "bin": "./bin/babel-parser.js" }, "sha512-TeR9zWR18BvbfPmGbLampPMW+uW1NZnJlRuuHso8i87QZNq2JRF9i6RgxRqtEq+wQGsS19NNTWr2duhnE49mfQ=="], + + "@babel/plugin-transform-react-jsx-self": ["@babel/plugin-transform-react-jsx-self@7.27.1", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.27.1" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw=="], + + "@babel/plugin-transform-react-jsx-source": ["@babel/plugin-transform-react-jsx-source@7.27.1", "", { "dependencies": { "@babel/helper-plugin-utils": "^7.27.1" }, "peerDependencies": { "@babel/core": "^7.0.0-0" } }, "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw=="], + + "@babel/runtime": ["@babel/runtime@7.28.6", "", {}, "sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA=="], + + "@babel/template": ["@babel/template@7.28.6", "", { "dependencies": { "@babel/code-frame": "^7.28.6", "@babel/parser": "^7.28.6", "@babel/types": "^7.28.6" } }, "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ=="], + + "@babel/traverse": ["@babel/traverse@7.28.6", "", { "dependencies": { "@babel/code-frame": 
"^7.28.6", "@babel/generator": "^7.28.6", "@babel/helper-globals": "^7.28.0", "@babel/parser": "^7.28.6", "@babel/template": "^7.28.6", "@babel/types": "^7.28.6", "debug": "^4.3.1" } }, "sha512-fgWX62k02qtjqdSNTAGxmKYY/7FSL9WAS1o2Hu5+I5m9T0yxZzr4cnrfXQ/MX0rIifthCSs6FKTlzYbJcPtMNg=="], + + "@babel/types": ["@babel/types@7.28.6", "", { "dependencies": { "@babel/helper-string-parser": "^7.27.1", "@babel/helper-validator-identifier": "^7.28.5" } }, "sha512-0ZrskXVEHSWIqZM/sQZ4EV3jZJXRkio/WCxaqKZP1g//CEWEPSfeZFcms4XeKBCHU0ZKnIkdJeU/kF+eRp5lBg=="], + + "@emotion/hash": ["@emotion/hash@0.9.2", "", {}, "sha512-MyqliTZGuOm3+5ZRSaaBGP3USLw6+EGykkwZns2EPC5g8jJ4z9OrdZY9apkl3+UP9+sdz76YYkwCKP5gh8iY3g=="], + + "@esbuild/aix-ppc64": ["@esbuild/aix-ppc64@0.27.2", "", { "os": "aix", "cpu": "ppc64" }, "sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw=="], + + "@esbuild/android-arm": ["@esbuild/android-arm@0.27.2", "", { "os": "android", "cpu": "arm" }, "sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA=="], + + "@esbuild/android-arm64": ["@esbuild/android-arm64@0.27.2", "", { "os": "android", "cpu": "arm64" }, "sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA=="], + + "@esbuild/android-x64": ["@esbuild/android-x64@0.27.2", "", { "os": "android", "cpu": "x64" }, "sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A=="], + + "@esbuild/darwin-arm64": ["@esbuild/darwin-arm64@0.27.2", "", { "os": "darwin", "cpu": "arm64" }, "sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg=="], + + "@esbuild/darwin-x64": ["@esbuild/darwin-x64@0.27.2", "", { "os": "darwin", "cpu": "x64" }, "sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA=="], + + "@esbuild/freebsd-arm64": ["@esbuild/freebsd-arm64@0.27.2", "", { "os": "freebsd", "cpu": "arm64" 
}, "sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g=="], + + "@esbuild/freebsd-x64": ["@esbuild/freebsd-x64@0.27.2", "", { "os": "freebsd", "cpu": "x64" }, "sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA=="], + + "@esbuild/linux-arm": ["@esbuild/linux-arm@0.27.2", "", { "os": "linux", "cpu": "arm" }, "sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw=="], + + "@esbuild/linux-arm64": ["@esbuild/linux-arm64@0.27.2", "", { "os": "linux", "cpu": "arm64" }, "sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw=="], + + "@esbuild/linux-ia32": ["@esbuild/linux-ia32@0.27.2", "", { "os": "linux", "cpu": "ia32" }, "sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w=="], + + "@esbuild/linux-loong64": ["@esbuild/linux-loong64@0.27.2", "", { "os": "linux", "cpu": "none" }, "sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg=="], + + "@esbuild/linux-mips64el": ["@esbuild/linux-mips64el@0.27.2", "", { "os": "linux", "cpu": "none" }, "sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw=="], + + "@esbuild/linux-ppc64": ["@esbuild/linux-ppc64@0.27.2", "", { "os": "linux", "cpu": "ppc64" }, "sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ=="], + + "@esbuild/linux-riscv64": ["@esbuild/linux-riscv64@0.27.2", "", { "os": "linux", "cpu": "none" }, "sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA=="], + + "@esbuild/linux-s390x": ["@esbuild/linux-s390x@0.27.2", "", { "os": "linux", "cpu": "s390x" }, "sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w=="], + + "@esbuild/linux-x64": ["@esbuild/linux-x64@0.27.2", "", { "os": "linux", "cpu": "x64" }, 
"sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA=="], + + "@esbuild/netbsd-arm64": ["@esbuild/netbsd-arm64@0.27.2", "", { "os": "none", "cpu": "arm64" }, "sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw=="], + + "@esbuild/netbsd-x64": ["@esbuild/netbsd-x64@0.27.2", "", { "os": "none", "cpu": "x64" }, "sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA=="], + + "@esbuild/openbsd-arm64": ["@esbuild/openbsd-arm64@0.27.2", "", { "os": "openbsd", "cpu": "arm64" }, "sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA=="], + + "@esbuild/openbsd-x64": ["@esbuild/openbsd-x64@0.27.2", "", { "os": "openbsd", "cpu": "x64" }, "sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg=="], + + "@esbuild/openharmony-arm64": ["@esbuild/openharmony-arm64@0.27.2", "", { "os": "none", "cpu": "arm64" }, "sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag=="], + + "@esbuild/sunos-x64": ["@esbuild/sunos-x64@0.27.2", "", { "os": "sunos", "cpu": "x64" }, "sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg=="], + + "@esbuild/win32-arm64": ["@esbuild/win32-arm64@0.27.2", "", { "os": "win32", "cpu": "arm64" }, "sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg=="], + + "@esbuild/win32-ia32": ["@esbuild/win32-ia32@0.27.2", "", { "os": "win32", "cpu": "ia32" }, "sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ=="], + + "@esbuild/win32-x64": ["@esbuild/win32-x64@0.27.2", "", { "os": "win32", "cpu": "x64" }, "sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ=="], + + "@jridgewell/gen-mapping": ["@jridgewell/gen-mapping@0.3.13", "", { "dependencies": { 
"@jridgewell/sourcemap-codec": "^1.5.0", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA=="], + + "@jridgewell/remapping": ["@jridgewell/remapping@2.3.5", "", { "dependencies": { "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.24" } }, "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ=="], + + "@jridgewell/resolve-uri": ["@jridgewell/resolve-uri@3.1.2", "", {}, "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="], + + "@jridgewell/sourcemap-codec": ["@jridgewell/sourcemap-codec@1.5.5", "", {}, "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og=="], + + "@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.31", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="], + + "@noble/ciphers": ["@noble/ciphers@1.3.0", "", {}, "sha512-2I0gnIVPtfnMw9ee9h1dJG7tp81+8Ob3OJb3Mv37rx5L40/b0i7djjCVvGOVqc9AEIQyvyu1i6ypKdFw8R8gQw=="], + + "@noble/curves": ["@noble/curves@1.9.1", "", { "dependencies": { "@noble/hashes": "1.8.0" } }, "sha512-k11yZxZg+t+gWvBbIswW0yoJlu8cHOC7dhunwOzoWH/mXGBiYyR4YY6hAEK/3EUs4UpB8la1RfdRpeGsFHkWsA=="], + + "@noble/hashes": ["@noble/hashes@1.8.0", "", {}, "sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A=="], + + "@rainbow-me/rainbowkit": ["@rainbow-me/rainbowkit@2.2.10", "", { "dependencies": { "@vanilla-extract/css": "1.17.3", "@vanilla-extract/dynamic": "2.1.4", "@vanilla-extract/sprinkles": "1.6.4", "clsx": "2.1.1", "cuer": "0.0.3", "react-remove-scroll": "2.6.2", "ua-parser-js": "^1.0.37" }, "peerDependencies": { "@tanstack/react-query": ">=5.0.0", "react": ">=18", "react-dom": ">=18", "viem": "2.x", 
"wagmi": "^2.9.0" } }, "sha512-8+E4die1A2ovN9t3lWxWnwqTGEdFqThXDQRj+E4eDKuUKyymYD+66Gzm6S9yfg8E95c6hmGlavGUfYPtl1EagA=="], + + "@rolldown/pluginutils": ["@rolldown/pluginutils@1.0.0-beta.53", "", {}, "sha512-vENRlFU4YbrwVqNDZ7fLvy+JR1CRkyr01jhSiDpE1u6py3OMzQfztQU2jxykW3ALNxO4kSlqIDeYyD0Y9RcQeQ=="], + + "@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.56.0", "", { "os": "android", "cpu": "arm" }, "sha512-LNKIPA5k8PF1+jAFomGe3qN3bbIgJe/IlpDBwuVjrDKrJhVWywgnJvflMt/zkbVNLFtF1+94SljYQS6e99klnw=="], + + "@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.56.0", "", { "os": "android", "cpu": "arm64" }, "sha512-lfbVUbelYqXlYiU/HApNMJzT1E87UPGvzveGg2h0ktUNlOCxKlWuJ9jtfvs1sKHdwU4fzY7Pl8sAl49/XaEk6Q=="], + + "@rollup/rollup-darwin-arm64": ["@rollup/rollup-darwin-arm64@4.56.0", "", { "os": "darwin", "cpu": "arm64" }, "sha512-EgxD1ocWfhoD6xSOeEEwyE7tDvwTgZc8Bss7wCWe+uc7wO8G34HHCUH+Q6cHqJubxIAnQzAsyUsClt0yFLu06w=="], + + "@rollup/rollup-darwin-x64": ["@rollup/rollup-darwin-x64@4.56.0", "", { "os": "darwin", "cpu": "x64" }, "sha512-1vXe1vcMOssb/hOF8iv52A7feWW2xnu+c8BV4t1F//m9QVLTfNVpEdja5ia762j/UEJe2Z1jAmEqZAK42tVW3g=="], + + "@rollup/rollup-freebsd-arm64": ["@rollup/rollup-freebsd-arm64@4.56.0", "", { "os": "freebsd", "cpu": "arm64" }, "sha512-bof7fbIlvqsyv/DtaXSck4VYQ9lPtoWNFCB/JY4snlFuJREXfZnm+Ej6yaCHfQvofJDXLDMTVxWscVSuQvVWUQ=="], + + "@rollup/rollup-freebsd-x64": ["@rollup/rollup-freebsd-x64@4.56.0", "", { "os": "freebsd", "cpu": "x64" }, "sha512-KNa6lYHloW+7lTEkYGa37fpvPq+NKG/EHKM8+G/g9WDU7ls4sMqbVRV78J6LdNuVaeeK5WB9/9VAFbKxcbXKYg=="], + + "@rollup/rollup-linux-arm-gnueabihf": ["@rollup/rollup-linux-arm-gnueabihf@4.56.0", "", { "os": "linux", "cpu": "arm" }, "sha512-E8jKK87uOvLrrLN28jnAAAChNq5LeCd2mGgZF+fGF5D507WlG/Noct3lP/QzQ6MrqJ5BCKNwI9ipADB6jyiq2A=="], + + "@rollup/rollup-linux-arm-musleabihf": ["@rollup/rollup-linux-arm-musleabihf@4.56.0", "", { "os": "linux", "cpu": "arm" }, 
"sha512-jQosa5FMYF5Z6prEpTCCmzCXz6eKr/tCBssSmQGEeozA9tkRUty/5Vx06ibaOP9RCrW1Pvb8yp3gvZhHwTDsJw=="], + + "@rollup/rollup-linux-arm64-gnu": ["@rollup/rollup-linux-arm64-gnu@4.56.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-uQVoKkrC1KGEV6udrdVahASIsaF8h7iLG0U0W+Xn14ucFwi6uS539PsAr24IEF9/FoDtzMeeJXJIBo5RkbNWvQ=="], + + "@rollup/rollup-linux-arm64-musl": ["@rollup/rollup-linux-arm64-musl@4.56.0", "", { "os": "linux", "cpu": "arm64" }, "sha512-vLZ1yJKLxhQLFKTs42RwTwa6zkGln+bnXc8ueFGMYmBTLfNu58sl5/eXyxRa2RarTkJbXl8TKPgfS6V5ijNqEA=="], + + "@rollup/rollup-linux-loong64-gnu": ["@rollup/rollup-linux-loong64-gnu@4.56.0", "", { "os": "linux", "cpu": "none" }, "sha512-FWfHOCub564kSE3xJQLLIC/hbKqHSVxy8vY75/YHHzWvbJL7aYJkdgwD/xGfUlL5UV2SB7otapLrcCj2xnF1dg=="], + + "@rollup/rollup-linux-loong64-musl": ["@rollup/rollup-linux-loong64-musl@4.56.0", "", { "os": "linux", "cpu": "none" }, "sha512-z1EkujxIh7nbrKL1lmIpqFTc/sr0u8Uk0zK/qIEFldbt6EDKWFk/pxFq3gYj4Bjn3aa9eEhYRlL3H8ZbPT1xvA=="], + + "@rollup/rollup-linux-ppc64-gnu": ["@rollup/rollup-linux-ppc64-gnu@4.56.0", "", { "os": "linux", "cpu": "ppc64" }, "sha512-iNFTluqgdoQC7AIE8Q34R3AuPrJGJirj5wMUErxj22deOcY7XwZRaqYmB6ZKFHoVGqRcRd0mqO+845jAibKCkw=="], + + "@rollup/rollup-linux-ppc64-musl": ["@rollup/rollup-linux-ppc64-musl@4.56.0", "", { "os": "linux", "cpu": "ppc64" }, "sha512-MtMeFVlD2LIKjp2sE2xM2slq3Zxf9zwVuw0jemsxvh1QOpHSsSzfNOTH9uYW9i1MXFxUSMmLpeVeUzoNOKBaWg=="], + + "@rollup/rollup-linux-riscv64-gnu": ["@rollup/rollup-linux-riscv64-gnu@4.56.0", "", { "os": "linux", "cpu": "none" }, "sha512-in+v6wiHdzzVhYKXIk5U74dEZHdKN9KH0Q4ANHOTvyXPG41bajYRsy7a8TPKbYPl34hU7PP7hMVHRvv/5aCSew=="], + + "@rollup/rollup-linux-riscv64-musl": ["@rollup/rollup-linux-riscv64-musl@4.56.0", "", { "os": "linux", "cpu": "none" }, "sha512-yni2raKHB8m9NQpI9fPVwN754mn6dHQSbDTwxdr9SE0ks38DTjLMMBjrwvB5+mXrX+C0npX0CVeCUcvvvD8CNQ=="], + + "@rollup/rollup-linux-s390x-gnu": ["@rollup/rollup-linux-s390x-gnu@4.56.0", "", { "os": "linux", "cpu": "s390x" }, 
"sha512-zhLLJx9nQPu7wezbxt2ut+CI4YlXi68ndEve16tPc/iwoylWS9B3FxpLS2PkmfYgDQtosah07Mj9E0khc3Y+vQ=="], + + "@rollup/rollup-linux-x64-gnu": ["@rollup/rollup-linux-x64-gnu@4.56.0", "", { "os": "linux", "cpu": "x64" }, "sha512-MVC6UDp16ZSH7x4rtuJPAEoE1RwS8N4oK9DLHy3FTEdFoUTCFVzMfJl/BVJ330C+hx8FfprA5Wqx4FhZXkj2Kw=="], + + "@rollup/rollup-linux-x64-musl": ["@rollup/rollup-linux-x64-musl@4.56.0", "", { "os": "linux", "cpu": "x64" }, "sha512-ZhGH1eA4Qv0lxaV00azCIS1ChedK0V32952Md3FtnxSqZTBTd6tgil4nZT5cU8B+SIw3PFYkvyR4FKo2oyZIHA=="], + + "@rollup/rollup-openbsd-x64": ["@rollup/rollup-openbsd-x64@4.56.0", "", { "os": "openbsd", "cpu": "x64" }, "sha512-O16XcmyDeFI9879pEcmtWvD/2nyxR9mF7Gs44lf1vGGx8Vg2DRNx11aVXBEqOQhWb92WN4z7fW/q4+2NYzCbBA=="], + + "@rollup/rollup-openharmony-arm64": ["@rollup/rollup-openharmony-arm64@4.56.0", "", { "os": "none", "cpu": "arm64" }, "sha512-LhN/Reh+7F3RCgQIRbgw8ZMwUwyqJM+8pXNT6IIJAqm2IdKkzpCh/V9EdgOMBKuebIrzswqy4ATlrDgiOwbRcQ=="], + + "@rollup/rollup-win32-arm64-msvc": ["@rollup/rollup-win32-arm64-msvc@4.56.0", "", { "os": "win32", "cpu": "arm64" }, "sha512-kbFsOObXp3LBULg1d3JIUQMa9Kv4UitDmpS+k0tinPBz3watcUiV2/LUDMMucA6pZO3WGE27P7DsfaN54l9ing=="], + + "@rollup/rollup-win32-ia32-msvc": ["@rollup/rollup-win32-ia32-msvc@4.56.0", "", { "os": "win32", "cpu": "ia32" }, "sha512-vSSgny54D6P4vf2izbtFm/TcWYedw7f8eBrOiGGecyHyQB9q4Kqentjaj8hToe+995nob/Wv48pDqL5a62EWtg=="], + + "@rollup/rollup-win32-x64-gnu": ["@rollup/rollup-win32-x64-gnu@4.56.0", "", { "os": "win32", "cpu": "x64" }, "sha512-FeCnkPCTHQJFbiGG49KjV5YGW/8b9rrXAM2Mz2kiIoktq2qsJxRD5giEMEOD2lPdgs72upzefaUvS+nc8E3UzQ=="], + + "@rollup/rollup-win32-x64-msvc": ["@rollup/rollup-win32-x64-msvc@4.56.0", "", { "os": "win32", "cpu": "x64" }, "sha512-H8AE9Ur/t0+1VXujj90w0HrSOuv0Nq9r1vSZF2t5km20NTfosQsGGUXDaKdQZzwuLts7IyL1fYT4hM95TI9c4g=="], + + "@scure/base": ["@scure/base@1.2.6", "", {}, "sha512-g/nm5FgUa//MCj1gV09zTJTaM6KBAHqLN907YVQqf7zC49+DcO4B1so4ZX07Ef10Twr6nuqYEH9GEggFXA4Fmg=="], + + "@scure/bip32": 
["@scure/bip32@1.7.0", "", { "dependencies": { "@noble/curves": "~1.9.0", "@noble/hashes": "~1.8.0", "@scure/base": "~1.2.5" } }, "sha512-E4FFX/N3f4B80AKWp5dP6ow+flD1LQZo/w8UnLGYZO674jS6YnYeepycOOksv+vLPSpgN35wgKgy+ybfTb2SMw=="], + + "@scure/bip39": ["@scure/bip39@1.6.0", "", { "dependencies": { "@noble/hashes": "~1.8.0", "@scure/base": "~1.2.5" } }, "sha512-+lF0BbLiJNwVlev4eKelw1WWLaiKXw7sSl8T6FvBlWkdX+94aGJ4o8XjUdlyhTCjd8c+B3KT3JfS8P0bLRNU6A=="], + + "@tanstack/query-core": ["@tanstack/query-core@5.90.20", "", {}, "sha512-OMD2HLpNouXEfZJWcKeVKUgQ5n+n3A2JFmBaScpNDUqSrQSjiveC7dKMe53uJUg1nDG16ttFPz2xfilz6i2uVg=="], + + "@tanstack/react-query": ["@tanstack/react-query@5.90.20", "", { "dependencies": { "@tanstack/query-core": "5.90.20" }, "peerDependencies": { "react": "^18 || ^19" } }, "sha512-vXBxa+qeyveVO7OA0jX1z+DeyCA4JKnThKv411jd5SORpBKgkcVnYKCiBgECvADvniBX7tobwBmg01qq9JmMJw=="], + + "@types/babel__core": ["@types/babel__core@7.20.5", "", { "dependencies": { "@babel/parser": "^7.20.7", "@babel/types": "^7.20.7", "@types/babel__generator": "*", "@types/babel__template": "*", "@types/babel__traverse": "*" } }, "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA=="], + + "@types/babel__generator": ["@types/babel__generator@7.27.0", "", { "dependencies": { "@babel/types": "^7.0.0" } }, "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg=="], + + "@types/babel__template": ["@types/babel__template@7.4.4", "", { "dependencies": { "@babel/parser": "^7.1.0", "@babel/types": "^7.0.0" } }, "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A=="], + + "@types/babel__traverse": ["@types/babel__traverse@7.28.0", "", { "dependencies": { "@babel/types": "^7.28.2" } }, "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q=="], + + "@types/bun": ["@types/bun@1.3.6", "", { "dependencies": { "bun-types": "1.3.6" } }, 
"sha512-uWCv6FO/8LcpREhenN1d1b6fcspAB+cefwD7uti8C8VffIv0Um08TKMn98FynpTiU38+y2dUO55T11NgDt8VAA=="], + + "@types/estree": ["@types/estree@1.0.8", "", {}, "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="], + + "@types/node": ["@types/node@25.0.10", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-zWW5KPngR/yvakJgGOmZ5vTBemDoSqF3AcV/LrO5u5wTWyEAVVh+IT39G4gtyAkh3CtTZs8aX/yRM82OfzHJRg=="], + + "@types/react": ["@types/react@19.2.9", "", { "dependencies": { "csstype": "^3.2.2" } }, "sha512-Lpo8kgb/igvMIPeNV2rsYKTgaORYdO1XGVZ4Qz3akwOj0ySGYMPlQWa8BaLn0G63D1aSaAQ5ldR06wCpChQCjA=="], + + "@types/react-dom": ["@types/react-dom@19.2.3", "", { "peerDependencies": { "@types/react": "^19.2.0" } }, "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ=="], + + "@vanilla-extract/css": ["@vanilla-extract/css@1.17.3", "", { "dependencies": { "@emotion/hash": "^0.9.0", "@vanilla-extract/private": "^1.0.8", "css-what": "^6.1.0", "cssesc": "^3.0.0", "csstype": "^3.0.7", "dedent": "^1.5.3", "deep-object-diff": "^1.1.9", "deepmerge": "^4.2.2", "lru-cache": "^10.4.3", "media-query-parser": "^2.0.2", "modern-ahocorasick": "^1.0.0", "picocolors": "^1.0.0" } }, "sha512-jHivr1UPoJTX5Uel4AZSOwrCf4mO42LcdmnhJtUxZaRWhW4FviFbIfs0moAWWld7GOT+2XnuVZjjA/K32uUnMQ=="], + + "@vanilla-extract/dynamic": ["@vanilla-extract/dynamic@2.1.4", "", { "dependencies": { "@vanilla-extract/private": "^1.0.8" } }, "sha512-7+Ot7VlP3cIzhJnTsY/kBtNs21s0YD7WI1rKJJKYP56BkbDxi/wrQUWMGEczKPUDkJuFcvbye+E2ub1u/mHH9w=="], + + "@vanilla-extract/private": ["@vanilla-extract/private@1.0.9", "", {}, "sha512-gT2jbfZuaaCLrAxwXbRgIhGhcXbRZCG3v4TTUnjw0EJ7ArdBRxkq4msNJkbuRkCgfIK5ATmprB5t9ljvLeFDEA=="], + + "@vanilla-extract/sprinkles": ["@vanilla-extract/sprinkles@1.6.4", "", { "peerDependencies": { "@vanilla-extract/css": "^1.0.0" } }, 
"sha512-lW3MuIcdIeHKX81DzhTnw68YJdL1ial05exiuvTLJMdHXQLKcVB93AncLPajMM6mUhaVVx5ALZzNHMTrq/U9Hg=="], + + "@vitejs/plugin-react": ["@vitejs/plugin-react@5.1.2", "", { "dependencies": { "@babel/core": "^7.28.5", "@babel/plugin-transform-react-jsx-self": "^7.27.1", "@babel/plugin-transform-react-jsx-source": "^7.27.1", "@rolldown/pluginutils": "1.0.0-beta.53", "@types/babel__core": "^7.20.5", "react-refresh": "^0.18.0" }, "peerDependencies": { "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" } }, "sha512-EcA07pHJouywpzsoTUqNh5NwGayl2PPVEJKUSinGGSxFGYn+shYbqMGBg6FXDqgXum9Ou/ecb+411ssw8HImJQ=="], + + "@wagmi/connectors": ["@wagmi/connectors@7.1.5", "", { "peerDependencies": { "@base-org/account": "^2.5.1", "@coinbase/wallet-sdk": "^4.3.6", "@gemini-wallet/core": "~0.3.1", "@metamask/sdk": "~0.33.1", "@safe-global/safe-apps-provider": "~0.18.6", "@safe-global/safe-apps-sdk": "^9.1.0", "@wagmi/core": "3.3.1", "@walletconnect/ethereum-provider": "^2.21.1", "porto": "~0.2.35", "typescript": ">=5.7.3", "viem": "2.x" }, "optionalPeers": ["@base-org/account", "@coinbase/wallet-sdk", "@gemini-wallet/core", "@metamask/sdk", "@safe-global/safe-apps-provider", "@safe-global/safe-apps-sdk", "@walletconnect/ethereum-provider", "porto", "typescript"] }, "sha512-+hrb4RJywjGtUsDZNLSc4eOF+jD6pVkCZ/KFi24p993u0ymsm/kGTLXjhYx5r8Rf/cxFHEiaQaRnEfB9qyDJyw=="], + + "@wagmi/core": ["@wagmi/core@3.3.1", "", { "dependencies": { "eventemitter3": "5.0.1", "mipd": "0.0.7", "zustand": "5.0.0" }, "peerDependencies": { "@tanstack/query-core": ">=5.0.0", "ox": ">=0.11.1", "typescript": ">=5.7.3", "viem": "2.x" }, "optionalPeers": ["@tanstack/query-core", "ox", "typescript"] }, "sha512-0Q8VYnVNPHe/gZsvj+Zddt8VpmKoMHXoVd887svL21QGKXEIVYiV/8R3qMv0SyC7q+GbQ5x9xezB56u3S8bWAQ=="], + + "abitype": ["abitype@1.2.3", "", { "peerDependencies": { "typescript": ">=5.0.4", "zod": "^3.22.0 || ^4.0.0" }, "optionalPeers": ["typescript", "zod"] }, 
"sha512-Ofer5QUnuUdTFsBRwARMoWKOH1ND5ehwYhJ3OJ/BQO+StkwQjHw0XyVh4vDttzHB7QOFhPHa/o413PJ82gU/Tg=="], + + "baseline-browser-mapping": ["baseline-browser-mapping@2.9.18", "", { "bin": { "baseline-browser-mapping": "dist/cli.js" } }, "sha512-e23vBV1ZLfjb9apvfPk4rHVu2ry6RIr2Wfs+O324okSidrX7pTAnEJPCh/O5BtRlr7QtZI7ktOP3vsqr7Z5XoA=="], + + "browserslist": ["browserslist@4.28.1", "", { "dependencies": { "baseline-browser-mapping": "^2.9.0", "caniuse-lite": "^1.0.30001759", "electron-to-chromium": "^1.5.263", "node-releases": "^2.0.27", "update-browserslist-db": "^1.2.0" }, "bin": { "browserslist": "cli.js" } }, "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA=="], + + "bun-types": ["bun-types@1.3.6", "", { "dependencies": { "@types/node": "*" } }, "sha512-OlFwHcnNV99r//9v5IIOgQ9Uk37gZqrNMCcqEaExdkVq3Avwqok1bJFmvGMCkCE0FqzdY8VMOZpfpR3lwI+CsQ=="], + + "caniuse-lite": ["caniuse-lite@1.0.30001766", "", {}, "sha512-4C0lfJ0/YPjJQHagaE9x2Elb69CIqEPZeG0anQt9SIvIoOH4a4uaRl73IavyO+0qZh6MDLH//DrXThEYKHkmYA=="], + + "clsx": ["clsx@2.1.1", "", {}, "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA=="], + + "convert-source-map": ["convert-source-map@2.0.0", "", {}, "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="], + + "cookie": ["cookie@1.1.1", "", {}, "sha512-ei8Aos7ja0weRpFzJnEA9UHJ/7XQmqglbRwnf2ATjcB9Wq874VKH9kfjjirM6UhU2/E5fFYadylyhFldcqSidQ=="], + + "css-what": ["css-what@6.2.2", "", {}, "sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA=="], + + "cssesc": ["cssesc@3.0.0", "", { "bin": { "cssesc": "bin/cssesc" } }, "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg=="], + + "csstype": ["csstype@3.2.3", "", {}, "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ=="], + + "cuer": ["cuer@0.0.3", "", { "dependencies": { 
"qr": "~0" }, "peerDependencies": { "react": ">=18", "react-dom": ">=18", "typescript": ">=5.4.0" }, "optionalPeers": ["typescript"] }, "sha512-f/UNxRMRCYtfLEGECAViByA3JNflZImOk11G9hwSd+44jvzrc99J35u5l+fbdQ2+ZG441GvOpaeGYBmWquZsbQ=="], + + "debug": ["debug@4.4.3", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="], + + "dedent": ["dedent@1.7.1", "", { "peerDependencies": { "babel-plugin-macros": "^3.1.0" }, "optionalPeers": ["babel-plugin-macros"] }, "sha512-9JmrhGZpOlEgOLdQgSm0zxFaYoQon408V1v49aqTWuXENVlnCuY9JBZcXZiCsZQWDjTm5Qf/nIvAy77mXDAjEg=="], + + "deep-object-diff": ["deep-object-diff@1.1.9", "", {}, "sha512-Rn+RuwkmkDwCi2/oXOFS9Gsr5lJZu/yTGpK7wAaAIE75CC+LCGEZHpY6VQJa/RoJcrmaA/docWJZvYohlNkWPA=="], + + "deepmerge": ["deepmerge@4.3.1", "", {}, "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A=="], + + "detect-node-es": ["detect-node-es@1.1.0", "", {}, "sha512-ypdmJU/TbBby2Dxibuv7ZLW3Bs1QEmM7nHjEANfohJLvE0XVujisn1qPJcZxg+qDucsr+bP6fLD1rPS3AhJ7EQ=="], + + "electron-to-chromium": ["electron-to-chromium@1.5.278", "", {}, "sha512-dQ0tM1svDRQOwxnXxm+twlGTjr9Upvt8UFWAgmLsxEzFQxhbti4VwxmMjsDxVC51Zo84swW7FVCXEV+VAkhuPw=="], + + "esbuild": ["esbuild@0.27.2", "", { "optionalDependencies": { "@esbuild/aix-ppc64": "0.27.2", "@esbuild/android-arm": "0.27.2", "@esbuild/android-arm64": "0.27.2", "@esbuild/android-x64": "0.27.2", "@esbuild/darwin-arm64": "0.27.2", "@esbuild/darwin-x64": "0.27.2", "@esbuild/freebsd-arm64": "0.27.2", "@esbuild/freebsd-x64": "0.27.2", "@esbuild/linux-arm": "0.27.2", "@esbuild/linux-arm64": "0.27.2", "@esbuild/linux-ia32": "0.27.2", "@esbuild/linux-loong64": "0.27.2", "@esbuild/linux-mips64el": "0.27.2", "@esbuild/linux-ppc64": "0.27.2", "@esbuild/linux-riscv64": "0.27.2", "@esbuild/linux-s390x": "0.27.2", "@esbuild/linux-x64": "0.27.2", "@esbuild/netbsd-arm64": "0.27.2", "@esbuild/netbsd-x64": "0.27.2", 
"@esbuild/openbsd-arm64": "0.27.2", "@esbuild/openbsd-x64": "0.27.2", "@esbuild/openharmony-arm64": "0.27.2", "@esbuild/sunos-x64": "0.27.2", "@esbuild/win32-arm64": "0.27.2", "@esbuild/win32-ia32": "0.27.2", "@esbuild/win32-x64": "0.27.2" }, "bin": { "esbuild": "bin/esbuild" } }, "sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw=="], + + "escalade": ["escalade@3.2.0", "", {}, "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="], + + "eventemitter3": ["eventemitter3@5.0.1", "", {}, "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA=="], + + "fdir": ["fdir@6.5.0", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg=="], + + "fsevents": ["fsevents@2.3.3", "", { "os": "darwin" }, "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="], + + "gensync": ["gensync@1.0.0-beta.2", "", {}, "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg=="], + + "get-nonce": ["get-nonce@1.0.1", "", {}, "sha512-FJhYRoDaiatfEkUK8HKlicmu/3SGFD51q3itKDGoSTysQJBnfOcxU5GxnhE1E6soB76MbT0MBtnKJuXyAx+96Q=="], + + "isows": ["isows@1.0.7", "", { "peerDependencies": { "ws": "*" } }, "sha512-I1fSfDCZL5P0v33sVqeTDSpcstAg/N+wF5HS033mogOVIp4B+oHC7oOCsA3axAbBSGTJ8QubbNmnIRN/h8U7hg=="], + + "js-tokens": ["js-tokens@4.0.0", "", {}, "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="], + + "jsesc": ["jsesc@3.1.0", "", { "bin": { "jsesc": "bin/jsesc" } }, "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA=="], + + "json5": ["json5@2.2.3", "", { "bin": { "json5": "lib/cli.js" } }, 
"sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg=="], + + "lru-cache": ["lru-cache@10.4.3", "", {}, "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="], + + "media-query-parser": ["media-query-parser@2.0.2", "", { "dependencies": { "@babel/runtime": "^7.12.5" } }, "sha512-1N4qp+jE0pL5Xv4uEcwVUhIkwdUO3S/9gML90nqKA7v7FcOS5vUtatfzok9S9U1EJU8dHWlcv95WLnKmmxZI9w=="], + + "mipd": ["mipd@0.0.7", "", { "peerDependencies": { "typescript": ">=5.0.4" }, "optionalPeers": ["typescript"] }, "sha512-aAPZPNDQ3uMTdKbuO2YmAw2TxLHO0moa4YKAyETM/DTj5FloZo+a+8tU+iv4GmW+sOxKLSRwcSFuczk+Cpt6fg=="], + + "modern-ahocorasick": ["modern-ahocorasick@1.1.0", "", {}, "sha512-sEKPVl2rM+MNVkGQt3ChdmD8YsigmXdn5NifZn6jiwn9LRJpWm8F3guhaqrJT/JOat6pwpbXEk6kv+b9DMIjsQ=="], + + "ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="], + + "nanoid": ["nanoid@3.3.11", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w=="], + + "node-releases": ["node-releases@2.0.27", "", {}, "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA=="], + + "ox": ["ox@0.10.6", "", { "dependencies": { "@adraffy/ens-normalize": "^1.11.0", "@noble/ciphers": "^1.3.0", "@noble/curves": "1.9.1", "@noble/hashes": "^1.8.0", "@scure/bip32": "^1.7.0", "@scure/bip39": "^1.6.0", "abitype": "^1.2.3", "eventemitter3": "5.0.1" }, "peerDependencies": { "typescript": ">=5.4.0" }, "optionalPeers": ["typescript"] }, "sha512-J3QUxlwSM0uCL7sm5OsprlEeU6vNdKUyyukh1nUT3Jrog4l2FMJNIZPlffjPXCaS/hJYjdNe3XbEN8jCq1mnEQ=="], + + "picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="], + + "picomatch": ["picomatch@4.0.3", "", {}, 
"sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q=="], + + "postcss": ["postcss@8.5.6", "", { "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg=="], + + "qr": ["qr@0.5.4", "", {}, "sha512-gjVMHOt7CX+BQd7JLQ9fnS4kJK4Lj4u+Conq52tcCbW7YH3mATTtBbTMA+7cQ1rKOkDo61olFHJReawe+XFxIA=="], + + "react": ["react@19.2.3", "", {}, "sha512-Ku/hhYbVjOQnXDZFv2+RibmLFGwFdeeKHFcOTlrt7xplBnya5OGn/hIRDsqDiSUcfORsDC7MPxwork8jBwsIWA=="], + + "react-dom": ["react-dom@19.2.3", "", { "dependencies": { "scheduler": "^0.27.0" }, "peerDependencies": { "react": "^19.2.3" } }, "sha512-yELu4WmLPw5Mr/lmeEpox5rw3RETacE++JgHqQzd2dg+YbJuat3jH4ingc+WPZhxaoFzdv9y33G+F7Nl5O0GBg=="], + + "react-refresh": ["react-refresh@0.18.0", "", {}, "sha512-QgT5//D3jfjJb6Gsjxv0Slpj23ip+HtOpnNgnb2S5zU3CB26G/IDPGoy4RJB42wzFE46DRsstbW6tKHoKbhAxw=="], + + "react-remove-scroll": ["react-remove-scroll@2.6.2", "", { "dependencies": { "react-remove-scroll-bar": "^2.3.7", "react-style-singleton": "^2.2.1", "tslib": "^2.1.0", "use-callback-ref": "^1.3.3", "use-sidecar": "^1.1.2" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-KmONPx5fnlXYJQqC62Q+lwIeAk64ws/cUw6omIumRzMRPqgnYqhSSti99nbj0Ry13bv7dF+BKn7NB+OqkdZGTw=="], + + "react-remove-scroll-bar": ["react-remove-scroll-bar@2.3.8", "", { "dependencies": { "react-style-singleton": "^2.2.2", "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" }, "optionalPeers": ["@types/react"] }, "sha512-9r+yi9+mgU33AKcj6IbT9oRCO78WriSj6t/cF8DWBZJ9aOGPOTEDvdUDz1FwKim7QXWwmHqtdHnRJfhAxEG46Q=="], + + "react-router": ["react-router@7.13.0", "", { "dependencies": { "cookie": "^1.0.1", "set-cookie-parser": "^2.6.0" }, "peerDependencies": { "react": 
">=18", "react-dom": ">=18" }, "optionalPeers": ["react-dom"] }, "sha512-PZgus8ETambRT17BUm/LL8lX3Of+oiLaPuVTRH3l1eLvSPpKO3AvhAEb5N7ihAFZQrYDqkvvWfFh9p0z9VsjLw=="], + + "react-router-dom": ["react-router-dom@7.13.0", "", { "dependencies": { "react-router": "7.13.0" }, "peerDependencies": { "react": ">=18", "react-dom": ">=18" } }, "sha512-5CO/l5Yahi2SKC6rGZ+HDEjpjkGaG/ncEP7eWFTvFxbHP8yeeI0PxTDjimtpXYlR3b3i9/WIL4VJttPrESIf2g=="], + + "react-style-singleton": ["react-style-singleton@2.2.3", "", { "dependencies": { "get-nonce": "^1.0.0", "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-b6jSvxvVnyptAiLjbkWLE/lOnR4lfTtDAl+eUC7RZy+QQWc6wRzIV2CE6xBuMmDxc2qIihtDCZD5NPOFl7fRBQ=="], + + "rollup": ["rollup@4.56.0", "", { "dependencies": { "@types/estree": "1.0.8" }, "optionalDependencies": { "@rollup/rollup-android-arm-eabi": "4.56.0", "@rollup/rollup-android-arm64": "4.56.0", "@rollup/rollup-darwin-arm64": "4.56.0", "@rollup/rollup-darwin-x64": "4.56.0", "@rollup/rollup-freebsd-arm64": "4.56.0", "@rollup/rollup-freebsd-x64": "4.56.0", "@rollup/rollup-linux-arm-gnueabihf": "4.56.0", "@rollup/rollup-linux-arm-musleabihf": "4.56.0", "@rollup/rollup-linux-arm64-gnu": "4.56.0", "@rollup/rollup-linux-arm64-musl": "4.56.0", "@rollup/rollup-linux-loong64-gnu": "4.56.0", "@rollup/rollup-linux-loong64-musl": "4.56.0", "@rollup/rollup-linux-ppc64-gnu": "4.56.0", "@rollup/rollup-linux-ppc64-musl": "4.56.0", "@rollup/rollup-linux-riscv64-gnu": "4.56.0", "@rollup/rollup-linux-riscv64-musl": "4.56.0", "@rollup/rollup-linux-s390x-gnu": "4.56.0", "@rollup/rollup-linux-x64-gnu": "4.56.0", "@rollup/rollup-linux-x64-musl": "4.56.0", "@rollup/rollup-openbsd-x64": "4.56.0", "@rollup/rollup-openharmony-arm64": "4.56.0", "@rollup/rollup-win32-arm64-msvc": "4.56.0", "@rollup/rollup-win32-ia32-msvc": "4.56.0", "@rollup/rollup-win32-x64-gnu": "4.56.0", 
"@rollup/rollup-win32-x64-msvc": "4.56.0", "fsevents": "~2.3.2" }, "bin": { "rollup": "dist/bin/rollup" } }, "sha512-9FwVqlgUHzbXtDg9RCMgodF3Ua4Na6Gau+Sdt9vyCN4RhHfVKX2DCHy3BjMLTDd47ITDhYAnTwGulWTblJSDLg=="], + + "scheduler": ["scheduler@0.27.0", "", {}, "sha512-eNv+WrVbKu1f3vbYJT/xtiF5syA5HPIMtf9IgY/nKg0sWqzAUEvqY/xm7OcZc/qafLx/iO9FgOmeSAp4v5ti/Q=="], + + "semver": ["semver@6.3.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA=="], + + "set-cookie-parser": ["set-cookie-parser@2.7.2", "", {}, "sha512-oeM1lpU/UvhTxw+g3cIfxXHyJRc/uidd3yK1P242gzHds0udQBYzs3y8j4gCCW+ZJ7ad0yctld8RYO+bdurlvw=="], + + "source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="], + + "tinyglobby": ["tinyglobby@0.2.15", "", { "dependencies": { "fdir": "^6.5.0", "picomatch": "^4.0.3" } }, "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ=="], + + "tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="], + + "typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="], + + "ua-parser-js": ["ua-parser-js@1.0.41", "", { "bin": { "ua-parser-js": "script/cli.js" } }, "sha512-LbBDqdIC5s8iROCUjMbW1f5dJQTEFB1+KO9ogbvlb3nm9n4YHa5p4KTvFPWvh2Hs8gZMBuiB1/8+pdfe/tDPug=="], + + "undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], + + "update-browserslist-db": ["update-browserslist-db@1.2.3", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.1" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "update-browserslist-db": "cli.js" } }, 
"sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w=="], + + "use-callback-ref": ["use-callback-ref@1.3.3", "", { "dependencies": { "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-jQL3lRnocaFtu3V00JToYz/4QkNWswxijDaCVNZRiRTO3HQDLsdu1ZtmIUvV4yPp+rvWm5j0y0TG/S61cuijTg=="], + + "use-sidecar": ["use-sidecar@1.1.3", "", { "dependencies": { "detect-node-es": "^1.1.0", "tslib": "^2.0.0" }, "peerDependencies": { "@types/react": "*", "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 || ^19.0.0-rc" }, "optionalPeers": ["@types/react"] }, "sha512-Fedw0aZvkhynoPYlA5WXrMCAMm+nSWdZt6lzJQ7Ok8S6Q+VsHmHpRWndVRJ8Be0ZbkfPc5LRYH+5XrzXcEeLRQ=="], + + "use-sync-external-store": ["use-sync-external-store@1.4.0", "", { "peerDependencies": { "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" } }, "sha512-9WXSPC5fMv61vaupRkCKCxsPxBocVnwakBEkMIHHpkTTg6icbJtg6jzgtLDm4bl3cSHAca52rYWih0k4K3PfHw=="], + + "viem": ["viem@2.43.2", "", { "dependencies": { "@noble/curves": "1.9.1", "@noble/hashes": "1.8.0", "@scure/bip32": "1.7.0", "@scure/bip39": "1.6.0", "abitype": "1.2.3", "isows": "1.0.7", "ox": "0.10.6", "ws": "8.18.3" }, "peerDependencies": { "typescript": ">=5.0.4" }, "optionalPeers": ["typescript"] }, "sha512-9fLAuPArLHnePaXiyj1jHsB7AaMXMD1WCV3q9QhpJk3+O6u8R5Ey7XjTIx4e2n4OrtkL3tcJDK9qVL770+SVyA=="], + + "vite": ["vite@7.3.1", "", { "dependencies": { "esbuild": "^0.27.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", "rollup": "^4.43.0", "tinyglobby": "^0.2.15" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^20.19.0 || >=22.12.0", "jiti": ">=1.21.0", "less": "^4.0.0", "lightningcss": "^1.21.0", "sass": "^1.70.0", "sass-embedded": "^1.70.0", "stylus": ">=0.54.8", "sugarss": "^5.0.0", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": 
["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA=="], + + "wagmi": ["wagmi@3.4.1", "", { "dependencies": { "@wagmi/connectors": "7.1.5", "@wagmi/core": "3.3.1", "use-sync-external-store": "1.4.0" }, "peerDependencies": { "@tanstack/react-query": ">=5.0.0", "react": ">=18", "typescript": ">=5.7.3", "viem": "2.x" }, "optionalPeers": ["typescript"] }, "sha512-v6svxWxfIqV82lXNclOMn+h0SYCtXtxf0HWCwyjIJPZH1SR7yRqyQguWUDQtzvNSefFQEoCk+MVOX9nTR5d4Zw=="], + + "ws": ["ws@8.18.3", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg=="], + + "yallist": ["yallist@3.1.1", "", {}, "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="], + + "zustand": ["zustand@5.0.0", "", { "peerDependencies": { "@types/react": ">=18.0.0", "immer": ">=9.0.6", "react": ">=18.0.0", "use-sync-external-store": ">=1.2.0" }, "optionalPeers": ["@types/react", "immer", "react", "use-sync-external-store"] }, "sha512-LE+VcmbartOPM+auOjCCLQOsQ05zUTp8RkgwRzefUk+2jISdMMFnxvyTjA4YNWr5ZGXYbVsEMZosttuxUBkojQ=="], + + "@babel/helper-compilation-targets/lru-cache": ["lru-cache@5.1.1", "", { "dependencies": { "yallist": "^3.0.2" } }, "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w=="], + + "@wagmi/core/ox": ["ox@0.11.3", "", { "dependencies": { "@adraffy/ens-normalize": "^1.11.0", "@noble/ciphers": "^1.3.0", "@noble/curves": "1.9.1", "@noble/hashes": "^1.8.0", "@scure/bip32": "^1.7.0", "@scure/bip39": "^1.6.0", "abitype": "^1.2.3", "eventemitter3": "5.0.1" }, "peerDependencies": { "typescript": ">=5.4.0" }, "optionalPeers": ["typescript"] }, 
"sha512-1bWYGk/xZel3xro3l8WGg6eq4YEKlaqvyMtVhfMFpbJzK2F6rj4EDRtqDCWVEJMkzcmEi9uW2QxsqELokOlarw=="], + } +} diff --git a/website/css/docs.css b/website/css/docs.css new file mode 100644 index 0000000..a785e42 --- /dev/null +++ b/website/css/docs.css @@ -0,0 +1,360 @@ +/* Docs Layout */ +.docs-layout { + display: flex; + padding-top: 80px; + min-height: 100vh; + position: relative; +} + +/* Sidebar */ +.sidebar { + position: fixed; + top: 80px; + left: 0; + width: 280px; + height: calc(100vh - 80px); + padding: 2rem; + background: rgba(8, 8, 12, 0.6); + /* var(--bg-depth) with opacity */ + backdrop-filter: blur(10px); + border-right: 1px solid var(--border); + overflow-y: auto; + z-index: 50; +} + +.sidebar-section { + margin-bottom: 2rem; +} + +.sidebar-section h3 { + font-family: var(--font-mono); + font-size: 0.75rem; + font-weight: 600; + color: var(--text-muted); + text-transform: uppercase; + letter-spacing: 0.1em; + margin-bottom: 0.75rem; +} + +.sidebar-section a { + display: block; + padding: 0.4rem 0; + color: var(--text-muted); + text-decoration: none; + font-size: 0.9rem; + transition: all 0.2s; + border-left: 2px solid transparent; + padding-left: 1rem; + margin-left: -1rem; +} + +.sidebar-section a:hover, +.sidebar-section a.active { + color: var(--primary); + border-left-color: var(--primary); + background: linear-gradient(90deg, rgba(0, 242, 255, 0.05) 0%, transparent 100%); +} + +/* Main Content */ +.docs-content { + flex: 1; + margin-left: 280px; + padding: 3rem 4rem; + max-width: 1000px; +} + +.docs-content h1 { + font-size: 3rem; + margin-bottom: 1.5rem; + background: linear-gradient(135deg, #fff 0%, var(--primary) 100%); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; +} + +.docs-content h2 { + font-size: 2rem; + margin-top: 3.5rem; + margin-bottom: 1.5rem; + padding-top: 1.5rem; + border-top: 1px solid var(--border); +} + +.docs-content h3 { + font-size: 1.5rem; + margin-top: 2.5rem; + margin-bottom: 1rem; + color: 
var(--text-main); +} + +.docs-content p, +.docs-content li { + color: var(--text-muted); + line-height: 1.7; + margin-bottom: 1rem; + font-size: 1.05rem; +} + +.docs-content ul, +.docs-content ol { + margin-bottom: 1.5rem; + padding-left: 1.5rem; +} + +.docs-content a { + color: var(--primary); + text-decoration: none; + transition: all 0.2s; + border-bottom: 1px solid transparent; +} + +.docs-content a:not(.card):not(.exchange-card):hover { + border-bottom-color: var(--primary); + text-shadow: 0 0 10px rgba(0, 242, 255, 0.3); +} + +/* Code Blocks in Docs */ +.docs-content .code-block { + background: #0d0d12; + border: 1px solid var(--border); + border-radius: 12px; + margin: 1.5rem 0; + overflow: hidden; +} + +.docs-content .code-header { + display: flex; + align-items: center; + justify-content: space-between; + padding: 0.75rem 1rem; + background: rgba(255, 255, 255, 0.03); + border-bottom: 1px solid var(--border); +} + +.docs-content .dots { + display: flex; + gap: 6px; +} + +.docs-content .dot { + width: 10px; + height: 10px; + border-radius: 50%; +} + +.docs-content .dot:nth-child(1) { + background: #ff5f57; +} + +.docs-content .dot:nth-child(2) { + background: #ffbd2e; +} + +.docs-content .dot:nth-child(3) { + background: #28c840; +} + +.docs-content .filename { + font-family: var(--font-mono); + font-size: 0.8rem; + color: var(--text-muted); +} + +.docs-content pre { + padding: 1.5rem; + overflow-x: auto; + font-family: var(--font-mono); + font-size: 0.9rem; + line-height: 1.7; + color: var(--text-main); +} + +/* Inline Code */ +code { + font-family: var(--font-mono); + font-size: 0.85em; + background: rgba(0, 242, 255, 0.1); + padding: 0.2em 0.5em; + border-radius: 4px; + color: var(--primary); +} + +pre code { + background: none; + padding: 0; + color: inherit; + border-radius: 0; +} + +/* Cards & Grids */ +.card-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(280px, 1fr)); + gap: 1.5rem; + margin: 2rem 0; +} + +.card { + 
background: linear-gradient(135deg, rgba(255, 255, 255, 0.03) 0%, rgba(255, 255, 255, 0.01) 100%); + border: 1px solid var(--border); + border-radius: 12px; + padding: 1.5rem; + transition: all 0.3s; +} + +.card:hover { + border-color: rgba(0, 242, 255, 0.3); + transform: translateY(-2px); + background: linear-gradient(135deg, rgba(0, 242, 255, 0.05) 0%, transparent 100%); +} + +.card h4 { + font-size: 1.2rem; + margin-bottom: 0.5rem; + color: var(--text-main); +} + +.card p { + font-size: 0.95rem; + margin: 0; +} + +.card-icon { + width: 40px; + height: 40px; + display: flex; + align-items: center; + justify-content: center; + background: rgba(0, 242, 255, 0.1); + border-radius: 8px; + margin-bottom: 1rem; + color: var(--primary); +} + +.card-icon svg { + width: 20px; + height: 20px; +} + +/* Tables */ +table { + width: 100%; + border-collapse: collapse; + margin: 2rem 0; + border-radius: 8px; + overflow: hidden; + border: 1px solid var(--border); +} + +th, +td { + padding: 1rem; + text-align: left; + border-bottom: 1px solid var(--border); +} + +th { + font-weight: 600; + color: var(--primary); + background: rgba(0, 242, 255, 0.05); + font-family: var(--font-mono); + font-size: 0.9rem; + text-transform: uppercase; + letter-spacing: 0.05em; +} + +td { + color: var(--text-secondary); +} + +tr:last-child td { + border-bottom: none; +} + +tr:hover td { + background: rgba(255, 255, 255, 0.02); +} + +/* Callouts */ +.callout { + padding: 1.25rem 1.5rem; + border-radius: 8px; + margin: 2rem 0; + border-left: 4px solid; + background: rgba(255, 255, 255, 0.02); +} + +.callout-info { + border-color: var(--primary); + background: rgba(0, 242, 255, 0.05); +} + +.callout-warning { + border-color: #ffbd2e; + background: rgba(255, 189, 46, 0.05); +} + +.callout p { + margin: 0; + color: var(--text-main); +} + +/* Exchange Grid */ +.exchange-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(160px, 1fr)); + gap: 1.5rem; + margin: 2rem 0; +} + 
+.exchange-card { + display: flex; + flex-direction: column; + align-items: center; + padding: 2rem; + background: rgba(255, 255, 255, 0.02); + border: 1px solid var(--border); + border-radius: 12px; + text-decoration: none; + transition: all 0.3s; +} + +.exchange-card:hover { + border-color: var(--primary); + transform: translateY(-3px); + box-shadow: 0 10px 30px -10px rgba(0, 242, 255, 0.15); +} + +.exchange-card img { + width: 56px; + height: 56px; + border-radius: 12px; + margin-bottom: 1rem; + filter: grayscale(0.5); + transition: filter 0.3s; +} + +.exchange-card:hover img { + filter: grayscale(0); +} + +.exchange-card span { + color: var(--text-main); + font-weight: 500; +} + +/* Responsive */ +@media (max-width: 1024px) { + .sidebar { + display: none; + /* In a real app we'd add a mobile menu toggle */ + } + + .docs-content { + margin-left: 0; + padding: 2rem; + } + + footer { + padding-left: 2rem; + } +} \ No newline at end of file diff --git a/website/css/style.css b/website/css/style.css new file mode 100644 index 0000000..5777175 --- /dev/null +++ b/website/css/style.css @@ -0,0 +1,469 @@ +@import url('https://fonts.googleapis.com/css2?family=JetBrains+Mono:wght@400;500;600&family=Outfit:wght@300;400;500;600;700&display=swap'); + +:root { + --bg-void: #030305; + --bg-depth: #08080c; + --primary: #00f2ff; + --primary-dim: rgba(0, 242, 255, 0.1); + --primary-glow: 0 0 20px rgba(0, 242, 255, 0.4); + --accent: #5d00ff; + --text-main: #ffffff; + --text-muted: #8892b0; + --border: rgba(255, 255, 255, 0.08); + --glass: rgba(10, 10, 16, 0.6); + --glass-border: rgba(255, 255, 255, 0.05); + + --font-sans: 'Outfit', -apple-system, system-ui, sans-serif; + --font-mono: 'JetBrains Mono', monospace; +} + +* { + margin: 0; + padding: 0; + box-sizing: border-box; + -webkit-font-smoothing: antialiased; +} + +html { + scroll-behavior: smooth; + background-color: var(--bg-void); + color: var(--text-main); +} + +body { + font-family: var(--font-sans); + overflow-x: hidden; 
+ min-height: 100vh; + background-image: + radial-gradient(circle at 10% 20%, rgba(93, 0, 255, 0.05) 0%, transparent 40%), + radial-gradient(circle at 90% 80%, rgba(0, 242, 255, 0.05) 0%, transparent 40%); +} + +/* Typography */ +h1, h2, h3, h4 { + font-weight: 700; + line-height: 1.1; + color: var(--text-main); +} + +a { + text-decoration: none; + color: inherit; + transition: all 0.2s ease; +} + +/* Utilities */ +.container { + max-width: 1200px; + margin: 0 auto; + padding: 0 2rem; +} + +.text-glow { + text-shadow: 0 0 15px rgba(0, 242, 255, 0.3); +} + +.gradient-text { + background: linear-gradient(135deg, #fff 0%, var(--primary) 100%); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; + background-clip: text; +} + +/* Navigation */ +nav { + position: fixed; + top: 0; + left: 0; + width: 100%; + z-index: 100; + padding: 1.5rem 0; + backdrop-filter: blur(12px); + border-bottom: 1px solid rgba(255,255,255,0.03); + transition: all 0.3s ease; +} + +nav.scrolled { + background: rgba(3, 3, 5, 0.85); + padding: 1rem 0; +} + +.nav-inner { + display: flex; + justify-content: space-between; + align-items: center; +} + +.logo { + font-family: var(--font-mono); + font-weight: 700; + font-size: 1.25rem; + color: var(--text-main); + display: flex; + align-items: center; + gap: 0.5rem; +} + +.logo span { + color: var(--primary); +} + +.nav-links { + display: flex; + gap: 2rem; + align-items: center; +} + +.nav-link { + color: var(--text-muted); + font-size: 0.95rem; + font-weight: 500; +} + +.nav-link:hover { + color: var(--primary); + text-shadow: 0 0 8px var(--primary-dim); +} + +/* Hero Section */ +.hero { + min-height: 100vh; + display: flex; + align-items: center; + justify-content: center; + position: relative; + padding-top: 5rem; + overflow: hidden; +} + +.hero-bg-glow { + position: absolute; + width: 600px; + height: 600px; + background: radial-gradient(circle, var(--primary-dim) 0%, transparent 70%); + top: 50%; + left: 50%; + transform: 
translate(-50%, -50%); + filter: blur(80px); + z-index: 0; + opacity: 0.6; + animation: pulse-glow 8s infinite alternate; +} + +@keyframes pulse-glow { + 0% { opacity: 0.4; transform: translate(-50%, -50%) scale(0.8); } + 100% { opacity: 0.7; transform: translate(-50%, -50%) scale(1.1); } +} + +.hero-content { + position: relative; + z-index: 2; + text-align: center; + max-width: 800px; +} + +.badge { + display: inline-block; + padding: 0.35rem 1rem; + border-radius: 50px; + background: rgba(0, 242, 255, 0.05); + border: 1px solid rgba(0, 242, 255, 0.2); + color: var(--primary); + font-family: var(--font-mono); + font-size: 0.8rem; + margin-bottom: 2rem; + backdrop-filter: blur(5px); +} + +.hero h1 { + font-size: clamp(3rem, 8vw, 6rem); + letter-spacing: -0.04em; + margin-bottom: 1.5rem; + background: linear-gradient(to bottom, #fff, #a5b4fc); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; +} + +.hero h1 span { + color: var(--primary); + -webkit-text-fill-color: var(--primary); + text-shadow: 0 0 20px rgba(0, 242, 255, 0.5); +} + +.hero p { + font-size: 1.25rem; + color: var(--text-muted); + line-height: 1.6; + margin-bottom: 2.5rem; + max-width: 600px; + margin-left: auto; + margin-right: auto; +} + +.btn-group { + display: flex; + gap: 1rem; + justify-content: center; +} + +.btn { + padding: 1rem 2rem; + border-radius: 8px; + font-weight: 600; + font-size: 1rem; + cursor: pointer; + display: inline-flex; + align-items: center; + gap: 0.75rem; + transition: all 0.3s cubic-bezier(0.2, 0.8, 0.2, 1); +} + +.btn-primary { + background: var(--primary); + color: black; + box-shadow: 0 0 20px rgba(0, 242, 255, 0.2); +} + +.btn-primary:hover { + transform: translateY(-2px); + box-shadow: 0 0 30px rgba(0, 242, 255, 0.4); +} + +.btn-secondary { + background: rgba(255, 255, 255, 0.03); + border: 1px solid var(--border); + color: var(--text-main); + backdrop-filter: blur(10px); +} + +.btn-secondary:hover { + background: rgba(255, 255, 255, 0.08); + 
border-color: rgba(255, 255, 255, 0.2); + transform: translateY(-2px); +} + +/* Logos */ +.logos { + margin-top: 6rem; + padding-top: 3rem; + border-top: 1px solid var(--border); + display: flex; + flex-direction: column; + align-items: center; + gap: 1.5rem; +} + +.logos p { + font-family: var(--font-mono); + font-size: 0.8rem; + color: var(--text-muted); + text-transform: uppercase; + letter-spacing: 0.1em; +} + +.logo-grid { + display: flex; + gap: 3rem; + opacity: 0.6; + filter: grayscale(1); + transition: all 0.4s ease; +} + +.logo-grid:hover { + opacity: 1; + filter: grayscale(0); +} + +.logo-grid img { + height: 32px; + width: auto; + transition: transform 0.3s ease; +} + +.logo-grid img:hover { + transform: scale(1.1); +} + +/* Features */ +.features { + padding: 8rem 0; + background: var(--bg-depth); + position: relative; +} + +.section-title { + text-align: center; + margin-bottom: 5rem; +} + +.section-title h2 { + font-size: 3rem; + margin-bottom: 1rem; +} + +.section-title p { + color: var(--text-muted); + font-size: 1.1rem; +} + +.feature-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(350px, 1fr)); + gap: 2rem; +} + +.feature-card { + background: linear-gradient(180deg, rgba(255,255,255,0.03) 0%, rgba(255,255,255,0) 100%); + border: 1px solid var(--border); + padding: 2.5rem; + border-radius: 16px; + transition: all 0.3s ease; + position: relative; + overflow: hidden; +} + +.feature-card::before { + content: ''; + position: absolute; + top: 0; + left: 0; + width: 100%; + height: 100%; + background: radial-gradient(800px circle at var(--mouse-x) var(--mouse-y), rgba(0, 242, 255, 0.04), transparent 40%); + z-index: 1; + opacity: 0; + transition: opacity 0.3s; +} + +.feature-card:hover::before { + opacity: 1; +} + +.feature-card:hover { + border-color: rgba(0, 242, 255, 0.3); + transform: translateY(-5px); +} + +.icon-box { + width: 50px; + height: 50px; + border-radius: 12px; + background: rgba(0, 242, 255, 0.1); + display: 
flex; + align-items: center; + justify-content: center; + color: var(--primary); + margin-bottom: 1.5rem; +} + +.icon-box svg { + width: 24px; + height: 24px; +} + +.feature-card h3 { + font-size: 1.4rem; + margin-bottom: 1rem; +} + +.feature-card p { + color: var(--text-muted); + line-height: 1.6; +} + +/* Code Section */ +.code-showcase { + padding: 8rem 0; + position: relative; +} + +.code-window { + background: #0d0d12; /* slightly lighter than void */ + border-radius: 12px; + border: 1px solid var(--border); + box-shadow: 0 20px 50px rgba(0,0,0,0.5); + overflow: hidden; + max-width: 900px; + margin: 0 auto; +} + +.window-header { + background: rgba(255,255,255,0.03); + padding: 0.8rem 1.25rem; + display: flex; + align-items: center; + border-bottom: 1px solid var(--border); +} + +.window-controls { + display: flex; + gap: 8px; +} + +.control { + width: 12px; + height: 12px; + border-radius: 50%; +} + +.close { background: #ff5f56; } +.minimize { background: #ffbd2e; } +.maximize { background: #27c93f; } + +.window-title { + margin-left: 1rem; + font-family: var(--font-mono); + font-size: 0.85rem; + color: var(--text-muted); +} + +.code-content { + padding: 2rem; + overflow-x: auto; + font-family: var(--font-mono); + font-size: 0.95rem; + line-height: 1.7; +} + +/* Syntax Highlighting */ +.kwd { color: #ff79c6; } +.str { color: #f1fa8c; } +.comment { color: #6272a4; font-style: italic; } +.func { color: #8be9fd; } +.num { color: #bd93f9; } +.cls { color: #50fa7b; } + +/* Footer */ +footer { + padding: 4rem 0; + border-top: 1px solid var(--border); + text-align: center; + color: var(--text-muted); +} + +.footer-socials { + display: flex; + justify-content: center; + gap: 1.5rem; + margin-bottom: 2rem; +} + +.social-link { + color: var(--text-muted); + transition: color 0.2s; +} + +.social-link:hover { + color: var(--primary); +} + +@media (max-width: 768px) { + .logo-grid { + flex-wrap: wrap; + justify-content: center; + } + + .hero h1 { + font-size: 3rem; + } 
+} diff --git a/website/docs.html b/website/docs.html deleted file mode 100644 index a2f89ce..0000000 --- a/website/docs.html +++ /dev/null @@ -1,1208 +0,0 @@ - - - - - - Documentation | dr-manhattan - - - - - - - -
-
- - - -
- - -
-
-

Documentation

-

dr-manhattan is a CCXT-style unified API for prediction markets. It provides a simple, scalable, and extensible interface to interact with multiple prediction market platforms.

- -
-
-
- -
-

Unified Interface

-

One API for all prediction markets. Write once, deploy anywhere.

-
-
-
- -
-

Real-time Data

-

WebSocket support for live orderbook and trade updates.

-
-
-
- -
-

Type Safe

-

Full type hints throughout for better IDE support.

-
-
-
- -
-

Installation

-

Install dr-manhattan using uv (recommended):

- -
-
-
- - - -
- terminal -
-
# Create virtual environment and install
-uv venv
-uv pip install -e .
-
-# Or install directly from GitHub
-uv pip install -e git+https://github.com/guzus/dr-manhattan
-
- -
-

Note: dr-manhattan requires Python 3.11 or higher.

-
-
- -
-

Quick Start

-

Here's a simple example to get you started:

- -
-
-
- - - -
- example.py -
-
import dr_manhattan
-
-# Initialize any exchange
-polymarket = dr_manhattan.Polymarket({'timeout': 30})
-opinion = dr_manhattan.Opinion({'timeout': 30})
-limitless = dr_manhattan.Limitless({'timeout': 30})
-predictfun = dr_manhattan.PredictFun({'timeout': 30})
-
-# Fetch markets
-markets = polymarket.fetch_markets()
-
-for market in markets:
-    print(f"{market.question}: {market.prices}")
-
-
- -
-

API Reference

-

All exchanges implement the same base interface, making it easy to switch between platforms or build cross-exchange applications.

- -

Exchange Factory

-

Use the exchange factory to dynamically create exchange instances:

- -
-
-
- - - -
- factory.py -
-
from dr_manhattan import create_exchange, list_exchanges
-
-# List available exchanges
-print(list_exchanges())
-# ['polymarket', 'kalshi', 'limitless', 'opinion', 'predictfun']
-
-# Create exchange by name
-exchange = create_exchange('polymarket', {'timeout': 30})
-
-
- -
-

Markets

-

Fetch and query prediction markets:

- - - - - - - - - - - - - - - - - - - - - - -
MethodDescription
fetch_markets()Fetch all available markets
fetch_market(market_id)Fetch a specific market by ID
fetch_orderbook(market_id)Get the orderbook for a market
- -

Market Model

-
-
-
- - - -
- models/market.py -
-
class Market:
-    id: str              # Unique market identifier
-    question: str        # Market question
-    outcomes: list       # Available outcomes (e.g., ["Yes", "No"])
-    prices: dict         # Current prices for each outcome
-    volume: float        # Total trading volume
-    close_time: datetime # When the market closes
-    status: str          # Market status (open, closed, resolved)
-
-
- -
-

Orders

-

Create and manage orders:

- -
-
-
- - - -
- trading.py -
-
import dr_manhattan
-
-# Initialize with authentication
-polymarket = dr_manhattan.Polymarket({
-    'private_key': 'your_private_key',
-    'funder': 'your_funder_address',
-})
-
-# Create a buy order
-order = polymarket.create_order(
-    market_id="market_123",
-    outcome="Yes",
-    side=dr_manhattan.OrderSide.BUY,
-    price=0.65,
-    size=100,
-    params={'token_id': 'token_id'}
-)
-
-# Cancel an order
-polymarket.cancel_order(order.id)
-
-
- -
-

Positions

-

Track your positions and balances:

- -
-
-
- - - -
- positions.py -
-
# Fetch balance
-balance = polymarket.fetch_balance()
-print(f"USDC: {balance['USDC']}")
-
-# Fetch positions
-positions = polymarket.fetch_positions()
-for pos in positions:
-    print(f"{pos.market_id}: {pos.size} @ {pos.avg_price}")
-
-
- -
-

WebSockets

-

Subscribe to real-time market data:

- -
-
-
- - - -
- websocket.py -
-
import asyncio
-from dr_manhattan import PolymarketWS
-
-async def main():
-    ws = PolymarketWS()
-
-    async def on_orderbook(data):
-        print(f"Orderbook update: {data}")
-
-    await ws.subscribe_orderbook("market_id", on_orderbook)
-    await ws.run()
-
-asyncio.run(main())
-
-
- -
-

Supported Exchanges

-

dr-manhattan supports the following prediction market exchanges:

- - -
- -
-

Polymarket

-

Polymarket is the leading prediction market on Polygon. It uses USDC for trading and requires a wallet for authentication.

- -
-
-
- - - -
- polymarket_example.py -
-
import dr_manhattan
-
-polymarket = dr_manhattan.Polymarket({
-    'private_key': 'your_private_key',
-    'funder': 'your_funder_address',
-})
-
-# Fetch active markets
-markets = polymarket.fetch_markets()
-
-
- -
-

Kalshi

-

Kalshi is a US-regulated prediction market exchange. It uses RSA-PSS authentication.

- -
-
-
- - - -
- kalshi_example.py -
-
import dr_manhattan
-
-kalshi = dr_manhattan.Kalshi({
-    'api_key': 'your_api_key',
-    'private_key_path': '/path/to/private_key.pem',
-})
-
-
- -
-

Opinion

-

Opinion is a prediction market on BNB Chain.

- -
-
-
- - - -
- opinion_example.py -
-
import dr_manhattan
-
-opinion = dr_manhattan.Opinion({
-    'api_key': 'your_api_key',
-    'private_key': 'your_private_key',
-    'multi_sig_addr': 'your_multi_sig_addr'
-})
-
-
- -
-

Limitless

-

Limitless is a prediction market platform with WebSocket support.

- -
-
-
- - - -
- limitless_example.py -
-
import dr_manhattan
-
-limitless = dr_manhattan.Limitless({
-    'private_key': 'your_private_key',
-    'timeout': 30
-})
-
-
- -
-

Predict.fun

-

Predict.fun is a prediction market on BNB Chain with smart wallet support.

- -
-
-
- - - -
- predictfun_example.py -
-
import dr_manhattan
-
-predictfun = dr_manhattan.PredictFun({
-    'api_key': 'your_api_key',
-    'private_key': 'your_private_key',
-    'use_smart_wallet': True,
-    'smart_wallet_owner_private_key': 'your_owner_private_key',
-    'smart_wallet_address': 'your_smart_wallet_address'
-})
-
-
- -
-

Strategy Framework

-

dr-manhattan provides a base class for building trading strategies with order tracking, position management, and event logging.

- -
-
-
- - - -
- my_strategy.py -
-
from dr_manhattan import Strategy
-
-class MyStrategy(Strategy):
-    def on_tick(self):
-        self.log_status()
-        self.place_bbo_orders()
-
-# Run the strategy
-strategy = MyStrategy(exchange, market_id="123")
-strategy.run()
-
-
- -
-

Spread Strategy

-

The spread strategy implements BBO (Best Bid/Offer) market making. It places orders at the best bid and ask prices with a configurable spread.

- -
-
-
- - - -
- terminal -
-
uv run python examples/spread_strategy.py --exchange polymarket --slug fed-decision
-uv run python examples/spread_strategy.py --exchange opinion --market-id 813
-
-
- -
-

Spike Strategy

-

The spike strategy implements mean reversion trading. It detects price spikes and places counter-trend orders.

-
- -
-

Architecture

-

dr-manhattan follows a clean, modular architecture:

- -
-
-
- - - -
- structure -
-
dr_manhattan/
-├── base/               # Core abstractions
-│   ├── exchange.py     # Abstract base class for exchanges
-│   ├── exchange_client.py  # High-level trading client
-│   ├── exchange_factory.py # Exchange instantiation
-│   ├── strategy.py     # Strategy base class
-│   ├── order_tracker.py    # Order event tracking
-│   ├── websocket.py    # WebSocket base class
-│   └── errors.py       # Exception hierarchy
-├── exchanges/          # Exchange implementations
-│   ├── polymarket.py
-│   ├── polymarket_ws.py
-│   ├── kalshi.py
-│   ├── opinion.py
-│   ├── limitless.py
-│   ├── limitless_ws.py
-│   ├── predictfun.py
-│   └── predictfun_ws.py
-├── models/             # Data models
-│   ├── market.py
-│   ├── order.py
-│   ├── orderbook.py
-│   └── position.py
-├── strategies/         # Strategy implementations
-└── utils/              # Utilities
-
- -

Design Principles

-
    -
  • Unified Interface: All exchanges implement the same Exchange base class
  • -
  • Scalability: Adding new exchanges is straightforward - just implement the abstract methods
  • -
  • Simplicity: Clean abstractions with minimal dependencies
  • -
  • Type Safety: Full type hints throughout the codebase
  • -
-
- -
-

Error Handling

-

All errors inherit from DrManhattanError:

- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
ErrorDescription
ExchangeErrorExchange-specific errors
NetworkErrorConnectivity issues
RateLimitErrorRate limit exceeded
AuthenticationErrorAuth failures
InsufficientFundsNot enough balance
InvalidOrderInvalid order parameters
MarketNotFoundMarket doesn't exist
-
- -
-

MCP Server

-

Trade prediction markets directly from Claude using the Model Context Protocol (MCP).

- -
-
-
- - - -
- terminal -
-
# Install with MCP dependencies
-uv sync --extra mcp
-
-# Configure credentials
-cp .env.example .env
-# Edit .env with your POLYMARKET_PRIVATE_KEY and POLYMARKET_FUNDER
-
- -

Add to your Claude Code settings (~/.claude/settings.json or project .mcp.json):

- -
-
-
- - - -
- settings.json -
-
{
-  "mcpServers": {
-    "dr-manhattan": {
-      "command": "/path/to/dr-manhattan/.venv/bin/python",
-      "args": ["-m", "dr_manhattan.mcp.server"],
-      "cwd": "/path/to/dr-manhattan"
-    }
-  }
-}
-
- -

After restarting, you can:

-
    -
  • "Show my Polymarket balance"
  • -
  • "Find active prediction markets"
  • -
  • "Buy 10 USDC of Yes on market X at 0.55"
  • -
-
-
-
- - - - - - diff --git a/website/index.html b/website/index.html index a96e209..a5e879c 100644 --- a/website/index.html +++ b/website/index.html @@ -4,871 +4,13 @@ dr-manhattan | Unified API for Prediction Markets - + - - - - -
- -
-
Polymarket
-
Kalshi
-
Opinion
-
Limitless
-
Predict.fun
-
- - -
- - -
-
dr-manhattan
-
-
- -
-
-
Open Source
-

dr-manhattan

-

CCXT for prediction markets. Simple, scalable, and easy to extend.

- - -
- Supported Exchanges -
- - - - - -
-
-
-
- -
-
-

Simple, Unified Interface

-

Write exchange-agnostic code that works across all prediction markets

-
- -
-
- - - - example.py -
-
-
import dr_manhattan
-
-# Initialize any exchange with the same interface
-polymarket = dr_manhattan.Polymarket({'timeout': 30})
-opinion = dr_manhattan.Opinion({'timeout': 30})
-limitless = dr_manhattan.Limitless({'timeout': 30})
-
-# Fetch markets from any platform
-markets = polymarket.fetch_markets()
-
-for market in markets:
-    print(f"{market.question}: {market.prices}")
-
-
-
- -
-
-

Built for Developers

-

Everything you need to build prediction market applications

-
- -
-
-
- -
-

Unified Interface

-

One API to rule them all. Write code once and deploy across Polymarket, Kalshi, Opinion, and Limitless.

-
- -
-
- -
-

WebSocket Support

-

Real-time market data streaming with built-in WebSocket connections for live orderbook and trade updates.

-
- -
-
- -
-

Strategy Framework

-

Base class for building trading strategies with order tracking, position management, and event logging.

-
- -
-
- -
-

Easily Extensible

-

Add new exchanges by implementing abstract methods. Clean architecture makes integration straightforward.

-
- -
-
- -
-

Type Safe

-

Full type hints throughout the codebase. Catch errors early and enjoy superior IDE autocomplete.

-
- -
-
- -
-

Order Management

-

Create, cancel, and track orders with standardized error handling across all supported exchanges.

-
-
-
- -
-
-

Get Started in Seconds

-

Install with uv and start building

-
- -
- uv pip install -e git+https://github.com/guzus/dr-manhattan - -
-
- - - - +
+ diff --git a/website/js/main.js b/website/js/main.js new file mode 100644 index 0000000..a333692 --- /dev/null +++ b/website/js/main.js @@ -0,0 +1,43 @@ +document.addEventListener('DOMContentLoaded', () => { + // Spotlight effect for cards + const cards = document.querySelectorAll('.feature-card'); + + document.addEventListener('mousemove', (e) => { + cards.forEach(card => { + const rect = card.getBoundingClientRect(); + const x = e.clientX - rect.left; + const y = e.clientY - rect.top; + + card.style.setProperty('--mouse-x', `${x}px`); + card.style.setProperty('--mouse-y', `${y}px`); + }); + }); + + // Navbar scroll effect + const nav = document.querySelector('nav'); + window.addEventListener('scroll', () => { + if (window.scrollY > 50) { + nav.classList.add('scrolled'); + } else { + nav.classList.remove('scrolled'); + } + }); + + // Smooth reveal animation + const observerOptions = { + threshold: 0.1 + }; + + const observer = new IntersectionObserver((entries) => { + entries.forEach(entry => { + if (entry.isIntersecting) { + entry.target.classList.add('visible'); + observer.unobserve(entry.target); + } + }); + }, observerOptions); + + document.querySelectorAll('.animate-on-scroll').forEach(el => { + observer.observe(el); + }); +}); diff --git a/website/netlify.toml b/website/netlify.toml new file mode 100644 index 0000000..d5083ef --- /dev/null +++ b/website/netlify.toml @@ -0,0 +1,9 @@ +[build] + publish = "dist" + command = "bun install && bun run build" + +[[redirects]] + from = "/*" + to = "/index.html" + status = 200 + force = false diff --git a/website/package.json b/website/package.json new file mode 100644 index 0000000..0b93e47 --- /dev/null +++ b/website/package.json @@ -0,0 +1,30 @@ +{ + "name": "dr-manhattan-website", + "version": "1.0.0", + "type": "module", + "private": true, + "scripts": { + "dev": "vite", + "build": "vite build", + "preview": "vite preview" + }, + "devDependencies": { + "@types/bun": "latest", + "@types/react": "^19.2.9", + 
"@types/react-dom": "^19.2.3" + }, + "peerDependencies": { + "typescript": "^5" + }, + "dependencies": { + "@rainbow-me/rainbowkit": "^2.2.10", + "@tanstack/react-query": "^5.90.20", + "@vitejs/plugin-react": "^5.1.2", + "react": "^19.2.3", + "react-dom": "^19.2.3", + "react-router-dom": "^7.13.0", + "viem": "2.43.2", + "vite": "^7.3.1", + "wagmi": "^3.4.1" + } +} diff --git a/website/public/assets/claude.png b/website/public/assets/claude.png new file mode 100644 index 0000000..64b6a21 Binary files /dev/null and b/website/public/assets/claude.png differ diff --git a/website/public/assets/favicon.jpg b/website/public/assets/favicon.jpg new file mode 100644 index 0000000..2b34c65 Binary files /dev/null and b/website/public/assets/favicon.jpg differ diff --git a/website/public/assets/kalshi.jpeg b/website/public/assets/kalshi.jpeg new file mode 100644 index 0000000..8c8342c Binary files /dev/null and b/website/public/assets/kalshi.jpeg differ diff --git a/website/public/assets/limitless.jpg b/website/public/assets/limitless.jpg new file mode 100644 index 0000000..93ff85e Binary files /dev/null and b/website/public/assets/limitless.jpg differ diff --git a/website/public/assets/opinion.jpg b/website/public/assets/opinion.jpg new file mode 100644 index 0000000..627dd23 Binary files /dev/null and b/website/public/assets/opinion.jpg differ diff --git a/website/public/assets/polymarket.png b/website/public/assets/polymarket.png new file mode 100644 index 0000000..7ba9a0f Binary files /dev/null and b/website/public/assets/polymarket.png differ diff --git a/website/public/assets/predict_fun.jpg b/website/public/assets/predict_fun.jpg new file mode 100644 index 0000000..da6b23f Binary files /dev/null and b/website/public/assets/predict_fun.jpg differ diff --git a/website/public/favicon.jpg b/website/public/favicon.jpg new file mode 100644 index 0000000..2b34c65 Binary files /dev/null and b/website/public/favicon.jpg differ diff --git a/website/src/main.tsx 
b/website/src/main.tsx new file mode 100644 index 0000000..f5ca6d0 --- /dev/null +++ b/website/src/main.tsx @@ -0,0 +1,39 @@ +import React from 'react' +import ReactDOM from 'react-dom/client' +import { BrowserRouter, Routes, Route } from 'react-router-dom' +import { WagmiProvider } from 'wagmi' +import { RainbowKitProvider } from '@rainbow-me/rainbowkit' +import { QueryClient, QueryClientProvider } from '@tanstack/react-query' +import '@rainbow-me/rainbowkit/styles.css' + +import { config } from './wagmi' +import HomePage from './pages/HomePage' +import DocsPage from './pages/DocsPage' +import ApprovePage from './pages/ApprovePage' +import './styles.css' + +const queryClient = new QueryClient() + +function App() { + return ( + + + + + + } /> + } /> + } /> + + + + + + ) +} + +ReactDOM.createRoot(document.getElementById('root')!).render( + + + , +) diff --git a/website/src/pages/ApprovePage.tsx b/website/src/pages/ApprovePage.tsx new file mode 100644 index 0000000..0b14896 --- /dev/null +++ b/website/src/pages/ApprovePage.tsx @@ -0,0 +1,496 @@ +import { useState } from 'react' +import { ConnectButton } from '@rainbow-me/rainbowkit' +import { useAccount, useSignMessage, useWriteContract, useReadContract } from 'wagmi' +import { Link } from 'react-router-dom' + +import { createAuthMessage, OPERATOR_ADDRESS, CTF_CONTRACT_ADDRESS, CTF_ABI, EXPIRY_OPTIONS } from '../wagmi' + +export default function ApprovePage() { + const { address, isConnected } = useAccount() + const { signMessageAsync } = useSignMessage() + const { writeContractAsync, isPending: isWritePending } = useWriteContract() + + const [step, setStep] = useState(1) + const [signature, setSignature] = useState(null) + const [timestamp, setTimestamp] = useState(null) + const [expiry, setExpiry] = useState(EXPIRY_OPTIONS[1].value) + const [error, setError] = useState(null) + const [copied, setCopied] = useState(false) + const [showRevoke, setShowRevoke] = useState(false) + + const { data: isApproved, refetch: 
refetchApproval } = useReadContract({ + address: CTF_CONTRACT_ADDRESS, + abi: CTF_ABI, + functionName: 'isApprovedForAll', + args: address ? [address, OPERATOR_ADDRESS] : undefined, + }) + + const handleApproveOperator = async () => { + if (!address) return + setError(null) + + try { + await writeContractAsync({ + address: CTF_CONTRACT_ADDRESS, + abi: CTF_ABI, + functionName: 'setApprovalForAll', + args: [OPERATOR_ADDRESS, true], + }) + await refetchApproval() + setStep(3) + } catch (err) { + setError(err instanceof Error ? err.message : 'Failed to approve operator') + } + } + + const handleRevokeOperator = async () => { + if (!address) return + setError(null) + + try { + await writeContractAsync({ + address: CTF_CONTRACT_ADDRESS, + abi: CTF_ABI, + functionName: 'setApprovalForAll', + args: [OPERATOR_ADDRESS, false], + }) + await refetchApproval() + setShowRevoke(false) + setStep(1) + setSignature(null) + setTimestamp(null) + } catch (err) { + setError(err instanceof Error ? err.message : 'Failed to revoke operator') + } + } + + const handleSignAuth = async () => { + if (!address) return + setError(null) + + try { + const ts = Math.floor(Date.now() / 1000) + const message = createAuthMessage(address, ts, expiry) + const sig = await signMessageAsync({ message }) + setSignature(sig) + setTimestamp(ts) + setStep(4) + } catch (err) { + setError(err instanceof Error ? err.message : 'Failed to sign message') + } + } + + const getExpiryLabel = (seconds: number): string => { + const option = EXPIRY_OPTIONS.find(o => o.value === seconds) + return option?.label || `${seconds} seconds` + } + + const configSnippet = signature && timestamp ? 
`{ + "mcpServers": { + "dr-manhattan": { + "type": "sse", + "url": "https://dr-manhattan-mcp-production.up.railway.app/sse", + "headers": { + "X-Polymarket-Wallet-Address": "${address}", + "X-Polymarket-Auth-Signature": "${signature}", + "X-Polymarket-Auth-Timestamp": "${timestamp}", + "X-Polymarket-Auth-Expiry": "${expiry}" + } + } + } +}` : '' + + const copyConfig = () => { + navigator.clipboard.writeText(configSnippet) + setCopied(true) + setTimeout(() => setCopied(false), 2000) + } + + return ( +
+
+ dr-manhattan +
+ Docs + + + +
+
+ +
+
+

MCP Server Integration Guide

+

Connect Claude to Polymarket through Dr. Manhattan's MCP server

+
+ + {/* Introduction */} +
+

What is an MCP Server?

+

+ MCP (Model Context Protocol) is an open standard that allows AI assistants like Claude to securely + interact with external services. Dr. Manhattan provides an MCP server that enables Claude to: +

+
    +
  • Fetch real-time market data from Polymarket
  • +
  • View your positions and balances
  • +
  • Place and manage orders on your behalf
  • +
  • Execute trading strategies you define
  • +
+
+ + {/* Read-Only Mode */} +
+

Quick Start: Read-Only Mode

+

+ Want to explore market data without connecting a wallet? Use read-only mode to fetch markets, + prices, and orderbooks without any authentication. +

+
+
+ ~/.claude/settings.json + +
+
{`{
+  "mcpServers": {
+    "dr-manhattan": {
+      "type": "sse",
+      "url": "https://dr-manhattan-mcp-production.up.railway.app/sse"
+    }
+  }
+}`}
+
+
+

Available in read-only mode:

+
    +
  • Search and browse all Polymarket markets
  • +
  • Get real-time prices and orderbooks
  • +
  • View market details and resolution criteria
  • +
  • Analyze trading volume and liquidity
  • +
+
+

+ To place trades or view your positions, continue with the full setup below. +

+
+ + {/* How it Works - Trading Mode */} +
+

Trading Mode Setup

+

+ To place trades, Dr. Manhattan uses Operator Mode, a secure delegation mechanism built into Polymarket's + smart contracts. Here's how it works: +

+
+

Operator Mode Explained

+
    +
  1. + You approve Dr. Manhattan as an operator - This is an on-chain transaction that + grants permission to trade on your behalf. You can revoke this at any time. +
  2. +
  3. + You sign an authentication message - This proves you own the wallet and creates + a time-limited session. No private keys are shared. +
  4. +
  5. + Claude sends trading requests to the MCP server - The server validates your + signature and executes trades through Polymarket's API. +
  6. +
+
+

+ Your private keys never leave your wallet. The operator can only trade positions - it cannot + withdraw funds or transfer assets. +

+
+ + {/* Security */} +
+

Security Considerations

+
+
+

What the operator CAN do:

+
    +
  • Place buy/sell orders on Polymarket
  • +
  • Cancel your open orders
  • +
  • View your positions and balances
  • +
+
+
+

What the operator CANNOT do:

+
    +
  • Withdraw funds from your wallet
  • +
  • Transfer your assets to another address
  • +
  • Access your private keys
  • +
  • Trade after you revoke access
  • +
+
+
+

+ The operator contract is Polymarket's official CTF Exchange contract at{' '} + + {CTF_CONTRACT_ADDRESS.slice(0, 10)}...{CTF_CONTRACT_ADDRESS.slice(-8)} + +

+
+ + {/* Setup Steps */} +
+

Setup Steps

+ + {/* Step 1 */} +
+
+ = 1 ? 'active' : ''}`}>1 +
+

Connect Your Wallet

+

Connect the wallet you use for Polymarket trading.

+
+
+
+
+ +
+ {isConnected && ( +
+ Connected: {address?.slice(0, 6)}...{address?.slice(-4)} + {!isApproved && step === 1 && ( + + )} + {isApproved && step === 1 && ( + + )} +
+ )} +
+
+ + {/* Step 2 */} +
+
+ = 2 ? 'active' : ''} ${isApproved ? 'completed' : ''}`}>2 +
+

Approve Operator Access

+

Grant Dr. Manhattan permission to trade on your behalf.

+
+
+ {step >= 2 && ( +
+
+

+ This transaction calls setApprovalForAll on Polymarket's CTF Exchange contract, + allowing our operator address to execute trades for your account. +

+
+ + +
+ Function: + setApprovalForAll(operator, true) +
+
+
+ {isApproved ? ( +
+ Operator approved + +
+ ) : ( + + )} +
+ )} +
+ + {/* Step 3 */} +
+
+ = 3 ? 'active' : ''}`}>3 +
+

Sign Authentication Message

+

Create a time-limited session for the MCP server.

+
+
+ {step >= 3 && ( +
+
+

+ This signature proves you own the wallet without exposing your private key. + The MCP server validates this signature with each request. +

+

+ Choose how long the signature should be valid. Shorter durations are more secure + but require more frequent re-authentication. +

+
+
+ +
+ {EXPIRY_OPTIONS.map((option) => ( + + ))} +
+
+ {signature ? ( +
+ Signature created (valid for {getExpiryLabel(expiry)}) + +
+ ) : ( + + )} +
+ )} +
+ + {/* Step 4 */} +
+
+ = 4 ? 'active' : ''}`}>4 +
+

Configure Claude

+

Add the MCP server configuration to Claude.

+
+
+ {step >= 4 && ( +
+
+

+ Copy this configuration to your Claude settings file. The headers contain your + wallet address and signature for authentication. +

+

+ File location: ~/.claude/settings.json +

+
+
+
+ ~/.claude/settings.json +
+ Expires in {getExpiryLabel(expiry)} + +
+
+
{configSnippet}
+
+
+

Final Steps:

+
    +
  1. Open ~/.claude/settings.json in a text editor
  2. +
  3. Paste the configuration above
  4. +
  5. Save the file and restart Claude
  6. +
  7. Ask Claude to check your Polymarket positions
  8. +
+
+
+ )} +
+
+ + {error && ( +
{error}
+ )} + + {/* Revoke Section */} + {isConnected && isApproved && ( +
+

Revoke Access

+

+ You can revoke operator access at any time. This immediately prevents any further + trades from being executed on your behalf. +

+ {showRevoke ? ( +
+

Are you sure? This will invalidate all existing sessions.

+
+ + +
+
+ ) : ( + + )} +
+ )} + + {/* FAQ */} +
+

Frequently Asked Questions

+
+
+

Is this safe?

+

+ Yes. The operator can only trade positions on Polymarket - it cannot withdraw or + transfer your funds. You maintain full control and can revoke access instantly. +

+
+
+

What happens when my signature expires?

+

+ You'll need to sign a new authentication message. The operator approval remains + active, so you only need to repeat Step 3. +

+
+
+

Can I use this with multiple wallets?

+

+ Yes! Repeat this process for each wallet. You can configure multiple MCP servers + in Claude's settings with different names. +

+
+
+

Where can I see the source code?

+

+ Dr. Manhattan is fully open source. View the code on{' '} + GitHub. +

+
+
+
+
+ +
+

MIT License. Built for the prediction market community.

+
+
+ ) +} diff --git a/website/src/pages/DocsPage.tsx b/website/src/pages/DocsPage.tsx new file mode 100644 index 0000000..731fe20 --- /dev/null +++ b/website/src/pages/DocsPage.tsx @@ -0,0 +1,731 @@ +import { useEffect } from 'react' +import { Link } from 'react-router-dom' + +export default function DocsPage() { + useEffect(() => { + const handleScroll = () => { + const sections = document.querySelectorAll('section[id]') + const sidebarLinks = document.querySelectorAll('.sidebar-section a') + let current = '' + + sections.forEach(section => { + const sectionTop = (section as HTMLElement).offsetTop + if (window.scrollY >= sectionTop - 150) { + current = section.getAttribute('id') || '' + } + }) + + sidebarLinks.forEach(link => { + link.classList.remove('active') + if (link.getAttribute('href') === '#' + current) { + link.classList.add('active') + } + }) + } + + window.addEventListener('scroll', handleScroll) + handleScroll() + return () => window.removeEventListener('scroll', handleScroll) + }, []) + + return ( + <> +
+
+ + + +
+ + +
+
+

Documentation

+

dr-manhattan is a CCXT-style unified API for prediction markets. It provides a simple, scalable, and extensible interface to interact with multiple prediction market platforms.

+ +
+
+
+ +
+

Unified Interface

+

One API for all prediction markets. Write once, deploy anywhere.

+
+
+
+ +
+

Real-time Data

+

WebSocket support for live orderbook and trade updates.

+
+
+
+ +
+

Type Safe

+

Full type hints throughout for better IDE support.

+
+
+
+ +
+

Installation

+

Install dr-manhattan using uv (recommended):

+ +
+
+
+ + + +
+ terminal +
+
{`# Create virtual environment and install
+uv venv
+uv pip install -e .
+
+# Or install directly from GitHub
+uv pip install -e git+https://github.com/guzus/dr-manhattan`}
+
+ +
+

Note: dr-manhattan requires Python 3.11 or higher.

+
+
+ +
+

Quick Start

+

Here's a simple example to get you started:

+ +
+
+
+ + + +
+ example.py +
+
{`import dr_manhattan
+
+# Initialize any exchange
+polymarket = dr_manhattan.Polymarket({'timeout': 30})
+opinion = dr_manhattan.Opinion({'timeout': 30})
+limitless = dr_manhattan.Limitless({'timeout': 30})
+predictfun = dr_manhattan.PredictFun({'timeout': 30})
+
+# Fetch markets
+markets = polymarket.fetch_markets()
+
+for market in markets:
+    print(f"{market.question}: {market.prices}")`}
+
+
+ +
+

API Reference

+

All exchanges implement the same base interface, making it easy to switch between platforms or build cross-exchange applications.

+ +

Exchange Factory

+

Use the exchange factory to dynamically create exchange instances:

+ +
+
+
+ + + +
+ factory.py +
+
{`from dr_manhattan import create_exchange, list_exchanges
+
+# List available exchanges
+print(list_exchanges())
+# ['polymarket', 'kalshi', 'limitless', 'opinion', 'predictfun']
+
+# Create exchange by name
+exchange = create_exchange('polymarket', {'timeout': 30})`}
+
+
+ +
+

Markets

+

Fetch and query prediction markets:

+ + + + + + + + + + + + + + + + + + + + + + +
MethodDescription
fetch_markets()Fetch all available markets
fetch_market(market_id)Fetch a specific market by ID
fetch_orderbook(market_id)Get the orderbook for a market
+ +

Market Model

+
+
+
+ + + +
+ models/market.py +
+
{`class Market:
+    id: str              # Unique market identifier
+    question: str        # Market question
+    outcomes: list       # Available outcomes (e.g., ["Yes", "No"])
+    prices: dict         # Current prices for each outcome
+    volume: float        # Total trading volume
+    close_time: datetime # When the market closes
+    status: str          # Market status (open, closed, resolved)`}
+
+
+ +
+

Orders

+

Create and manage orders:

+ +
+
+
+ + + +
+ trading.py +
+
{`import dr_manhattan
+
+# Initialize with authentication
+polymarket = dr_manhattan.Polymarket({
+    'private_key': 'your_private_key',
+    'funder': 'your_funder_address',
+})
+
+# Create a buy order
+order = polymarket.create_order(
+    market_id="market_123",
+    outcome="Yes",
+    side=dr_manhattan.OrderSide.BUY,
+    price=0.65,
+    size=100,
+    params={'token_id': 'token_id'}
+)
+
+# Cancel an order
+polymarket.cancel_order(order.id)`}
+
+
+ +
+

Positions

+

Track your positions and balances:

+ +
+
+
+ + + +
+ positions.py +
+
{`# Fetch balance
+balance = polymarket.fetch_balance()
+print(f"USDC: {balance['USDC']}")
+
+# Fetch positions
+positions = polymarket.fetch_positions()
+for pos in positions:
+    print(f"{pos.market_id}: {pos.size} @ {pos.avg_price}")`}
+
+
+ +
+

WebSockets

+

Subscribe to real-time market data:

+ +
+
+
+ + + +
+ websocket.py +
+
{`import asyncio
+from dr_manhattan import PolymarketWS
+
+async def main():
+    ws = PolymarketWS()
+
+    async def on_orderbook(data):
+        print(f"Orderbook update: {data}")
+
+    await ws.subscribe_orderbook("market_id", on_orderbook)
+    await ws.run()
+
+asyncio.run(main())`}
+
+
+ +
+

Supported Exchanges

+

dr-manhattan supports the following prediction market exchanges:

+ + +
+ +
+

Polymarket

+

Polymarket is the leading prediction market on Polygon. It uses USDC for trading and requires a wallet for authentication.

+ +
+
+
+ + + +
+ polymarket_example.py +
+
{`import dr_manhattan
+
+polymarket = dr_manhattan.Polymarket({
+    'private_key': 'your_private_key',
+    'funder': 'your_funder_address',
+})
+
+# Fetch active markets
+markets = polymarket.fetch_markets()`}
+
+
+ +
+

Kalshi

+

Kalshi is a US-regulated prediction market exchange. It uses RSA-PSS authentication.

+ +
+
+
+ + + +
+ kalshi_example.py +
+
{`import dr_manhattan
+
+kalshi = dr_manhattan.Kalshi({
+    'api_key': 'your_api_key',
+    'private_key_path': '/path/to/private_key.pem',
+})`}
+
+
+ +
+

Opinion

+

Opinion is a prediction market on BNB Chain.

+ +
+
+
+ + + +
+ opinion_example.py +
+
{`import dr_manhattan
+
+opinion = dr_manhattan.Opinion({
+    'api_key': 'your_api_key',
+    'private_key': 'your_private_key',
+    'multi_sig_addr': 'your_multi_sig_addr'
+})`}
+
+
+ +
+

Limitless

+

Limitless is a prediction market platform with WebSocket support.

+ +
+
+
+ + + +
+ limitless_example.py +
+
{`import dr_manhattan
+
+limitless = dr_manhattan.Limitless({
+    'private_key': 'your_private_key',
+    'timeout': 30
+})`}
+
+
+ +
+

Predict.fun

+

Predict.fun is a prediction market on BNB Chain with smart wallet support.

+ +
+
+
+ + + +
+ predictfun_example.py +
+
{`import dr_manhattan
+
+predictfun = dr_manhattan.PredictFun({
+    'api_key': 'your_api_key',
+    'private_key': 'your_private_key',
+    'use_smart_wallet': True,
+    'smart_wallet_owner_private_key': 'your_owner_private_key',
+    'smart_wallet_address': 'your_smart_wallet_address'
+})`}
+
+
+ +
+

Strategy Framework

+

dr-manhattan provides a base class for building trading strategies with order tracking, position management, and event logging.

+ +
+
+
+ + + +
+ my_strategy.py +
+
{`from dr_manhattan import Strategy
+
+class MyStrategy(Strategy):
+    def on_tick(self):
+        self.log_status()
+        self.place_bbo_orders()
+
+# Run the strategy
+strategy = MyStrategy(exchange, market_id="123")
+strategy.run()`}
+
+
+ +
+

Spread Strategy

+

The spread strategy implements BBO (Best Bid/Offer) market making. It places orders at the best bid and ask prices with a configurable spread.

+ +
+
+
+ + + +
+ terminal +
+
{`uv run python examples/spread_strategy.py --exchange polymarket --slug fed-decision
+uv run python examples/spread_strategy.py --exchange opinion --market-id 813`}
+
+
+ +
+

Spike Strategy

+

The spike strategy implements mean reversion trading. It detects price spikes and places counter-trend orders.

+
+ +
+

Architecture

+

dr-manhattan follows a clean, modular architecture:

+ +
+
+
+ + + +
+ structure +
+
{`dr_manhattan/
+├── base/               # Core abstractions
+│   ├── exchange.py     # Abstract base class for exchanges
+│   ├── exchange_client.py  # High-level trading client
+│   ├── exchange_factory.py # Exchange instantiation
+│   ├── strategy.py     # Strategy base class
+│   ├── order_tracker.py    # Order event tracking
+│   ├── websocket.py    # WebSocket base class
+│   └── errors.py       # Exception hierarchy
+├── exchanges/          # Exchange implementations
+│   ├── polymarket/     # Polymarket (mixin-based package)
+│   ├── kalshi.py
+│   ├── opinion.py
+│   ├── limitless.py
+│   ├── limitless_ws.py
+│   ├── predictfun.py
+│   └── predictfun_ws.py
+├── models/             # Data models
+│   ├── market.py
+│   ├── order.py
+│   ├── orderbook.py
+│   └── position.py
+├── strategies/         # Strategy implementations
+└── utils/              # Utilities`}
+
+ +

Design Principles

+
    +
  • Unified Interface: All exchanges implement the same Exchange base class
  • +
  • Scalability: Adding new exchanges is straightforward - just implement the abstract methods
  • +
  • Simplicity: Clean abstractions with minimal dependencies
  • +
  • Type Safety: Full type hints throughout the codebase
  • +
+
+ +
+

Error Handling

+

All errors inherit from DrManhattanError:

+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
ErrorDescription
ExchangeErrorExchange-specific errors
NetworkErrorConnectivity issues
RateLimitErrorRate limit exceeded
AuthenticationErrorAuth failures
InsufficientFundsNot enough balance
InvalidOrderInvalid order parameters
MarketNotFoundMarket doesn't exist
+
+ +
+

MCP Server

+

Trade prediction markets directly from Claude using the Model Context Protocol (MCP). Choose between the hosted remote server (recommended) or run locally.

+ +

Remote Server (Recommended)

+

Connect to the hosted MCP server without any local installation:

+ +
    +
  1. Connect Your Wallet: Go to the approval page to connect your Polymarket wallet and sign an authentication message.
  2. +
  3. Copy Configuration: After signing, copy the generated configuration.
  4. +
  5. Add to Claude: Paste into ~/.claude/settings.json (Claude Code) or ~/Library/Application Support/Claude/claude_desktop_config.json (Claude Desktop on macOS).
  6. +
+ +
+
+
+ + + +
+ settings.json +
+
{`{
+  "mcpServers": {
+    "dr-manhattan": {
+      "type": "sse",
+      "url": "https://dr-manhattan-mcp-production.up.railway.app/sse",
+      "headers": {
+        "X-Polymarket-Wallet-Address": "0xYourWalletAddress",
+        "X-Polymarket-Auth-Signature": "0xYourSignature...",
+        "X-Polymarket-Auth-Timestamp": "1706123456"
+      }
+    }
+  }
+}`}
+
+ +
+

Security: Your private key never leaves your wallet. The server uses operator mode where you approve it to trade on your behalf. Signatures expire after 24 hours.

+
+ +

Local Server

+

Run the MCP server locally for full control:

+ +
+
+
+ + + +
+ terminal +
+
{`# Install with MCP dependencies
+uv sync --extra mcp
+
+# Configure credentials
+cp .env.example .env
+# Edit .env with your POLYMARKET_PRIVATE_KEY and POLYMARKET_FUNDER`}
+
+ +

Add to your Claude Code settings:

+ +
+
+
+ + + +
+ settings.json +
+
{`{
+  "mcpServers": {
+    "dr-manhattan": {
+      "command": "/path/to/dr-manhattan/.venv/bin/python",
+      "args": ["-m", "dr_manhattan.mcp.server"],
+      "cwd": "/path/to/dr-manhattan"
+    }
+  }
+}`}
+
+ +

Available Commands

+

After restarting Claude, you can:

+
    +
  • "Show my Polymarket balance"
  • +
  • "Find active prediction markets"
  • +
  • "Buy 10 USDC of Yes on market X at 0.55"
  • +
  • "Cancel all my open orders"
  • +
+
+
+
+ + + + ) +} diff --git a/website/src/pages/HomePage.tsx b/website/src/pages/HomePage.tsx new file mode 100644 index 0000000..5c67c6a --- /dev/null +++ b/website/src/pages/HomePage.tsx @@ -0,0 +1,185 @@ +import { useEffect, useState } from 'react' +import { Link } from 'react-router-dom' + +export default function HomePage() { + const [showIntro, setShowIntro] = useState(true) + + useEffect(() => { + const timer = setTimeout(() => { + setShowIntro(false) + }, 2000) + return () => clearTimeout(timer) + }, []) + + const copyInstall = () => { + navigator.clipboard.writeText('uv pip install -e git+https://github.com/guzus/dr-manhattan') + } + + return ( + <> + {showIntro && ( +
+
+
Polymarket
+
Kalshi
+
Opinion
+
Limitless
+
Predict.fun
+
+
+
+
dr-manhattan
+
+
+ )} + + + +
+
+
Open Source
+

dr-manhattan

+

CCXT for prediction markets. Simple, scalable, and easy to extend.

+
+ Integrate MCP Server + + + View on GitHub + +
+ +
+ Supported Exchanges +
+ Polymarket + Kalshi + Opinion + Limitless + Predict.fun +
+
+
+
+ +
+
+

Simple, Unified Interface

+

Write exchange-agnostic code that works across all prediction markets

+
+ +
+
+ + + + example.py +
+
+
+import dr_manhattan{'\n'}
+{'\n'}
+# Initialize any exchange with the same interface{'\n'}
+polymarket = dr_manhattan.Polymarket({'{'}'timeout': 30{'}'}){'\n'}
+opinion = dr_manhattan.Opinion({'{'}'timeout': 30{'}'}){'\n'}
+limitless = dr_manhattan.Limitless({'{'}'timeout': 30{'}'}){'\n'}
+{'\n'}
+# Fetch markets from any platform{'\n'}
+markets = polymarket.fetch_markets(){'\n'}
+{'\n'}
+for market in markets:{'\n'}
+{'    '}print(f"{'{'}market.question{'}'}: {'{'}market.prices{'}'}")
+
+
+
+ +
+
+

Built for Developers

+

Everything you need to build prediction market applications

+
+ +
+
+
+ +
+

Unified Interface

+

One API to rule them all. Write code once and deploy across Polymarket, Kalshi, Opinion, Limitless, and Predict.fun.

+
+ +
+
+ +
+

WebSocket Support

+

Real-time market data streaming with built-in WebSocket connections for live orderbook and trade updates.

+
+ +
+
+ +
+

Strategy Framework

+

Base class for building trading strategies with order tracking, position management, and event logging.

+
+ +
+
+ +
+

Easily Extensible

+

Add new exchanges by implementing abstract methods. Clean architecture makes integration straightforward.

+
+ +
+
+ +
+

Type Safe

+

Full type hints throughout the codebase. Catch errors early and enjoy superior IDE autocomplete.

+
+ +
+
+ +
+

Order Management

+

Create, cancel, and track orders with standardized error handling across all supported exchanges.

+
+
+
+ +
+
+

Get Started in Seconds

+

Install with uv and start building

+
+ +
+ uv pip install -e git+https://github.com/guzus/dr-manhattan + +
+
+ + + + ) +} diff --git a/website/src/styles.css b/website/src/styles.css new file mode 100644 index 0000000..712ecc4 --- /dev/null +++ b/website/src/styles.css @@ -0,0 +1,1920 @@ +:root { + --void: #050508; + --deep-space: #0a0a10; + --nebula: #0d0d15; + --manhattan-blue: #00b4ff; + --manhattan-glow: #00d4ff; + --quantum-cyan: #00ffff; + --atomic-purple: #7b5cff; + --text-primary: #e8eaed; + --text-secondary: #8b9098; + --text-muted: #5a5f6a; + --code-bg: #0c0c14; + --border-subtle: rgba(0, 180, 255, 0.15); + --glow-intense: 0 0 60px rgba(0, 180, 255, 0.4), 0 0 120px rgba(0, 180, 255, 0.2); + --glow-soft: 0 0 30px rgba(0, 180, 255, 0.3); + --success: #28c840; + --error: #ff5f57; +} + +* { + margin: 0; + padding: 0; + box-sizing: border-box; +} + +html { + scroll-behavior: smooth; +} + +body { + font-family: 'Space Grotesk', -apple-system, sans-serif; + background: var(--void); + color: var(--text-primary); + line-height: 1.6; + overflow-x: hidden; + min-height: 100vh; +} + +/* Marvel-Style Intro */ +.marvel-intro { + position: fixed; + top: 0; + left: 0; + width: 100vw; + height: 100vh; + z-index: 9999; + background: #020408; + display: flex; + align-items: center; + justify-content: center; + overflow: hidden; +} + +.marvel-intro.fade-out { + animation: introFadeOut 0.8s ease-out forwards; +} + +@keyframes introFadeOut { + to { + opacity: 0; + visibility: hidden; + } +} + +.flip-book { + position: absolute; + top: 0; + left: 0; + width: 100%; + height: 100%; + perspective: 1000px; + opacity: 0; + animation: flipBookFade 1s ease-out forwards; +} + +@keyframes flipBookFade { + 0% { opacity: 0; } + 10% { opacity: 1; } + 90% { opacity: 1; } + 100% { opacity: 0; } +} + +.flip-page { + position: absolute; + top: 0; + left: 0; + width: 100%; + height: 100%; + display: flex; + align-items: center; + justify-content: center; + backface-visibility: hidden; + opacity: 0; +} + +.flip-page img { + width: 50vmin; + height: 50vmin; + object-fit: contain; + filter: 
brightness(0.9) contrast(1.1); +} + +.flip-page:nth-child(1) { animation: pageFlipVertical 0.2s ease-in-out 0s forwards; } +.flip-page:nth-child(2) { animation: pageFlipVertical 0.2s ease-in-out 0.2s forwards; } +.flip-page:nth-child(3) { animation: pageFlipVertical 0.2s ease-in-out 0.4s forwards; } +.flip-page:nth-child(4) { animation: pageFlipVertical 0.2s ease-in-out 0.6s forwards; } +.flip-page:nth-child(5) { animation: pageFlipVertical 0.2s ease-in-out 0.8s forwards; } + +@keyframes pageFlipVertical { + 0% { opacity: 0; transform: rotateX(90deg); } + 10% { opacity: 1; transform: rotateX(0deg); } + 90% { opacity: 1; transform: rotateX(0deg); } + 100% { opacity: 0; transform: rotateX(-90deg); } +} + +.color-overlay { + position: absolute; + top: 0; + left: 0; + width: 100%; + height: 100%; + pointer-events: none; + animation: colorShift 1s ease-in-out forwards; + mix-blend-mode: multiply; +} + +@keyframes colorShift { + 0% { background: #002244; } + 25% { background: #003366; } + 50% { background: #004488; } + 75% { background: #0055aa; } + 100% { background: #001133; } +} + +.logo-reveal { + position: absolute; + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + z-index: 10; + opacity: 0; + animation: logoAppear 0.5s ease-out 1s forwards; +} + +@keyframes logoAppear { + 0% { opacity: 0; transform: scale(0.9); } + 100% { opacity: 1; transform: scale(1); } +} + +.logo-text { + font-family: 'Space Grotesk', sans-serif; + font-size: clamp(3rem, 12vw, 10rem); + font-weight: 700; + letter-spacing: -0.03em; + color: var(--manhattan-glow); + text-shadow: 0 0 60px rgba(0, 212, 255, 0.5); +} + +/* Navigation */ +nav { + position: fixed; + top: 0; + left: 0; + right: 0; + z-index: 100; + padding: 1.5rem 3rem; + display: flex; + justify-content: space-between; + align-items: center; + background: rgba(5, 5, 8, 0.8); + backdrop-filter: blur(10px); + border-bottom: 1px solid var(--border-subtle); +} + +.logo { + font-family: 
'JetBrains Mono', monospace; + font-size: 1.25rem; + font-weight: 600; + color: var(--manhattan-glow); + text-decoration: none; + letter-spacing: -0.02em; +} + +.nav-links { + display: flex; + gap: 2.5rem; + align-items: center; +} + +.nav-links a { + color: var(--text-secondary); + text-decoration: none; + font-size: 0.9rem; + font-weight: 500; + transition: color 0.3s; +} + +.nav-links a:hover { + color: var(--manhattan-glow); +} + +.nav-icon { + color: var(--text-secondary); + text-decoration: none; + transition: color 0.3s; +} + +.nav-icon:hover { + color: var(--manhattan-glow); +} + +.nav-icon svg { + width: 20px; + height: 20px; +} + +.github-btn { + display: flex; + align-items: center; + gap: 0.5rem; + padding: 0.6rem 1.2rem; + background: transparent; + border: 1px solid var(--border-subtle); + border-radius: 6px; + color: var(--text-primary); + text-decoration: none; + font-size: 0.85rem; + font-weight: 500; + transition: all 0.3s; +} + +.github-btn:hover { + border-color: var(--manhattan-blue); + background: rgba(0, 180, 255, 0.1); + box-shadow: var(--glow-soft); +} + +.github-btn svg { + width: 18px; + height: 18px; +} + +/* Hero Section */ +.hero { + min-height: 100vh; + display: flex; + flex-direction: column; + justify-content: center; + align-items: center; + text-align: center; + padding: 8rem 2rem 4rem; + position: relative; +} + +.hero-content { + position: relative; + z-index: 2; +} + +.hero-badge { + display: inline-flex; + align-items: center; + gap: 0.5rem; + padding: 0.4rem 1rem; + background: rgba(0, 180, 255, 0.1); + border: 1px solid var(--border-subtle); + border-radius: 100px; + font-size: 0.8rem; + color: var(--manhattan-glow); + margin-bottom: 2rem; + animation: fadeInUp 0.8s ease-out; +} + +.hero-badge::before { + content: ''; + width: 6px; + height: 6px; + background: var(--manhattan-glow); + border-radius: 50%; + animation: pulse 2s infinite; +} + +@keyframes pulse { + 0%, 100% { opacity: 1; } + 50% { opacity: 0.5; } +} + +.hero h1 
{ + font-size: clamp(3rem, 8vw, 6rem); + font-weight: 700; + letter-spacing: -0.03em; + line-height: 1.1; + margin-bottom: 1.5rem; + animation: fadeInUp 0.8s ease-out 0.1s both; +} + +.hero h1 .glow { + color: var(--manhattan-glow); +} + +.glow { + color: var(--manhattan-glow); +} + +.hero-tagline { + font-size: clamp(1.1rem, 2.5vw, 1.4rem); + color: var(--text-secondary); + max-width: 600px; + margin-bottom: 3rem; + font-weight: 400; + animation: fadeInUp 0.8s ease-out 0.2s both; +} + +@keyframes fadeInUp { + from { opacity: 0; transform: translateY(30px); } + to { opacity: 1; transform: translateY(0); } +} + +.hero-actions { + display: flex; + gap: 1rem; + flex-wrap: wrap; + justify-content: center; + animation: fadeInUp 0.8s ease-out 0.3s both; +} + +/* Buttons */ +.btn-primary { + display: inline-flex; + align-items: center; + justify-content: center; + gap: 0.5rem; + padding: 0.9rem 2rem; + background: var(--manhattan-blue); + border: none; + border-radius: 8px; + color: white; + font-size: 1rem; + font-weight: 600; + font-family: inherit; + text-decoration: none; + cursor: pointer; + transition: all 0.3s; +} + +.btn-primary:hover:not(:disabled) { + transform: translateY(-2px); + box-shadow: var(--glow-soft); +} + +.btn-primary:disabled { + opacity: 0.6; + cursor: not-allowed; +} + +.btn-secondary { + display: inline-flex; + align-items: center; + gap: 0.5rem; + padding: 0.9rem 2rem; + background: rgba(0, 180, 255, 0.08); + border: 1px solid rgba(0, 180, 255, 0.4); + border-radius: 8px; + color: var(--text-primary); + font-size: 1rem; + font-weight: 500; + font-family: inherit; + text-decoration: none; + cursor: pointer; + transition: all 0.3s; +} + +.btn-secondary:hover { + border-color: var(--manhattan-blue); + background: rgba(0, 180, 255, 0.1); +} + +.btn-secondary svg { + width: 18px; + height: 18px; +} + +/* Exchanges Section */ +.exchanges-preview { + display: flex; + gap: 3rem; + align-items: center; + justify-content: center; + margin-top: 5rem; + 
padding-top: 3rem; + border-top: 1px solid var(--border-subtle); + animation: fadeInUp 0.8s ease-out 0.4s both; +} + +.exchanges-preview span { + font-size: 0.85rem; + color: var(--text-muted); + text-transform: uppercase; + letter-spacing: 0.1em; +} + +.exchange-logos { + display: flex; + gap: 2rem; + align-items: center; +} + +.exchange-logo { + width: 48px; + height: 48px; + border-radius: 12px; + object-fit: cover; + filter: grayscale(0.3); + opacity: 0.8; + transition: all 0.3s; +} + +.exchange-logo:hover { + filter: grayscale(0); + opacity: 1; + transform: scale(1.1); +} + +/* Code Section */ +.code-section { + padding: 8rem 2rem; + max-width: 1200px; + margin: 0 auto; +} + +.section-header { + text-align: center; + margin-bottom: 4rem; +} + +.section-header h2 { + font-size: clamp(2rem, 4vw, 2.75rem); + font-weight: 600; + letter-spacing: -0.02em; + margin-bottom: 1rem; +} + +.section-header p { + color: var(--text-secondary); + font-size: 1.1rem; + max-width: 500px; + margin: 0 auto; +} + +.code-container { + position: relative; + background: var(--code-bg); + border: 1px solid var(--border-subtle); + border-radius: 16px; + overflow: hidden; +} + +.code-header { + display: flex; + align-items: center; + gap: 0.5rem; + padding: 1rem 1.5rem; + background: rgba(0, 0, 0, 0.3); + border-bottom: 1px solid var(--border-subtle); +} + +.code-dot { + width: 12px; + height: 12px; + border-radius: 50%; + background: var(--text-muted); +} + +.code-dot:nth-child(1) { background: #ff5f57; } +.code-dot:nth-child(2) { background: #ffbd2e; } +.code-dot:nth-child(3) { background: #28c840; } + +.code-filename { + margin-left: auto; + font-family: 'JetBrains Mono', monospace; + font-size: 0.8rem; + color: var(--text-muted); +} + +.code-block { + padding: 2rem; + overflow-x: auto; +} + +.code-block pre { + font-family: 'JetBrains Mono', monospace; + font-size: 0.9rem; + line-height: 1.8; + color: var(--text-secondary); +} + +/* Syntax Highlighting */ +.code-block .kw { color: 
#c792ea; } /* keywords: import, for, in */ +.code-block .fn { color: #82aaff; } /* functions */ +.code-block .st { color: #c3e88d; } /* strings */ +.code-block .nu { color: #f78c6c; } /* numbers */ +.code-block .cm { color: #546e7a; } /* comments */ + +/* Features Section */ +.features-section { + padding: 6rem 2rem; + max-width: 1200px; + margin: 0 auto; +} + +.features-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(320px, 1fr)); + gap: 1.5rem; +} + +.feature-card { + position: relative; + padding: 2rem; + background: rgba(10, 10, 16, 0.8); + border: 1px solid var(--border-subtle); + border-radius: 16px; + transition: all 0.4s; +} + +.feature-card:hover { + border-color: rgba(0, 180, 255, 0.3); + transform: translateY(-4px); +} + +.feature-icon { + width: 48px; + height: 48px; + display: flex; + align-items: center; + justify-content: center; + background: rgba(0, 180, 255, 0.1); + border-radius: 12px; + margin-bottom: 1.25rem; + color: var(--manhattan-glow); +} + +.feature-icon svg { + width: 24px; + height: 24px; +} + +.feature-card h3 { + font-size: 1.15rem; + font-weight: 600; + margin-bottom: 0.75rem; + letter-spacing: -0.01em; +} + +.feature-card p { + color: var(--text-secondary); + font-size: 0.95rem; + line-height: 1.6; +} + +/* Install Section */ +.install-section { + padding: 6rem 2rem; + text-align: center; +} + +.install-box { + display: inline-flex; + align-items: center; + gap: 1rem; + padding: 1rem 1.5rem; + background: var(--code-bg); + border: 1px solid var(--border-subtle); + border-radius: 12px; + margin-top: 2rem; +} + +.install-box code { + font-family: 'JetBrains Mono', monospace; + font-size: 1rem; + color: var(--manhattan-glow); + background: none; + padding: 0; +} + +.copy-btn { + padding: 0.5rem; + background: transparent; + border: none; + color: var(--text-muted); + cursor: pointer; + transition: color 0.3s; + border-radius: 6px; +} + +.copy-btn:hover { + color: var(--manhattan-glow); + background: rgba(0, 
180, 255, 0.1); +} + +.copy-btn svg { + width: 18px; + height: 18px; +} + +/* Footer */ +footer { + padding: 4rem 2rem; + text-align: center; + border-top: 1px solid var(--border-subtle); +} + +.footer-links { + display: flex; + justify-content: center; + gap: 2rem; + margin-bottom: 2rem; +} + +.footer-links a { + color: var(--text-secondary); + text-decoration: none; + font-size: 0.9rem; + transition: color 0.3s; +} + +.footer-links a:hover { + color: var(--manhattan-glow); +} + +.footer-copy { + color: var(--text-muted); + font-size: 0.85rem; +} + +/* Docs Layout */ +.cosmic-bg { + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + pointer-events: none; + z-index: -1; + background: + radial-gradient(ellipse at 20% 20%, rgba(0, 180, 255, 0.08) 0%, transparent 50%), + radial-gradient(ellipse at 80% 80%, rgba(123, 92, 255, 0.05) 0%, transparent 50%), + radial-gradient(ellipse at 50% 50%, rgba(0, 212, 255, 0.03) 0%, transparent 70%); +} + +.grid-overlay { + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + pointer-events: none; + z-index: -1; + background-image: + linear-gradient(rgba(0, 180, 255, 0.03) 1px, transparent 1px), + linear-gradient(90deg, rgba(0, 180, 255, 0.03) 1px, transparent 1px); + background-size: 60px 60px; + mask-image: radial-gradient(ellipse at center, black 0%, transparent 70%); +} + +.docs-layout { + display: flex; + padding-top: 80px; + min-height: 100vh; +} + +.sidebar { + position: fixed; + top: 80px; + left: 0; + width: 280px; + height: calc(100vh - 80px); + padding: 2rem; + background: rgba(10, 10, 16, 0.8); + border-right: 1px solid var(--border-subtle); + overflow-y: auto; +} + +.sidebar-section { + margin-bottom: 2rem; +} + +.sidebar-section h3 { + font-size: 0.75rem; + font-weight: 600; + color: var(--text-muted); + text-transform: uppercase; + letter-spacing: 0.1em; + margin-bottom: 0.75rem; +} + +.sidebar-section a { + display: block; + padding: 0.5rem 0; + color: var(--text-secondary); + 
text-decoration: none; + font-size: 0.9rem; + transition: all 0.2s; + border-left: 2px solid transparent; + padding-left: 1rem; + margin-left: -1rem; +} + +.sidebar-section a:hover, +.sidebar-section a.active { + color: var(--manhattan-glow); + border-left-color: var(--manhattan-glow); +} + +.docs-content { + flex: 1; + margin-left: 280px; + padding: 3rem 4rem; + max-width: 900px; +} + +.docs-content h1 { + font-size: 2.5rem; + font-weight: 700; + margin-bottom: 1rem; + color: var(--manhattan-glow); + text-shadow: 0 0 30px rgba(0, 212, 255, 0.3); +} + +.docs-content h2 { + font-size: 1.75rem; + font-weight: 600; + margin-top: 3rem; + margin-bottom: 1rem; + padding-top: 1rem; + border-top: 1px solid var(--border-subtle); +} + +.docs-content h3 { + font-size: 1.25rem; + font-weight: 600; + margin-top: 2rem; + margin-bottom: 0.75rem; + color: var(--text-primary); +} + +.docs-content h4 { + font-size: 1.1rem; + font-weight: 600; + margin-top: 1.5rem; + margin-bottom: 0.5rem; +} + +.docs-content p { + color: var(--text-secondary); + margin-bottom: 1rem; + line-height: 1.8; +} + +.docs-content ul, +.docs-content ol { + color: var(--text-secondary); + margin-bottom: 1rem; + padding-left: 1.5rem; +} + +.docs-content li { + margin-bottom: 0.5rem; +} + +.docs-content a { + color: var(--manhattan-glow); + text-decoration: none; + transition: all 0.2s; +} + +.docs-content a:hover { + text-shadow: 0 0 10px rgba(0, 212, 255, 0.5); +} + +.docs-content section { + scroll-margin-top: 100px; +} + +/* Inline Code */ +code { + font-family: 'JetBrains Mono', monospace; + font-size: 0.85em; + background: rgba(0, 180, 255, 0.1); + padding: 0.2em 0.5em; + border-radius: 4px; + color: var(--manhattan-glow); +} + +/* Cards */ +.card-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(250px, 1fr)); + gap: 1.5rem; + margin: 2rem 0; +} + +.card { + background: linear-gradient(135deg, rgba(13, 13, 21, 0.8) 0%, rgba(10, 10, 16, 0.9) 100%); + border: 1px solid 
var(--border-subtle); + border-radius: 12px; + padding: 1.5rem; + transition: all 0.3s; +} + +.card:hover { + border-color: rgba(0, 180, 255, 0.3); + transform: translateY(-2px); + box-shadow: 0 10px 30px rgba(0, 0, 0, 0.3), 0 0 30px rgba(0, 180, 255, 0.1); +} + +.card h4 { + font-size: 1.1rem; + margin-bottom: 0.5rem; + color: var(--text-primary); +} + +.card p { + font-size: 0.9rem; + color: var(--text-secondary); + margin: 0; +} + +.card-icon { + width: 40px; + height: 40px; + display: flex; + align-items: center; + justify-content: center; + background: rgba(0, 180, 255, 0.1); + border-radius: 8px; + margin-bottom: 1rem; + color: var(--manhattan-glow); +} + +.card-icon svg { + width: 20px; + height: 20px; +} + +/* Tables */ +table { + width: 100%; + border-collapse: collapse; + margin: 1.5rem 0; +} + +th, td { + padding: 1rem; + text-align: left; + border-bottom: 1px solid var(--border-subtle); +} + +th { + font-weight: 600; + color: var(--text-primary); + background: rgba(0, 180, 255, 0.05); +} + +td { + color: var(--text-secondary); +} + +td code { + font-size: 0.8rem; +} + +/* Callouts */ +.callout { + padding: 1rem 1.5rem; + border-radius: 8px; + margin: 1.5rem 0; + border-left: 4px solid; +} + +.callout-info { + background: rgba(0, 180, 255, 0.1); + border-color: var(--manhattan-blue); +} + +.callout-warning { + background: rgba(255, 189, 46, 0.1); + border-color: #ffbd2e; +} + +.callout p { + margin: 0; +} + +/* Exchange Grid */ +.exchange-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(180px, 1fr)); + gap: 1.5rem; + margin: 2rem 0; +} + +.exchange-card { + display: flex; + flex-direction: column; + align-items: center; + padding: 2rem; + background: linear-gradient(135deg, rgba(13, 13, 21, 0.8) 0%, rgba(10, 10, 16, 0.9) 100%); + border: 1px solid var(--border-subtle); + border-radius: 12px; + text-decoration: none; + transition: all 0.3s; +} + +.exchange-card:hover { + border-color: rgba(0, 180, 255, 0.3); + transform: 
translateY(-2px); + box-shadow: 0 10px 30px rgba(0, 0, 0, 0.3), 0 0 30px rgba(0, 180, 255, 0.1); +} + +.exchange-card img { + width: 60px; + height: 60px; + border-radius: 12px; + margin-bottom: 1rem; +} + +.exchange-card span { + color: var(--text-primary); + font-weight: 500; +} + +/* Docs Code Block */ +.docs-content .code-block { + background: var(--code-bg); + border: 1px solid var(--border-subtle); + border-radius: 12px; + margin: 1.5rem 0; + overflow: hidden; +} + +.dots { + display: flex; + gap: 6px; +} + +.dot { + width: 10px; + height: 10px; + border-radius: 50%; +} + +.dot:nth-child(1) { background: #ff5f57; } +.dot:nth-child(2) { background: #ffbd2e; } +.dot:nth-child(3) { background: #28c840; } + +.filename { + font-family: 'JetBrains Mono', monospace; + font-size: 0.8rem; + color: var(--text-muted); +} + +.docs-content .code-block .code-header { + display: flex; + align-items: center; + justify-content: space-between; + padding: 0.75rem 1rem; + background: rgba(0, 0, 0, 0.3); + border-bottom: 1px solid var(--border-subtle); +} + +.docs-content .code-block pre { + padding: 1.5rem; + overflow-x: auto; + font-family: 'JetBrains Mono', monospace; + font-size: 0.85rem; + line-height: 1.7; +} + +.docs-footer { + margin-left: 280px; + padding: 3rem 4rem; + border-top: 1px solid var(--border-subtle); + text-align: center; +} + +/* Onboarding Container */ +.onboarding-container { + max-width: 640px; + margin: 0 auto; + padding: 4rem 2rem; +} + +.onboarding-header { + text-align: center; + margin-bottom: 3rem; +} + +.back-link { + display: inline-block; + color: var(--text-secondary); + text-decoration: none; + font-size: 0.9rem; + margin-bottom: 2rem; + transition: color 0.3s; +} + +.back-link:hover { + color: var(--manhattan-glow); +} + +.onboarding-header h1 { + font-size: 2.5rem; + font-weight: 700; + letter-spacing: -0.03em; + margin-bottom: 1rem; +} + +.onboarding-header p { + color: var(--text-secondary); + font-size: 1.1rem; +} + +/* Steps Indicator */ 
+.steps-indicator { + display: flex; + align-items: center; + justify-content: center; + margin-bottom: 2rem; +} + +.step-dot { + width: 36px; + height: 36px; + border-radius: 50%; + background: var(--deep-space); + border: 2px solid var(--border-subtle); + display: flex; + align-items: center; + justify-content: center; + font-weight: 600; + font-size: 0.9rem; + color: var(--text-muted); + transition: all 0.3s; +} + +.step-dot.active { + background: var(--manhattan-blue); + border-color: var(--manhattan-blue); + color: white; +} + +.step-line { + width: 60px; + height: 2px; + background: var(--border-subtle); + margin: 0 0.5rem; +} + +/* Step Cards */ +.step-card { + background: var(--deep-space); + border: 1px solid var(--border-subtle); + border-radius: 16px; + padding: 2rem; + margin-bottom: 1.5rem; + transition: all 0.3s; +} + +.step-card.current { + border-color: var(--manhattan-blue); + box-shadow: 0 0 20px rgba(0, 180, 255, 0.1); +} + +.step-card.completed { + opacity: 0.6; +} + +.step-card.disabled { + opacity: 0.4; + pointer-events: none; +} + +.step-card h2 { + font-size: 1.25rem; + font-weight: 600; + margin-bottom: 0.5rem; +} + +.step-card p { + color: var(--text-secondary); + font-size: 0.95rem; +} + +/* Connect Button Wrapper */ +.connect-button-wrapper { + margin: 1.5rem 0; + display: flex; + justify-content: center; +} + +/* Step Actions */ +.step-actions { + margin-top: 1.5rem; + padding-top: 1.5rem; + border-top: 1px solid var(--border-subtle); +} + +.info-text { + font-size: 0.9rem; + color: var(--text-secondary); + margin-bottom: 1rem; +} + +.step-actions .btn-primary { + width: 100%; +} + +/* Approval Status */ +.approval-status { + display: flex; + align-items: center; + justify-content: space-between; + padding: 1rem; + background: rgba(40, 200, 64, 0.1); + border: 1px solid rgba(40, 200, 64, 0.3); + border-radius: 8px; + color: var(--success); + font-weight: 500; +} + +.approval-status .btn-primary { + width: auto; + padding: 0.5rem 1rem; + 
font-size: 0.9rem; +} + +/* Config Section */ +.config-section { + margin-top: 1.5rem; +} + +.config-header { + display: flex; + align-items: center; + justify-content: space-between; + padding: 0.75rem 1rem; + background: rgba(0, 0, 0, 0.3); + border: 1px solid var(--border-subtle); + border-bottom: none; + border-radius: 8px 8px 0 0; + font-family: 'JetBrains Mono', monospace; + font-size: 0.8rem; + color: var(--text-muted); +} + +.config-section .copy-btn { + padding: 0.4rem 0.8rem; + background: var(--manhattan-blue); + border: none; + border-radius: 4px; + color: white; + font-size: 0.8rem; + font-weight: 500; + cursor: pointer; + transition: all 0.3s; +} + +.config-section .copy-btn:hover { + background: var(--manhattan-glow); +} + +.config-code { + padding: 1.5rem; + background: var(--code-bg); + border: 1px solid var(--border-subtle); + border-radius: 0 0 8px 8px; + font-family: 'JetBrains Mono', monospace; + font-size: 0.8rem; + line-height: 1.6; + overflow-x: auto; + white-space: pre; + color: var(--manhattan-glow); +} + +/* Expiry Selector */ +.expiry-selector { + margin-bottom: 1.5rem; +} + +.expiry-selector label { + display: block; + font-size: 0.9rem; + color: var(--text-secondary); + margin-bottom: 0.75rem; +} + +.expiry-options { + display: flex; + gap: 0.5rem; + flex-wrap: wrap; +} + +.expiry-option { + padding: 0.5rem 1rem; + background: var(--deep-space); + border: 1px solid var(--border-subtle); + border-radius: 6px; + color: var(--text-secondary); + font-size: 0.85rem; + font-family: inherit; + cursor: pointer; + transition: all 0.2s; +} + +.expiry-option:hover { + border-color: var(--manhattan-blue); + color: var(--text-primary); +} + +.expiry-option.selected { + background: var(--manhattan-blue); + border-color: var(--manhattan-blue); + color: white; +} + +.expiry-hint { + margin-top: 0.75rem; + font-size: 0.8rem; + color: var(--text-muted); +} + +/* Config Meta */ +.config-meta { + margin-bottom: 0.75rem; +} + +.expiry-badge { + display: 
inline-block; + padding: 0.25rem 0.75rem; + background: rgba(0, 180, 255, 0.1); + border: 1px solid var(--border-subtle); + border-radius: 100px; + font-size: 0.8rem; + color: var(--manhattan-glow); +} + +/* Revoke Section */ +.revoke-section { + margin-top: 2rem; + padding-top: 2rem; + border-top: 1px solid var(--border-subtle); + text-align: center; +} + +.btn-text { + background: none; + border: none; + color: var(--text-muted); + font-size: 0.9rem; + font-family: inherit; + cursor: pointer; + transition: color 0.2s; +} + +.btn-text:hover { + color: var(--error); +} + +.revoke-card { + margin-top: 1rem; + padding: 1.5rem; + background: rgba(255, 95, 87, 0.05); + border: 1px solid rgba(255, 95, 87, 0.2); + border-radius: 12px; + text-align: left; +} + +.revoke-card h3 { + font-size: 1rem; + color: var(--error); + margin-bottom: 0.5rem; +} + +.revoke-card p { + font-size: 0.9rem; + color: var(--text-secondary); + margin-bottom: 1rem; +} + +.btn-danger { + display: inline-flex; + align-items: center; + justify-content: center; + padding: 0.75rem 1.5rem; + background: var(--error); + border: none; + border-radius: 8px; + color: white; + font-size: 0.9rem; + font-weight: 600; + font-family: inherit; + cursor: pointer; + transition: all 0.3s; +} + +.btn-danger:hover:not(:disabled) { + background: #ff3b30; + box-shadow: 0 0 20px rgba(255, 95, 87, 0.3); +} + +.btn-danger:disabled { + opacity: 0.6; + cursor: not-allowed; +} + +/* Messages */ +.success-message { + margin-top: 1.5rem; + padding: 1rem; + background: rgba(40, 200, 64, 0.1); + border: 1px solid rgba(40, 200, 64, 0.3); + border-radius: 8px; + color: var(--success); + text-align: center; + font-weight: 500; +} + +.error-message { + margin-top: 1rem; + padding: 1rem; + background: rgba(255, 95, 87, 0.1); + border: 1px solid rgba(255, 95, 87, 0.3); + border-radius: 8px; + color: var(--error); + text-align: center; +} + +/* Guide Page */ +.guide-container { + min-height: 100vh; + background: var(--deep-space); +} 
+ +.guide-nav { + position: relative; + display: flex; + justify-content: space-between; + align-items: center; + padding: 1.5rem 4rem; + border-bottom: 1px solid var(--border-subtle); + background: var(--deep-space); +} + +.guide-content { + max-width: 800px; + margin: 0 auto; + padding: 3rem 2rem 4rem; +} + +.guide-header { + text-align: center; + margin-bottom: 3rem; +} + +.guide-header h1 { + font-size: 2.5rem; + font-weight: 700; + letter-spacing: -0.03em; + margin-bottom: 0.75rem; +} + +.guide-subtitle { + font-size: 1.1rem; + color: var(--text-secondary); +} + +.guide-section { + margin-bottom: 3rem; +} + +.guide-section h2 { + font-size: 1.5rem; + font-weight: 600; + margin-bottom: 1rem; + color: var(--text-primary); +} + +.guide-section p { + color: var(--text-secondary); + line-height: 1.7; + margin-bottom: 1rem; +} + +.guide-list { + margin: 1rem 0 1.5rem 1.5rem; + color: var(--text-secondary); + line-height: 1.8; +} + +.guide-list li { + margin-bottom: 0.5rem; +} + +.guide-list.numbered { + list-style-type: decimal; +} + +.guide-list.numbered li { + margin-bottom: 1rem; + padding-left: 0.5rem; +} + +/* Info Card */ +.info-card { + background: rgba(0, 180, 255, 0.05); + border: 1px solid rgba(0, 180, 255, 0.2); + border-radius: 12px; + padding: 1.5rem; + margin: 1.5rem 0; +} + +.info-card h3 { + font-size: 1.1rem; + color: var(--manhattan-glow); + margin-bottom: 1rem; +} + +.security-note { + padding: 1rem; + background: rgba(40, 200, 64, 0.08); + border-left: 3px solid var(--success); + border-radius: 0 8px 8px 0; + color: var(--text-secondary); + font-size: 0.95rem; +} + +/* Read-Only Features */ +.read-only-features { + background: rgba(0, 180, 255, 0.05); + border: 1px solid rgba(0, 180, 255, 0.15); + border-radius: 10px; + padding: 1.25rem; + margin: 1rem 0; +} + +.read-only-features h4 { + font-size: 0.95rem; + color: var(--manhattan-glow); + margin-bottom: 0.75rem; +} + +.read-only-features .guide-list { + margin: 0; +} + +.info-text { + 
font-size: 0.9rem; + color: var(--text-muted); + font-style: italic; +} + +/* Security Grid */ +.security-grid { + display: grid; + grid-template-columns: 1fr 1fr; + gap: 1rem; + margin: 1.5rem 0; +} + +.security-item { + padding: 1.25rem; + border-radius: 10px; +} + +.security-item.safe { + background: rgba(40, 200, 64, 0.08); + border: 1px solid rgba(40, 200, 64, 0.2); +} + +.security-item.restricted { + background: rgba(255, 95, 87, 0.08); + border: 1px solid rgba(255, 95, 87, 0.2); +} + +.security-item h4 { + font-size: 0.9rem; + font-weight: 600; + margin-bottom: 0.75rem; +} + +.security-item.safe h4 { + color: var(--success); +} + +.security-item.restricted h4 { + color: var(--error); +} + +.security-item ul { + margin: 0; + padding-left: 1.25rem; + font-size: 0.9rem; + color: var(--text-secondary); + line-height: 1.6; +} + +.code-link { + color: var(--manhattan-glow); + font-family: 'JetBrains Mono', monospace; + font-size: 0.9rem; +} + +/* Setup Steps */ +.setup-step { + background: var(--deep-space); + border: 1px solid var(--border-subtle); + border-radius: 12px; + padding: 1.5rem; + margin-bottom: 1rem; + transition: all 0.3s; +} + +.setup-step.locked { + opacity: 0.5; +} + +.step-header { + display: flex; + gap: 1rem; + align-items: flex-start; +} + +.step-number { + width: 32px; + height: 32px; + border-radius: 50%; + background: var(--void); + border: 2px solid var(--border-subtle); + display: flex; + align-items: center; + justify-content: center; + font-weight: 600; + font-size: 0.85rem; + color: var(--text-muted); + flex-shrink: 0; +} + +.step-number.active { + background: var(--manhattan-blue); + border-color: var(--manhattan-blue); + color: white; +} + +.step-number.completed { + background: var(--success); + border-color: var(--success); +} + +.step-header h3 { + font-size: 1.1rem; + font-weight: 600; + margin-bottom: 0.25rem; +} + +.step-header p { + font-size: 0.9rem; + color: var(--text-secondary); + margin: 0; +} + +.step-content { + 
margin-top: 1.25rem; + padding-left: 2.75rem; +} + +.step-explanation { + margin-bottom: 1.25rem; +} + +.step-explanation p { + font-size: 0.9rem; + margin-bottom: 0.75rem; +} + +.step-explanation code { + background: var(--code-bg); + padding: 0.15rem 0.4rem; + border-radius: 4px; + font-family: 'JetBrains Mono', monospace; + font-size: 0.85rem; + color: var(--manhattan-glow); +} + +.connect-wrapper { + margin-bottom: 1rem; +} + +.step-status { + display: flex; + align-items: center; + justify-content: space-between; + padding: 0.75rem 1rem; + border-radius: 8px; + font-size: 0.9rem; +} + +.step-status.success { + background: rgba(40, 200, 64, 0.1); + color: var(--success); +} + +.btn-small { + padding: 0.4rem 0.8rem; + background: var(--manhattan-blue); + border: none; + border-radius: 6px; + color: white; + font-size: 0.8rem; + font-weight: 500; + font-family: inherit; + cursor: pointer; + transition: all 0.2s; +} + +.btn-small:hover { + background: var(--manhattan-glow); +} + +/* Contract Details */ +.contract-details { + background: var(--code-bg); + border-radius: 8px; + padding: 1rem; + margin-top: 1rem; +} + +.detail-row { + display: flex; + gap: 1rem; + padding: 0.5rem 0; + font-size: 0.85rem; + border-bottom: 1px solid rgba(255,255,255,0.05); +} + +.detail-row:last-child { + border-bottom: none; +} + +.detail-row span:first-child { + color: var(--text-muted); + min-width: 80px; +} + +.detail-row a { + color: var(--manhattan-glow); + text-decoration: none; +} + +.detail-row a:hover { + text-decoration: underline; +} + +.detail-row code { + font-family: 'JetBrains Mono', monospace; + color: var(--text-secondary); +} + +/* Config Block */ +.config-block { + background: var(--code-bg); + border: 1px solid var(--border-subtle); + border-radius: 10px; + overflow: hidden; + margin-bottom: 1.5rem; +} + +.config-block .config-header { + display: flex; + justify-content: space-between; + align-items: center; + padding: 0.75rem 1rem; + background: rgba(0, 0, 0, 
0.3); + border-bottom: 1px solid var(--border-subtle); + font-size: 0.85rem; + color: var(--text-secondary); +} + +.config-actions { + display: flex; + align-items: center; + gap: 0.75rem; +} + +.config-code { + padding: 1.25rem; + font-family: 'JetBrains Mono', monospace; + font-size: 0.8rem; + line-height: 1.6; + color: var(--text-secondary); + overflow-x: auto; +} + +/* Final Steps */ +.final-steps { + background: rgba(0, 180, 255, 0.05); + border: 1px solid rgba(0, 180, 255, 0.15); + border-radius: 10px; + padding: 1.25rem; +} + +.final-steps h4 { + font-size: 0.95rem; + margin-bottom: 0.75rem; + color: var(--text-primary); +} + +.final-steps ol { + margin: 0; + padding-left: 1.25rem; + font-size: 0.9rem; + color: var(--text-secondary); + line-height: 1.8; +} + +.final-steps code { + background: var(--code-bg); + padding: 0.1rem 0.35rem; + border-radius: 4px; + font-family: 'JetBrains Mono', monospace; + font-size: 0.85rem; +} + +/* Revoke Confirm */ +.revoke-confirm { + background: rgba(255, 95, 87, 0.08); + border: 1px solid rgba(255, 95, 87, 0.2); + border-radius: 10px; + padding: 1.25rem; + margin-top: 1rem; +} + +.revoke-confirm p { + color: var(--text-secondary); + margin-bottom: 1rem; +} + +.revoke-actions { + display: flex; + gap: 0.75rem; +} + +.btn-outline-danger { + padding: 0.6rem 1.25rem; + background: transparent; + border: 1px solid rgba(255, 95, 87, 0.4); + border-radius: 8px; + color: var(--error); + font-size: 0.9rem; + font-family: inherit; + cursor: pointer; + transition: all 0.2s; +} + +.btn-outline-danger:hover { + background: rgba(255, 95, 87, 0.1); + border-color: var(--error); +} + +/* FAQ */ +.faq-list { + display: flex; + flex-direction: column; + gap: 1rem; +} + +.faq-item { + background: var(--deep-space); + border: 1px solid var(--border-subtle); + border-radius: 10px; + padding: 1.25rem; +} + +.faq-item h4 { + font-size: 1rem; + font-weight: 600; + margin-bottom: 0.5rem; + color: var(--text-primary); +} + +.faq-item p { + 
font-size: 0.9rem; + color: var(--text-secondary); + line-height: 1.6; + margin: 0; +} + +.faq-item a { + color: var(--manhattan-glow); +} + +/* Guide Footer */ +.guide-footer { + text-align: center; + padding: 2rem; + border-top: 1px solid var(--border-subtle); + color: var(--text-muted); + font-size: 0.9rem; +} + +/* Responsive */ +@media (max-width: 1024px) { + .sidebar { + display: none; + } + .docs-content, + .docs-footer { + margin-left: 0; + padding: 2rem; + } +} + +@media (max-width: 768px) { + nav { + padding: 1rem 1.5rem; + } + + .nav-links { + display: none; + } + + .hero { + padding: 6rem 1.5rem 3rem; + } + + .logo-text { + font-size: clamp(2rem, 10vw, 4rem); + } + + .exchanges-preview { + flex-direction: column; + gap: 1.5rem; + } + + .exchange-logos { + gap: 1.5rem; + } + + .exchange-logo { + width: 40px; + height: 40px; + } + + .code-block { + padding: 1.25rem; + } + + .code-block pre { + font-size: 0.8rem; + } + + .install-box { + flex-direction: column; + width: 100%; + max-width: 400px; + } + + .onboarding-container { + padding: 2rem 1rem; + } + + .onboarding-header h1 { + font-size: 1.75rem; + } + + .step-card { + padding: 1.5rem; + } + + .config-code { + font-size: 0.7rem; + } + + .docs-content h1 { + font-size: 2rem; + } + + .docs-content .code-block pre { + font-size: 0.75rem; + } + + /* Guide page responsive */ + .guide-nav { + padding: 1rem 1.5rem; + } + + .guide-content { + padding: 2rem 1.5rem; + } + + .guide-header h1 { + font-size: 1.75rem; + } + + .security-grid { + grid-template-columns: 1fr; + } + + .step-content { + padding-left: 0; + margin-top: 1rem; + } + + .config-block .config-header { + flex-direction: column; + gap: 0.5rem; + align-items: flex-start; + } +} diff --git a/website/src/wagmi.ts b/website/src/wagmi.ts new file mode 100644 index 0000000..f27881d --- /dev/null +++ b/website/src/wagmi.ts @@ -0,0 +1,59 @@ +import { getDefaultConfig } from '@rainbow-me/rainbowkit' +import { polygon } from 'wagmi/chains' + +export const 
config = getDefaultConfig({ + appName: 'Dr. Manhattan', + projectId: 'a1b2c3d4e5f6', // Get from WalletConnect Cloud + chains: [polygon], +}) + +// Authentication message format +export const AUTH_MESSAGE_PREFIX = 'I authorize Dr. Manhattan to trade on Polymarket on my behalf.' + +// Expiry options in seconds +export const EXPIRY_OPTIONS = [ + { label: '24 hours', value: 86400 }, + { label: '7 days', value: 604800 }, + { label: '30 days', value: 2592000 }, + { label: '90 days', value: 7776000 }, +] as const + +export type ExpiryOption = typeof EXPIRY_OPTIONS[number]['value'] + +export function createAuthMessage(walletAddress: string, timestamp: number, expirySeconds: number): string { + return `${AUTH_MESSAGE_PREFIX} + +Wallet: ${walletAddress} +Timestamp: ${timestamp} +Expiry: ${expirySeconds}` +} + +// Server operator address (to be updated when deployed) +export const OPERATOR_ADDRESS = '0x0000000000000000000000000000000000000000' + +// CTF Contract address on Polygon +export const CTF_CONTRACT_ADDRESS = '0x4d97dcd97ec945f40cf65f87097ace5ea0476045' + +// CTF Contract ABI (only setApprovalForAll) +export const CTF_ABI = [ + { + name: 'setApprovalForAll', + type: 'function', + stateMutability: 'nonpayable', + inputs: [ + { name: 'operator', type: 'address' }, + { name: 'approved', type: 'bool' }, + ], + outputs: [], + }, + { + name: 'isApprovedForAll', + type: 'function', + stateMutability: 'view', + inputs: [ + { name: 'owner', type: 'address' }, + { name: 'operator', type: 'address' }, + ], + outputs: [{ name: '', type: 'bool' }], + }, +] as const diff --git a/website/tsconfig.json b/website/tsconfig.json new file mode 100644 index 0000000..bfa0fea --- /dev/null +++ b/website/tsconfig.json @@ -0,0 +1,29 @@ +{ + "compilerOptions": { + // Environment setup & latest features + "lib": ["ESNext"], + "target": "ESNext", + "module": "Preserve", + "moduleDetection": "force", + "jsx": "react-jsx", + "allowJs": true, + + // Bundler mode + "moduleResolution": "bundler", + 
"allowImportingTsExtensions": true, + "verbatimModuleSyntax": true, + "noEmit": true, + + // Best practices + "strict": true, + "skipLibCheck": true, + "noFallthroughCasesInSwitch": true, + "noUncheckedIndexedAccess": true, + "noImplicitOverride": true, + + // Some stricter flags (disabled by default) + "noUnusedLocals": false, + "noUnusedParameters": false, + "noPropertyAccessFromIndexSignature": false + } +} diff --git a/website/vite.config.ts b/website/vite.config.ts new file mode 100644 index 0000000..fd568ff --- /dev/null +++ b/website/vite.config.ts @@ -0,0 +1,12 @@ +import { defineConfig } from 'vite' +import react from '@vitejs/plugin-react' + +export default defineConfig({ + plugins: [react()], + build: { + outDir: 'dist', + }, + server: { + port: 3000, + }, +}) diff --git a/wiki/README.md b/wiki/README.md index 0e52c1a..da2e55e 100644 --- a/wiki/README.md +++ b/wiki/README.md @@ -11,6 +11,12 @@ Exchange-specific documentation: - Limitless - Prediction market on Base - [Template](exchanges/TEMPLATE.md) - Template for creating new exchange documentation +## MCP Server + +Use Dr. Manhattan from Claude Desktop or Claude Code: + +- [Remote Server (SSE)](mcp/remote-server.md) - Connect without local installation + ## Strategies Trading strategy documentation: diff --git a/wiki/mcp/remote-server.md b/wiki/mcp/remote-server.md new file mode 100644 index 0000000..0221cd7 --- /dev/null +++ b/wiki/mcp/remote-server.md @@ -0,0 +1,174 @@ +# Remote MCP Server (SSE) + +Connect to Dr. Manhattan from Claude Desktop or Claude Code without local installation. + +## Quick Start + +**Server URL:** `https://dr-manhattan-mcp-production.up.railway.app/sse` + +### Step 1: Connect Your Wallet + +Go to [dr-manhattan.io/approve](https://dr-manhattan.io/approve) to: +1. Connect your Polymarket wallet +2. Approve Dr. Manhattan as an operator (one-time on-chain transaction) +3. Sign an authentication message (free, proves wallet ownership) +4. 
Copy your configuration + +### Step 2: Add Configuration + +Paste the configuration into your Claude settings: + +**Claude Code:** `~/.claude/settings.json` +**Claude Desktop:** `~/Library/Application Support/Claude/claude_desktop_config.json` (macOS) + +Example configuration: +```json +{ + "mcpServers": { + "dr-manhattan": { + "type": "sse", + "url": "https://dr-manhattan-mcp-production.up.railway.app/sse", + "headers": { + "X-Polymarket-Wallet-Address": "0xYourWalletAddress", + "X-Polymarket-Auth-Signature": "0xYourSignature...", + "X-Polymarket-Auth-Timestamp": "1706123456" + } + } + } +} +``` + +### Step 3: Verify Connection + +Restart Claude and run `/mcp` to see available tools. + +## Read-Only Mode + +You can connect without any credentials to browse markets: + +```bash +claude mcp add dr-manhattan \ + --transport sse \ + --url "https://dr-manhattan-mcp-production.up.railway.app/sse" +``` + +Available without credentials: +- `fetch_markets` - Browse all prediction markets +- `fetch_market` - Get market details and prices +- `fetch_orderbook` - View order book depth +- `search_markets` - Search markets by keyword + +## How It Works + +1. You connect your wallet and approve Dr. Manhattan as an operator +2. You sign a message proving wallet ownership (no gas, free) +3. The signature is included in your configuration +4. The server verifies your signature on each request +5. 
Orders execute from your account + +**Security:** +- Your private key never leaves your wallet +- Signatures expire after 24 hours (re-authenticate if needed) +- You can revoke operator access anytime on-chain +- Each order executes from your account, not the server's + +## Available Operations + +### Read Operations (All Exchanges) + +| Tool | Description | +|------|-------------| +| `list_exchanges` | List available exchanges | +| `fetch_markets` | Browse all markets | +| `search_markets` | Search by keyword | +| `fetch_market` | Get market details | +| `fetch_orderbook` | View order book | +| `fetch_token_ids` | Get token IDs | + +### Write Operations (Polymarket Only) + +| Tool | Description | +|------|-------------| +| `create_order` | Place an order | +| `cancel_order` | Cancel an order | +| `cancel_all_orders` | Cancel all orders | +| `fetch_balance` | Check balance | +| `fetch_positions` | View positions | +| `fetch_open_orders` | List open orders | + +## Troubleshooting + +### "Signature has expired" + +Your authentication signature is valid for 24 hours. Re-authenticate at [dr-manhattan.io/approve](https://dr-manhattan.io/approve). + +### "User has not approved operator" + +You need to approve the server address as an operator on Polymarket. Visit [dr-manhattan.io/approve](https://dr-manhattan.io/approve) and complete Step 1. + +### "Signature does not match wallet address" + +Make sure you're using the same wallet that you authenticated with. Re-authenticate if needed. + +### "Write operations are not supported for X" + +Write operations are only available for Polymarket. For other exchanges, use the [local MCP server](../../README.md#mcp-server). + +### Connection timeout + +The server may be cold-starting. Wait 10-30 seconds and retry. 
+ +### Check server status + +```bash +curl https://dr-manhattan-mcp-production.up.railway.app/health +``` + +## Self-Hosting + +Deploy your own instance for full control: + +```bash +# Clone repository +git clone https://github.com/guzus/dr-manhattan.git +cd dr-manhattan + +# Install dependencies +uv sync --extra mcp + +# Set your operator key +export POLYMARKET_OPERATOR_KEY="0xYourPrivateKey" + +# Run SSE server +uv run python -m dr_manhattan.mcp.server_sse +``` + +### Environment Variables + +| Variable | Default | Description | +|----------|---------|-------------| +| `PORT` | 8080 | Server port | +| `LOG_LEVEL` | INFO | Logging level | +| `POLYMARKET_OPERATOR_KEY` | - | Server's private key for signing | + +## Alternative: Builder Profile + +If you prefer to use your own API credentials instead of operator mode, you can authenticate with a Polymarket Builder profile by passing your API credentials as request headers. + +### Getting Credentials + +1. Go to [Polymarket](https://polymarket.com) and connect your wallet +2. Click on your profile icon > **Settings** > **API Keys** +3. Click **Create API Key** and set a passphrase +4. Save your credentials (API Secret is shown only once) + +### Configuration + +```bash +claude mcp add dr-manhattan \ + --transport sse \ + --url "https://dr-manhattan-mcp-production.up.railway.app/sse" \ + --header "X-Polymarket-Api-Key: your_api_key" \ + --header "X-Polymarket-Api-Secret: your_api_secret" \ + --header "X-Polymarket-Passphrase: your_passphrase" +```