From cef6333ac0f5a548cb6488d9770324ce488829e8 Mon Sep 17 00:00:00 2001 From: Stefano Dalla Gasperina Date: Thu, 22 Jan 2026 08:13:33 -0600 Subject: [PATCH 1/2] ci: add GitHub Actions workflows Add workflows for clone metrics, view metrics, and ruff linting. Co-Authored-By: Claude Opus 4.5 --- .github/workflows/clone-metrics.yml | 151 ++++++++++++++++++++++++++++ .github/workflows/ruff.yml | 27 +++++ .github/workflows/view-metrics.yml | 151 ++++++++++++++++++++++++++++ 3 files changed, 329 insertions(+) create mode 100644 .github/workflows/clone-metrics.yml create mode 100644 .github/workflows/ruff.yml create mode 100644 .github/workflows/view-metrics.yml diff --git a/.github/workflows/clone-metrics.yml b/.github/workflows/clone-metrics.yml new file mode 100644 index 0000000..6e82bac --- /dev/null +++ b/.github/workflows/clone-metrics.yml @@ -0,0 +1,151 @@ +name: Track Clone Metrics + +on: + workflow_dispatch: + schedule: + - cron: '0 8 * * *' # Run every day at 8am + + +jobs: + clone-stats: + runs-on: ubuntu-latest + permissions: + contents: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 # Fetch all history for proper branch operations + + - name: Generate GitHub App token + id: generate_token + uses: tibdex/github-app-token@v2.1.0 + with: + app_id: ${{ secrets.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + + - name: Switch to metrics branch + run: | + # Checkout or create metrics branch + if git show-ref --verify --quiet refs/remotes/origin/metrics; then + echo "📋 Checking out existing metrics branch..." + git checkout -b metrics origin/metrics || git checkout metrics + else + echo "🆕 Creating new metrics branch..." + git checkout -b metrics + fi + + - name: Fetch clone data + env: + TOKEN: ${{ steps.generate_token.outputs.token }} + run: | + mkdir -p .metrics + # Fetch clone metrics (contains both daily breakdown and 14-day totals) + curl -s -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer $TOKEN" \ + https://api.github.com/repos/${{ github.repository }}/traffic/clones \ + > .metrics/clone_stats.json + + echo "Clone metrics:" + cat .metrics/clone_stats.json + + - name: Update daily metrics + run: | + # Process each day from the clones array + LAST_UPDATED=$(date -u +"%Y-%m-%d %H:%M:%S UTC") + + # Create daily CSV with header if it doesn't exist + if [ ! -f .metrics/daily_clone_metrics.csv ]; then + echo "date,total_clones,unique_cloners,last_updated" > .metrics/daily_clone_metrics.csv + fi + + echo "📊 Processing daily metrics..." + jq -r '.clones[] | "\(.timestamp | split("T")[0]),\(.count),\(.uniques)"' .metrics/clone_stats.json | while IFS=',' read -r day_date count uniques; do + echo "Processing $day_date: $count clones, $uniques unique" + + # Check if this date already exists in the CSV + if grep -q "^$day_date" .metrics/daily_clone_metrics.csv; then + echo "📝 Updating existing entry for $day_date..." + # Update existing entry + awk -v date="$day_date" -v count="$count" -v uniques="$uniques" -v last_updated="$LAST_UPDATED" ' + BEGIN { FS=","; OFS="," } + /^[0-9]{4}-[0-9]{2}-[0-9]{2}/ && $1 == date { + print $1, count, uniques, last_updated; + updated=1; + next + } + { print } + ' .metrics/daily_clone_metrics.csv > .metrics/daily_clone_metrics_temp.csv + mv .metrics/daily_clone_metrics_temp.csv .metrics/daily_clone_metrics.csv + else + echo "➕ Adding new daily entry for $day_date..." 
+ echo "$day_date,$count,$uniques,$LAST_UPDATED" >> .metrics/daily_clone_metrics.csv + fi + done + + echo "Daily metrics:" + tail -n 5 .metrics/daily_clone_metrics.csv + + - name: Update 14-day rolling metrics + run: | + # Process 14-day metrics + COUNT_14D=$(jq '.count' .metrics/clone_stats.json) + UNIQUES_14D=$(jq '.uniques' .metrics/clone_stats.json) + DATE_ONLY=$(date -u +"%Y-%m-%d") + LAST_UPDATED=$(date -u +"%Y-%m-%d %H:%M:%S UTC") + + echo "📊 Processing 14-day metrics... for date: $DATE_ONLY" + echo "Processing values: $COUNT_14D clones, $UNIQUES_14D unique" + + # Create 14-day CSV with header if it doesn't exist + if [ ! -f .metrics/rolling_14d_clone_metrics.csv ]; then + echo "date,total_clones_14d,unique_cloners_14d,last_updated" > .metrics/rolling_14d_clone_metrics.csv + echo "📄 Created new 14-day rolling CSV file" + fi + + # Check if today's date already exists in the 14-day CSV + if grep -q "^$DATE_ONLY" .metrics/rolling_14d_clone_metrics.csv; then + echo "📝 Updating existing 14-day rolling entry for $DATE_ONLY..." + # Update existing entry + awk -v date="$DATE_ONLY" -v count="$COUNT_14D" -v uniques="$UNIQUES_14D" -v last_updated="$LAST_UPDATED" ' + BEGIN { FS=","; OFS=","; updated=0 } + /^[0-9]{4}-[0-9]{2}-[0-9]{2}/ && $1 == date { + print $1, count, uniques, last_updated; + updated=1; + next + } + { print } + END { if (!updated) print date, count, uniques, last_updated } + ' .metrics/rolling_14d_clone_metrics.csv > .metrics/rolling_14d_clone_metrics_temp.csv + mv .metrics/rolling_14d_clone_metrics_temp.csv .metrics/rolling_14d_clone_metrics.csv + echo "✅ Updated existing entry" + else + echo "➕ Adding new 14-day rolling entry for $DATE_ONLY..." + echo "$DATE_ONLY,$COUNT_14D,$UNIQUES_14D,$LAST_UPDATED" >> .metrics/rolling_14d_clone_metrics.csv + echo "✅ Added new entry" + fi + + echo "14-day rolling metrics:" + tail -n 5 .metrics/rolling_14d_clone_metrics.csv + + - name: Commit and push results + env: + GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }} + run: | + git config user.name "CloneMetricsBot[bot]" + git config user.email "41898282+github-actions[bot]@users.noreply.github.com" + + # Add both CSV files + git add .metrics/daily_clone_metrics.csv .metrics/rolling_14d_clone_metrics.csv + + # Check if there are changes to commit + if git diff --staged --quiet; then + echo "ℹ️ No changes to commit - CSV data is up to date" + else + echo "📝 Committing changes..." + git commit -m "Automated update: repository clone metrics $(date)" + + echo "🚀 Pushing to metrics branch..." + git push --force-with-lease origin metrics + fi diff --git a/.github/workflows/ruff.yml b/.github/workflows/ruff.yml new file mode 100644 index 0000000..35df8f4 --- /dev/null +++ b/.github/workflows/ruff.yml @@ -0,0 +1,27 @@ +name: Ruff Lint & Format + +on: + pull_request: + push: + branches: [develop, main] + +jobs: + ruff: + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install Ruff + run: pip install ruff + + - name: Run Ruff check (lint) + run: ruff check . + + - name: Run Ruff format (verify formatting) + run: ruff format --check .
diff --git a/.github/workflows/view-metrics.yml b/.github/workflows/view-metrics.yml new file mode 100644 index 0000000..c0905ec --- /dev/null +++ b/.github/workflows/view-metrics.yml @@ -0,0 +1,151 @@ +name: Track View Metrics + +on: + workflow_dispatch: + workflow_run: + workflows: ["Track Clone Metrics"] # must match the name of the clone-metrics workflow + types: [completed] + +jobs: + view-stats: + runs-on: ubuntu-latest + permissions: + contents: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 # Fetch all history for proper branch operations + + - name: Generate GitHub App token + id: generate_token + uses: tibdex/github-app-token@v2.1.0 + with: + app_id: ${{ secrets.APP_ID }} + private_key: ${{ secrets.APP_PRIVATE_KEY }} + + - name: Switch to metrics branch + run: | + # Checkout or create metrics branch + if git show-ref --verify --quiet refs/remotes/origin/metrics; then + echo "📋 Checking out existing metrics branch..." + git checkout -b metrics origin/metrics || git checkout metrics + else + echo "🆕 Creating new metrics branch..." + git checkout -b metrics + fi + + - name: Fetch view data + env: + TOKEN: ${{ steps.generate_token.outputs.token }} + run: | + mkdir -p .metrics + # Fetch view metrics (contains both daily breakdown and 14-day totals) + curl -s -H "Accept: application/vnd.github+json" \ + -H "Authorization: Bearer $TOKEN" \ + https://api.github.com/repos/${{ github.repository }}/traffic/views \ + > .metrics/view_stats.json + + echo "View metrics:" + cat .metrics/view_stats.json + + - name: Update daily metrics + run: | + # Process each day from the views array + LAST_UPDATED=$(date -u +"%Y-%m-%d %H:%M:%S UTC") + + # Create daily CSV with header if it doesn't exist + if [ ! -f .metrics/daily_view_metrics.csv ]; then + echo "date,total_views,unique_visitors,last_updated" > .metrics/daily_view_metrics.csv + fi + + echo "📊 Processing daily metrics..." + jq -r '.views[] | "\(.timestamp | split("T")[0]),\(.count),\(.uniques)"' .metrics/view_stats.json | while IFS=',' read -r day_date count uniques; do + echo "Processing $day_date: $count views, $uniques unique" + + # Check if this date already exists in the CSV + if grep -q "^$day_date" .metrics/daily_view_metrics.csv; then + echo "📝 Updating existing entry for $day_date..." + # Update existing entry + awk -v date="$day_date" -v count="$count" -v uniques="$uniques" -v last_updated="$LAST_UPDATED" ' + BEGIN { FS=","; OFS="," } + /^[0-9]{4}-[0-9]{2}-[0-9]{2}/ && $1 == date { + print $1, count, uniques, last_updated; + updated=1; + next + } + { print } + ' .metrics/daily_view_metrics.csv > .metrics/daily_view_metrics_temp.csv + mv .metrics/daily_view_metrics_temp.csv .metrics/daily_view_metrics.csv + else + echo "➕ Adding new daily entry for $day_date..." + echo "$day_date,$count,$uniques,$LAST_UPDATED" >> .metrics/daily_view_metrics.csv + fi + done + + echo "Daily metrics:" + tail -n 5 .metrics/daily_view_metrics.csv + + - name: Update 14-day rolling metrics + run: | + # Process 14-day metrics + COUNT_14D=$(jq '.count' .metrics/view_stats.json) + UNIQUES_14D=$(jq '.uniques' .metrics/view_stats.json) + DATE_ONLY=$(date -u +"%Y-%m-%d") + LAST_UPDATED=$(date -u +"%Y-%m-%d %H:%M:%S UTC") + + echo "📊 Processing 14-day metrics... for date: $DATE_ONLY" + echo "Processing values: $COUNT_14D views, $UNIQUES_14D unique" + + # Create 14-day CSV with header if it doesn't exist + if [ !
-f .metrics/rolling_14d_view_metrics.csv ]; then + echo "date,total_views_14d,unique_visitors_14d,last_updated" > .metrics/rolling_14d_view_metrics.csv + echo "📄 Created new 14-day rolling CSV file" + fi + + # Check if today's date already exists in the 14-day CSV + if grep -q "^$DATE_ONLY" .metrics/rolling_14d_view_metrics.csv; then + echo "📝 Updating existing 14-day rolling entry for $DATE_ONLY..." + # Update existing entry + awk -v date="$DATE_ONLY" -v count="$COUNT_14D" -v uniques="$UNIQUES_14D" -v last_updated="$LAST_UPDATED" ' + BEGIN { FS=","; OFS=","; updated=0 } + /^[0-9]{4}-[0-9]{2}-[0-9]{2}/ && $1 == date { + print $1, count, uniques, last_updated; + updated=1; + next + } + { print } + END { if (!updated) print date, count, uniques, last_updated } + ' .metrics/rolling_14d_view_metrics.csv > .metrics/rolling_14d_view_metrics_temp.csv + mv .metrics/rolling_14d_view_metrics_temp.csv .metrics/rolling_14d_view_metrics.csv + echo "✅ Updated existing entry" + else + echo "➕ Adding new 14-day rolling entry for $DATE_ONLY..." + echo "$DATE_ONLY,$COUNT_14D,$UNIQUES_14D,$LAST_UPDATED" >> .metrics/rolling_14d_view_metrics.csv + echo "✅ Added new entry" + fi + + echo "14-day rolling metrics:" + tail -n 5 .metrics/rolling_14d_view_metrics.csv + + - name: Commit and push results + env: + GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }} + run: | + git config user.name "ViewMetricsBot[bot]" + git config user.email "41898282+github-actions[bot]@users.noreply.github.com" + + # Add both CSV files + git add .metrics/daily_view_metrics.csv .metrics/rolling_14d_view_metrics.csv + + # Check if there are changes to commit + if git diff --staged --quiet; then + echo "ℹ️ No changes to commit - CSV data is up to date" + else + echo "📝 Committing changes..." + git commit -m "Automated update: repository view metrics $(date)" + + echo "🚀 Pushing to metrics branch..." + git push --force-with-lease origin metrics + fi \ No newline at end of file From 50b6bda762583cf913cf3dc8bdf6bfbd687b8d94 Mon Sep 17 00:00:00 2001 From: Stefano Dalla Gasperina Date: Thu, 22 Jan 2026 08:56:57 -0600 Subject: [PATCH 2/2] style: apply ruff formatting and fix linting errors - Format code with ruff - Remove unused variables - Replace bare except with except Exception - Add noqa comments for intentional late imports Co-Authored-By: Claude Opus 4.5 --- cli.py | 308 +++++++++++++++++++++------------ config.py | 5 +- logging_config.py | 29 ++-- main.py | 34 +++-- oauth/endpoints.py | 255 ++++++++++++++++++++------------ oauth/jwt_utils.py | 50 +++---- oauth/middleware.py | 39 +++-- setup.py | 93 +++++++----- sse.py | 12 +- submodule_deps.py | 57 +++++--- submodule_integration.py | 39 +++-- 11 files changed, 547 insertions(+), 374 deletions(-) diff --git a/cli.py b/cli.py index 9d5300d..280acde 100644 --- a/cli.py +++ b/cli.py @@ -5,6 +5,7 @@ This runs the LOCAL MCP server on the user's machine. On first run, it opens a browser for login via Railway.
""" + import argparse import os import platform @@ -45,7 +46,9 @@ # Cloudflared auto-install settings CLOUDFLARED_INSTALL_DIR = Path.home() / ".local" / "bin" -CLOUDFLARED_RELEASES_URL = "https://github.com/cloudflare/cloudflared/releases/latest/download" +CLOUDFLARED_RELEASES_URL = ( + "https://github.com/cloudflare/cloudflared/releases/latest/download" +) # Daemon settings CONFIG_DIR = Path.home() / ".simple-mcp-server" @@ -55,6 +58,7 @@ # ============== Helper Functions ============== + def fetch_user_info(access_token: str) -> dict: """Fetch user info from Supabase using access token.""" if not SUPABASE_URL or not SUPABASE_ANON_KEY: @@ -68,8 +72,12 @@ def fetch_user_info(access_token: str) -> dict: return { "user_id": user.id, "email": user.email, - "name": user.user_metadata.get("name", "") if user.user_metadata else "", - "organization": user.user_metadata.get("organization", "") if user.user_metadata else "", + "name": user.user_metadata.get("name", "") + if user.user_metadata + else "", + "organization": user.user_metadata.get("organization", "") + if user.user_metadata + else "", } except Exception: pass @@ -87,9 +95,7 @@ def check_cloudflared_service() -> bool: return False try: result = subprocess.run( - ["sc", "query", "cloudflared"], - capture_output=True, - text=True + ["sc", "query", "cloudflared"], capture_output=True, text=True ) return "RUNNING" in result.stdout except Exception: @@ -103,7 +109,7 @@ def check_cloudflared_process() -> bool: result = subprocess.run( ["tasklist", "/FI", "IMAGENAME eq cloudflared.exe"], capture_output=True, - text=True + text=True, ) return "cloudflared.exe" in result.stdout except Exception: @@ -112,9 +118,7 @@ def check_cloudflared_process() -> bool: # Linux/macOS: use pgrep try: result = subprocess.run( - ["pgrep", "-x", "cloudflared"], - capture_output=True, - text=True + ["pgrep", "-x", "cloudflared"], capture_output=True, text=True ) return result.returncode == 0 except Exception: @@ -138,12 +142,8 @@ def is_server_running() -> bool: """Check if MCP server is already running on port 8766.""" if platform.system() == "Windows": try: - result = subprocess.run( - ["netstat", "-ano"], - capture_output=True, - text=True - ) - for line in result.stdout.split('\n'): + result = subprocess.run(["netstat", "-ano"], capture_output=True, text=True) + for line in result.stdout.split("\n"): if ":8766" in line and "LISTENING" in line: return True except Exception: @@ -151,9 +151,7 @@ def is_server_running() -> bool: else: try: result = subprocess.run( - ["lsof", "-ti", ":8766"], - capture_output=True, - text=True + ["lsof", "-ti", ":8766"], capture_output=True, text=True ) return bool(result.stdout.strip()) except Exception: @@ -167,7 +165,7 @@ def run_cloudflared_tunnel(tunnel_token: str) -> subprocess.Popen: return subprocess.Popen( [cloudflared_cmd, "tunnel", "run", "--token", tunnel_token], stdout=subprocess.PIPE, - stderr=subprocess.PIPE + stderr=subprocess.PIPE, ) @@ -179,7 +177,7 @@ def kill_cloudflared_processes(): result = subprocess.run( ["taskkill", "/F", "/IM", "cloudflared.exe"], capture_output=True, - text=True + text=True, ) if "SUCCESS" in result.stdout: killed = True @@ -187,7 +185,9 @@ def kill_cloudflared_processes(): pass else: try: - result = subprocess.run(["pkill", "-f", "cloudflared tunnel run"], capture_output=True) + result = subprocess.run( + ["pkill", "-f", "cloudflared tunnel run"], capture_output=True + ) killed = result.returncode == 0 except Exception: pass @@ -199,20 +199,15 @@ def kill_processes_on_port(port: int) -> 
bool: killed = False if platform.system() == "Windows": try: - result = subprocess.run( - ["netstat", "-ano"], - capture_output=True, - text=True - ) - for line in result.stdout.split('\n'): + result = subprocess.run(["netstat", "-ano"], capture_output=True, text=True) + for line in result.stdout.split("\n"): if f":{port}" in line and "LISTENING" in line: parts = line.split() if len(parts) >= 5: pid = parts[-1] try: subprocess.run( - ["taskkill", "/F", "/PID", pid], - capture_output=True + ["taskkill", "/F", "/PID", pid], capture_output=True ) killed = True except Exception: @@ -222,11 +217,9 @@ def kill_processes_on_port(port: int) -> bool: else: try: result = subprocess.run( - ["lsof", "-ti", f":{port}"], - capture_output=True, - text=True + ["lsof", "-ti", f":{port}"], capture_output=True, text=True ) - for pid in result.stdout.strip().split('\n'): + for pid in result.stdout.strip().split("\n"): if pid: subprocess.run(["kill", "-9", pid], capture_output=True) killed = True @@ -275,7 +268,7 @@ def install_cloudflared() -> bool: response = requests.get(url, stream=True, timeout=60) response.raise_for_status() - with open(dest, 'wb') as f: + with open(dest, "wb") as f: for chunk in response.iter_content(chunk_size=8192): f.write(chunk) @@ -289,7 +282,7 @@ def install_cloudflared() -> bool: print("\n ~/.local/bin is not in your PATH.") try: response = input(" Add to ~/.bashrc? (y/n): ").strip().lower() - if response == 'y': + if response == "y": if add_to_bashrc(): print(" Added to ~/.bashrc") print(" Run: source ~/.bashrc (or restart terminal)") @@ -315,17 +308,19 @@ def is_local_bin_in_path() -> bool: def add_to_bashrc() -> bool: """Add ~/.local/bin to PATH in ~/.bashrc.""" bashrc = Path.home() / ".bashrc" - export_line = '\n# Added by simple-mcp-server\nexport PATH="$HOME/.local/bin:$PATH"\n' + export_line = ( + '\n# Added by simple-mcp-server\nexport PATH="$HOME/.local/bin:$PATH"\n' + ) try: # Check if already added if bashrc.exists(): content = bashrc.read_text() - if '.local/bin' in content: + if ".local/bin" in content: return True # Already there # Append to bashrc - with open(bashrc, 'a') as f: + with open(bashrc, "a") as f: f.write(export_line) return True except Exception: @@ -357,14 +352,14 @@ def ensure_cloudflared() -> bool: local_bin = CLOUDFLARED_INSTALL_DIR / "cloudflared" if local_bin.exists(): print(f"\n[INFO] cloudflared found at {local_bin}") - print(f" Add to PATH: export PATH=\"$HOME/.local/bin:$PATH\"") + print(' Add to PATH: export PATH="$HOME/.local/bin:$PATH"') return True # Auto-install on Linux/macOS if platform.system() in ("Linux", "Darwin"): print("\n[INFO] cloudflared not found. 
Installing automatically...") if install_cloudflared(): - print(f"\n[INFO] Add to PATH: export PATH=\"$HOME/.local/bin:$PATH\"") + print('\n[INFO] Add to PATH: export PATH="$HOME/.local/bin:$PATH"') print(" Or add to ~/.bashrc for permanent PATH update.\n") return True @@ -373,6 +368,7 @@ def ensure_cloudflared() -> bool: # ============== Daemon Functions ============== + def save_pid(pid: int): """Save daemon PID to file.""" CONFIG_DIR.mkdir(parents=True, exist_ok=True) @@ -400,9 +396,7 @@ def is_process_running(pid: int) -> bool: if platform.system() == "Windows": try: result = subprocess.run( - ["tasklist", "/FI", f"PID eq {pid}"], - capture_output=True, - text=True + ["tasklist", "/FI", f"PID eq {pid}"], capture_output=True, text=True ) return str(pid) in result.stdout except Exception: @@ -465,13 +459,14 @@ def daemonize(): sys.stderr.flush() CONFIG_DIR.mkdir(parents=True, exist_ok=True) - log_fd = open(LOG_FILE, 'a') + log_fd = open(LOG_FILE, "a") os.dup2(log_fd.fileno(), sys.stdout.fileno()) os.dup2(log_fd.fileno(), sys.stderr.fileno()) # ============== CLI Commands ============== + def cmd_start(): """Start the MCP server in background.""" from setup import run_login_flow @@ -505,7 +500,9 @@ def cmd_start(): # Check cloudflared (auto-install on Linux/macOS if needed) if not ensure_cloudflared(): print("\n[ERROR] cloudflared not found and auto-install failed.") - print(" Install manually: https://developers.cloudflare.com/cloudflare-one/connections/connect-apps/install-and-setup/installation/") + print( + " Install manually: https://developers.cloudflare.com/cloudflare-one/connections/connect-apps/install-and-setup/installation/" + ) sys.exit(1) # Warn about cloudflared service @@ -567,10 +564,11 @@ def cmd_start(): cmd = [sys.executable, str(script_path), "_daemon"] proc = subprocess.Popen( cmd, - creationflags=subprocess.CREATE_NEW_PROCESS_GROUP | subprocess.DETACHED_PROCESS, - stdout=open(LOG_FILE, 'a'), + creationflags=subprocess.CREATE_NEW_PROCESS_GROUP + | subprocess.DETACHED_PROCESS, + stdout=open(LOG_FILE, "a"), stderr=subprocess.STDOUT, - stdin=subprocess.DEVNULL + stdin=subprocess.DEVNULL, ) save_pid(proc.pid) print(f"Server started in background (PID: {proc.pid})") @@ -580,6 +578,7 @@ def cmd_start(): if pid > 0: # Parent process - wait a moment then exit import time + time.sleep(1) # Give child time to start # Read the PID saved by child child_pid = read_pid() @@ -605,7 +604,7 @@ def cmd_start(): sys.stdout.flush() sys.stderr.flush() CONFIG_DIR.mkdir(parents=True, exist_ok=True) - log_fd = open(LOG_FILE, 'a') + log_fd = open(LOG_FILE, "a") os.dup2(log_fd.fileno(), sys.stdout.fileno()) os.dup2(log_fd.fileno(), sys.stderr.fileno()) @@ -680,7 +679,7 @@ def cmd_login(): if config.is_valid(): print(f"\nAlready logged in as: {config.email}") response = input("Re-login with a different account? 
[y/N]: ").strip().lower() - if response != 'y': + if response != "y": print("Login cancelled.") return @@ -714,15 +713,15 @@ def cmd_status(): # Account print("\n[Account]") if config.is_valid(): - print(f" Status: Logged in") + print(" Status: Logged in") print(f" Email: {config.email}") print(f" User ID: {config.user_id[:8]}...") if SUPABASE_URL and SUPABASE_ANON_KEY: user_info = fetch_user_info(config.access_token) if user_info: - if user_info.get('name'): + if user_info.get("name"): print(f" Name: {user_info['name']}") - if user_info.get('organization'): + if user_info.get("organization"): print(f" Org: {user_info['organization']}") else: print(" Status: Not logged in") @@ -731,7 +730,7 @@ def cmd_status(): # Tunnel print("\n[Tunnel]") if config.has_tunnel(): - print(f" Status: Configured") + print(" Status: Configured") print(f" Name: {config.robot_name}") print(f" URL: {config.tunnel_url}") print() @@ -756,14 +755,16 @@ def cmd_status(): print("\n[Cloudflared]") local_bin = CLOUDFLARED_INSTALL_DIR / "cloudflared" if check_cloudflared(): - print(f" Status: Installed (system)") + print(" Status: Installed (system)") print(f" Path: {shutil.which('cloudflared')}") elif local_bin.exists(): - print(f" Status: Installed (local)") + print(" Status: Installed (local)") print(f" Path: {local_bin}") else: print(" Status: Not installed") - print(" Install: https://developers.cloudflare.com/cloudflare-one/connections/connect-apps/install-and-setup/installation/") + print( + " Install: https://developers.cloudflare.com/cloudflare-one/connections/connect-apps/install-and-setup/installation/" + ) # Show tunnel process status if check_cloudflared_process(): @@ -787,11 +788,11 @@ def cmd_status(): def cmd_verify(): """Comprehensive verification of server, tunnel, and connectivity.""" config = load_config() - + print("\n" + "=" * 70) print(" Comprehensive Tunnel & Server Verification") print("=" * 70) - + # Initialize results tracking results = { "config": False, @@ -799,9 +800,9 @@ def cmd_verify(): "cloudflared": False, "tunnel_auth": False, "dns": False, - "tunnel_endpoints": False + "tunnel_endpoints": False, } - + # ========== 1. Configuration Check ========== print("\n[1] Configuration") print("-" * 70) @@ -811,55 +812,55 @@ def cmd_verify(): print("\n" + "=" * 70) return results["config"] = True - print(f" ✓ Configuration found") + print(" ✓ Configuration found") print(f" Robot Name: {config.robot_name}") print(f" Tunnel URL: {config.tunnel_url}") - print(f" Token: {'Present' if config.tunnel_token else 'Missing'} ({len(config.tunnel_token) if config.tunnel_token else 0} chars)") - + print( + f" Token: {'Present' if config.tunnel_token else 'Missing'} ({len(config.tunnel_token) if config.tunnel_token else 0} chars)" + ) + tunnel_url = config.tunnel_url from urllib.parse import urlparse + parsed = urlparse(tunnel_url) hostname = parsed.hostname - + # ========== 2. 
Local Server Test ========== print("\n[2] Local Server Connection") print("-" * 70) - server_running = False running, pid = is_daemon_running() if running: print(f" ✓ Server process running (PID: {pid})") - server_running = True elif is_server_running(): - print(f" ✓ Server running on port 8766 (not managed)") - server_running = True + print(" ✓ Server running on port 8766 (not managed)") else: - print(f" ✗ Server not running") - print(f" → Run: simple-mcp-server start") + print(" ✗ Server not running") + print(" → Run: simple-mcp-server start") print("\n" + "=" * 70) return - + # Test local HTTP connection try: response = requests.get("http://localhost:8766/health", timeout=2) if response.status_code == 200: - print(f" ✓ Local server responding") + print(" ✓ Local server responding") print(f" http://localhost:8766/health → HTTP {response.status_code}") results["server_local"] = True else: print(f" ⚠ Local server responding with HTTP {response.status_code}") except requests.exceptions.ConnectionError: - print(f" ✗ Cannot connect to localhost:8766") - print(f" → Check if server is actually running") + print(" ✗ Cannot connect to localhost:8766") + print(" → Check if server is actually running") except Exception as e: print(f" ✗ Error testing local server: {str(e)[:50]}") - + # ========== 3. Cloudflared Process ========== print("\n[3] Cloudflared Process") print("-" * 70) if check_cloudflared_process(): - print(f" ✓ Cloudflared process running") + print(" ✓ Cloudflared process running") results["cloudflared"] = True - + # Check cloudflared version cloudflared_path = get_cloudflared_path() try: @@ -867,49 +868,49 @@ def cmd_verify(): [cloudflared_path, "--version"], capture_output=True, text=True, - timeout=5 + timeout=5, ) if result.returncode == 0: - version = result.stdout.split('\n')[0] if result.stdout else "unknown" + version = result.stdout.split("\n")[0] if result.stdout else "unknown" print(f" Version: {version}") except Exception: pass else: - print(f" ✗ Cloudflared process not running") - print(f" → Run: simple-mcp-server start") + print(" ✗ Cloudflared process not running") + print(" → Run: simple-mcp-server start") print("\n" + "=" * 70) return - + # ========== 4. 
Tunnel Authentication & Status ========== print("\n[4] Tunnel Authentication & Status") print("-" * 70) logs = get_cloudflared_logs(100) auth_ok = False connections = 0 - last_config = None - + for line in logs: if "Settings: map[token:" in line: - print(f" ✓ Token loaded by cloudflared") + print(" ✓ Token loaded by cloudflared") auth_ok = True if "Registered tunnel connection" in line: connections += 1 import re - match = re.search(r'connection=([a-f0-9-]+)', line) + + match = re.search(r"connection=([a-f0-9-]+)", line) if match: conn_id = match.group(1)[:8] print(f" ✓ Tunnel connection registered: {conn_id}...") if "Updated to new configuration" in line: import json import re + json_match = re.search(r'config="({[^"]+})"', line) if json_match: try: config_json = json_match.group(1).replace('\\"', '"') config_data = json.loads(config_json) - last_config = config_data if "ingress" in config_data: - print(f" ✓ Configuration received from Cloudflare") + print(" ✓ Configuration received from Cloudflare") for rule in config_data["ingress"]: h = rule.get("hostname", "*") s = rule.get("service", "unknown") @@ -917,76 +918,88 @@ def cmd_verify(): print(f" Rule: {h} → {s}") except Exception: pass - if "ERR" in line and any(x in line.lower() for x in ["auth", "unauthorized", "forbidden", "401", "403"]): - print(f" ✗ Authentication error in logs") + if "ERR" in line and any( + x in line.lower() + for x in ["auth", "unauthorized", "forbidden", "401", "403"] + ): + print(" ✗ Authentication error in logs") print(f" {line.strip()[:80]}") auth_ok = False - + if auth_ok and connections > 0: - print(f" ✓ Authentication: SUCCESS") + print(" ✓ Authentication: SUCCESS") print(f" ✓ Active connections: {connections}") results["tunnel_auth"] = True else: - print(f" ⚠ Authentication status unclear") + print(" ⚠ Authentication status unclear") if not auth_ok: - print(f" → Check cloudflared logs for errors") - + print(" → Check cloudflared logs for errors") + # ========== 5. DNS Resolution ========== print("\n[5] DNS Resolution") print("-" * 70) if hostname: try: import socket + ip_addresses = socket.gethostbyname_ex(hostname) print(f" ✓ DNS record exists for {hostname}") print(f" Resolves to: {', '.join(ip_addresses[2][:3])}") - + # Check if Cloudflare IPs - cf_ips = [ip for ip in ip_addresses[2] if any(ip.startswith(p) for p in ['104.', '172.', '198.', '173.'])] + cf_ips = [ + ip + for ip in ip_addresses[2] + if any(ip.startswith(p) for p in ["104.", "172.", "198.", "173."]) + ] if cf_ips: - print(f" → Cloudflare IP detected ✓") + print(" → Cloudflare IP detected ✓") else: - print(f" ⚠ IPs don't look like Cloudflare") - + print(" ⚠ IPs don't look like Cloudflare") + results["dns"] = True except socket.gaierror as e: print(f" ✗ DNS resolution failed: {e}") print(f" Domain: {hostname}") - print(f" → DNS record missing! Add CNAME in Cloudflare:") + print(" → DNS record missing! Add CNAME in Cloudflare:") print(f" Name: {config.robot_name}") - print(f" Target: .cfargotunnel.com") - print(f" Proxy: Proxied (orange cloud) ✓") + print(" Target: .cfargotunnel.com") + print(" Proxy: Proxied (orange cloud) ✓") results["dns"] = False except Exception as e: print(f" ✗ DNS check error: {e}") results["dns"] = False else: - print(f" ✗ Invalid tunnel URL") + print(" ✗ Invalid tunnel URL") results["dns"] = False - + # ========== 6. 
Tunnel Endpoint Tests ========== print("\n[6] Tunnel Endpoint Tests") print("-" * 70) if not results["dns"]: - print(f" ⚠ Skipping tunnel tests (DNS not configured)") - print(f" → Fix DNS first, then re-run verify") + print(" ⚠ Skipping tunnel tests (DNS not configured)") + print(" → Fix DNS first, then re-run verify") else: endpoints = [ ("/", "Root endpoint"), ("/health", "Health check"), ] - + all_endpoints_ok = True for endpoint, description in endpoints: url = f"{tunnel_url}{endpoint}" try: response = requests.get(url, timeout=15, allow_redirects=True) if response.status_code == 200: - print(f" ✓ {endpoint:15} {description:20} HTTP {response.status_code}") + print( + f" ✓ {endpoint:15} {description:20} HTTP {response.status_code}" + ) else: - print(f" ⚠ {endpoint:15} {description:20} HTTP {response.status_code}") + print( + f" ⚠ {endpoint:15} {description:20} HTTP {response.status_code}" + ) if response.status_code in [502, 503]: - print(f" → Cloudflared can't reach localhost:8766") + print(" → Cloudflared can't reach localhost:8766") all_endpoints_ok = False except requests.exceptions.Timeout: print(f" ✗ {endpoint:15} {description:20} Timeout (15s)") @@ -995,11 +1008,11 @@ def cmd_verify(): error_msg = str(e) print(f" ✗ {endpoint:15} {description:20} Connection failed") if "Name or service not known" in error_msg or "nodename" in error_msg: - print(f" → DNS resolution issue") + print(" → DNS resolution issue") elif "Connection refused" in error_msg: - print(f" → Tunnel not accepting connections") + print(" → Tunnel not accepting connections") elif "Max retries" in error_msg: - print(f" → Cannot reach tunnel endpoint") + print(" → Cannot reach tunnel endpoint") all_endpoints_ok = False except requests.exceptions.SSLError as e: print(f" ✗ {endpoint:15} {description:20} SSL Error") @@ -1008,46 +1021,52 @@ def cmd_verify(): except Exception as e: print(f" ✗ {endpoint:15} {description:20} Error: {str(e)[:50]}") all_endpoints_ok = False - + results["tunnel_endpoints"] = all_endpoints_ok - + # ========== Summary ========== print("\n" + "=" * 70) print(" Verification Summary") print("=" * 70) - + total_checks = len(results) passed_checks = sum(1 for v in results.values() if v) - + print(f"\n Checks passed: {passed_checks}/{total_checks}") - print(f"\n Status:") + print("\n Status:") print(f" [1] Configuration: {'✓' if results['config'] else '✗'}") print(f" [2] Local Server: {'✓' if results['server_local'] else '✗'}") print(f" [3] Cloudflared Process: {'✓' if results['cloudflared'] else '✗'}") print(f" [4] Tunnel Authentication: {'✓' if results['tunnel_auth'] else '✗'}") print(f" [5] DNS Resolution: {'✓' if results['dns'] else '✗'}") - print(f" [6] Tunnel Endpoints: {'✓' if results['tunnel_endpoints'] else '✗'}") - + print( + f" [6] Tunnel Endpoints: {'✓' if results['tunnel_endpoints'] else '✗'}" + ) + if all(results.values()): - print(f"\n ✓ All checks passed! Your MCP server is fully operational.") - print(f"\n Access your server at:") + print("\n ✓ All checks passed! Your MCP server is fully operational.") + print("\n Access your server at:") print(f" {tunnel_url}/mcp") print(f" {tunnel_url}/sse") else: - print(f"\n ⚠ Some checks failed. See details above.") - print(f"\n Next steps:") + print("\n ⚠ Some checks failed. See details above.") + print("\n Next steps:") if not results["dns"]: - print(f" 1. 
Add DNS CNAME record in Cloudflare dashboard") - print(f" → Go to: https://dash.cloudflare.com") - print(f" → Domain: robotmcp.ai → DNS → Records") - print(f" → Add: {config.robot_name} CNAME → .cfargotunnel.com") + print(" 1. Add DNS CNAME record in Cloudflare dashboard") + print(" → Go to: https://dash.cloudflare.com") + print(" → Domain: robotmcp.ai → DNS → Records") + print( + f" → Add: {config.robot_name} CNAME → .cfargotunnel.com" + ) if not results["tunnel_endpoints"] and results["dns"]: - print(f" 1. Check cloudflared logs: tail -f ~/.simple-mcp-server/cloudflared.log") - print(f" 2. Verify server is running: curl http://localhost:8766/health") + print( + " 1. Check cloudflared logs: tail -f ~/.simple-mcp-server/cloudflared.log" + ) + print(" 2. Verify server is running: curl http://localhost:8766/health") if not results["server_local"]: - print(f" 1. Start server: simple-mcp-server start") - print(f" 2. Re-run verification: simple-mcp-server verify") - + print(" 1. Start server: simple-mcp-server start") + print(" 2. Re-run verification: simple-mcp-server verify") + print("=" * 70 + "\n") @@ -1124,6 +1143,7 @@ def cmd_help(): # ============== Main Entry Point ============== + def main(): """Main entry point for CLI.""" # Internal command for Windows daemon subprocess @@ -1152,15 +1172,25 @@ def main(): simple-mcp-server start simple-mcp-server status simple-mcp-server stop -""" +""", ) parser.add_argument( "command", nargs="?", default="start", - choices=["start", "stop", "restart", "status", "login", "logout", "verify", "version", "help"], - help="Command to run (default: start)" + choices=[ + "start", + "stop", + "restart", + "status", + "login", + "logout", + "verify", + "version", + "help", + ], + help="Command to run (default: start)", ) # Legacy flags for backward compatibility diff --git a/config.py b/config.py index eae3065..0f89421 100644 --- a/config.py +++ b/config.py @@ -1,4 +1,5 @@ """Config management for simple-mcp-server.""" + import json import os from pathlib import Path @@ -65,7 +66,9 @@ def load_config() -> Config: return Config() -def save_config(user_id: str, email: str, access_token: str, refresh_token: str = None) -> None: +def save_config( + user_id: str, email: str, access_token: str, refresh_token: str = None +) -> None: """Save config to file.""" CONFIG_DIR.mkdir(parents=True, exist_ok=True) diff --git a/logging_config.py b/logging_config.py index 3aca8ea..583a826 100644 --- a/logging_config.py +++ b/logging_config.py @@ -7,14 +7,12 @@ """ import atexit -import json import logging import os import re import sys import threading import time -from datetime import datetime, timezone from queue import Queue, Empty from typing import Optional @@ -31,7 +29,7 @@ def format(self, record: logging.LogRecord) -> dict: # Extract tag from message if present: [TAG] message tag = None message = record.getMessage() - tag_match = re.match(r'\[([A-Z_]+)\]\s*(.*)', message) + tag_match = re.match(r"\[([A-Z_]+)\]\s*(.*)", message) if tag_match: tag = tag_match.group(1) message = tag_match.group(2) @@ -47,7 +45,7 @@ def format(self, record: logging.LogRecord) -> dict: "extra": { "function": record.funcName, "line": record.lineno, - } + }, } # Add exception info if present @@ -62,8 +60,7 @@ class PlainFormatter(logging.Formatter): def __init__(self): super().__init__( - fmt='%(asctime)s [%(levelname)s] %(message)s', - datefmt='%Y-%m-%d %H:%M:%S' + fmt="%(asctime)s [%(levelname)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S" ) @@ -81,10 +78,18 @@ class 
SupabaseFilter(logging.Filter): """ # Modules to exclude (library noise) - EXCLUDED_MODULES = {'_client', 'server', 'streamable_http_manager'} + EXCLUDED_MODULES = {"_client", "server", "streamable_http_manager"} # Tags we want to keep (business events) - ALLOWED_TAGS = {'TOOL', 'LOGIN', 'CONSENT', 'REGISTER', 'TOKEN', 'AUTHORIZE', 'STARTUP'} + ALLOWED_TAGS = { + "TOOL", + "LOGIN", + "CONSENT", + "REGISTER", + "TOKEN", + "AUTHORIZE", + "STARTUP", + } def filter(self, record: logging.LogRecord) -> bool: # Always allow WARNING and above @@ -97,11 +102,11 @@ def filter(self, record: logging.LogRecord) -> bool: # Check for tag in message message = record.getMessage() - tag_match = re.match(r'\[([A-Z_]+)\]', message) + tag_match = re.match(r"\[([A-Z_]+)\]", message) if tag_match: tag = tag_match.group(1) # Skip AUTH "Request authorized" (too noisy - logs every request) - if tag == 'AUTH' and 'Request authorized' in message: + if tag == "AUTH" and "Request authorized" in message: return False return tag in self.ALLOWED_TAGS @@ -153,7 +158,7 @@ def emit(self, record: logging.LogRecord): "tag": None, "message": record.getMessage(), "module": record.module, - "extra": {} + "extra": {}, } self._queue.put(log_entry) @@ -203,7 +208,7 @@ def close(self): def setup_logging( robot_name: str = None, user_id: str = None, - supabase_client = None, + supabase_client=None, ) -> logging.Logger: """Configure logging with optional Supabase integration. diff --git a/main.py b/main.py index 91a7d86..18f549a 100644 --- a/main.py +++ b/main.py @@ -11,6 +11,7 @@ MCP clients (ChatGPT, Claude, etc.) connect directly to this server via Cloudflare tunnel. Railway is NOT involved in MCP traffic. """ + import os import logging from importlib.metadata import version as get_version @@ -62,7 +63,8 @@ local_config = load_config() # Initialize logging with Supabase support (centralized log collection) -from logging_config import setup_logging +from logging_config import setup_logging # noqa: E402 + setup_logging( robot_name=local_config.robot_name, user_id=local_config.user_id, @@ -70,18 +72,22 @@ ) logger = logging.getLogger(__name__) -logger.info(f"[STARTUP] Config loaded - valid: {local_config.is_valid()}, email: {local_config.email}") +logger.info( + f"[STARTUP] Config loaded - valid: {local_config.is_valid()}, email: {local_config.email}" +) # SERVER_URL: Use tunnel URL if available (for local MCP server), otherwise fallback to env/default # This is critical for OAuth - MCP clients need to authenticate on THIS server, not Railway -SERVER_URL = local_config.tunnel_url or os.getenv("SERVER_URL", "https://simplemcpserver-production-e610.up.railway.app") +SERVER_URL = local_config.tunnel_url or os.getenv( + "SERVER_URL", "https://simplemcpserver-production-e610.up.railway.app" +) logger.info(f"[STARTUP] SERVER_URL: {SERVER_URL}") logger.info(f"[STARTUP] OAuth enabled: {ENABLE_OAUTH}") logger.info("[STARTUP] Submodule auto-discovery enabled") # ============== FastMCP Server ============== -from fastmcp import FastMCP -from submodule_integration import register_all_submodules +from fastmcp import FastMCP # noqa: E402 +from submodule_integration import register_all_submodules # noqa: E402 # Create MCP instance mcp = FastMCP("simple-mcp-server") @@ -90,7 +96,7 @@ register_all_submodules(mcp) # ============== OAuth Authentication Middleware for MCP ============== -from oauth.middleware import MCPOAuthMiddleware +from oauth.middleware import MCPOAuthMiddleware # noqa: E402 # ============== Streamable HTTP MCP App ============== # 
Create FastMCP app with OAuth middleware BEFORE FastAPI app @@ -103,7 +109,7 @@ mcp_http_app = mcp.http_app( path="/", # Route at root of mounted app transport="streamable-http", - middleware=[Middleware(MCPOAuthMiddleware)] if ENABLE_OAUTH else [] + middleware=[Middleware(MCPOAuthMiddleware)] if ENABLE_OAUTH else [], ) # ============== FastAPI App ============== @@ -132,11 +138,13 @@ # OAuth endpoints (optional) if ENABLE_OAUTH: from oauth.endpoints import router as oauth_router, init_oauth_routes + init_oauth_routes(SERVER_URL, supabase) app.include_router(oauth_router) # Legacy SSE endpoints -from sse import router as sse_router, init_sse_routes +from sse import router as sse_router, init_sse_routes # noqa: E402 + init_sse_routes(SERVER_URL, local_config, mcp) app.include_router(sse_router) @@ -145,6 +153,7 @@ # ============== Server Info Endpoints ============== + @app.get("/health") async def health_check(): """Health check endpoint for Railway.""" @@ -167,12 +176,12 @@ async def root(): "fallback": "/sse (use if /mcp doesn't work)", }, "tools": "Auto-discovered from submodules", - "oauth_enabled": ENABLE_OAUTH + "oauth_enabled": ENABLE_OAUTH, } if ENABLE_OAUTH: response["oauth"] = { "protected_resource": f"{SERVER_URL}/.well-known/oauth-protected-resource", - "authorization_server": f"{SERVER_URL}/.well-known/oauth-authorization-server" + "authorization_server": f"{SERVER_URL}/.well-known/oauth-authorization-server", } return response @@ -181,7 +190,8 @@ async def root(): if __name__ == "__main__": import uvicorn + logger.info(f"Starting MCP server with transport: {MCP_TRANSPORT}") - logger.info(f"Streamable HTTP endpoint: /mcp") - logger.info(f"Legacy SSE endpoint: /sse") + logger.info("Streamable HTTP endpoint: /mcp") + logger.info("Legacy SSE endpoint: /sse") uvicorn.run(app, host=MCP_HOST, port=MCP_PORT) diff --git a/oauth/endpoints.py b/oauth/endpoints.py index 9d73185..b653991 100644 --- a/oauth/endpoints.py +++ b/oauth/endpoints.py @@ -52,6 +52,7 @@ def init_oauth_routes(server_url: str, supabase_client): # ============== OAuth 2.1 Discovery Endpoints ============== + @router.get("/.well-known/oauth-protected-resource") async def oauth_protected_resource(): """OAuth 2.0 Protected Resource Metadata (RFC 9728).""" @@ -60,7 +61,7 @@ async def oauth_protected_resource(): "authorization_servers": [_server_url], "scopes_supported": ["mcp:tools", "mcp:read"], "bearer_methods_supported": ["header"], - "resource_documentation": f"{_server_url}/docs" + "resource_documentation": f"{_server_url}/docs", } @@ -78,51 +79,60 @@ async def oauth_authorization_server(): "grant_types_supported": ["authorization_code", "refresh_token"], "token_endpoint_auth_methods_supported": ["none", "client_secret_post"], "code_challenge_methods_supported": ["S256"], - "service_documentation": f"{_server_url}/docs" + "service_documentation": f"{_server_url}/docs", } # ============== Client Registration ============== + @router.post("/register") async def register_client(request: Request): """OAuth 2.0 Dynamic Client Registration (RFC 7591).""" try: data = await request.json() - except: + except Exception: data = {} client_id = secrets.token_urlsafe(24) client_secret = secrets.token_urlsafe(32) - logger.info(f"[REGISTER] Client registration request: {data.get('client_name', 'MCP Client')}") + logger.info( + f"[REGISTER] Client registration request: {data.get('client_name', 'MCP Client')}" + ) client_info = { "client_id": client_id, "client_secret": client_secret, "client_name": data.get("client_name", "MCP 
Client"), - "redirect_uris": data.get("redirect_uris", ["https://chatgpt.com/connector_platform_oauth_redirect"]), + "redirect_uris": data.get( + "redirect_uris", ["https://chatgpt.com/connector_platform_oauth_redirect"] + ), "grant_types": data.get("grant_types", ["authorization_code", "refresh_token"]), "response_types": data.get("response_types", ["code"]), "token_endpoint_auth_method": data.get("token_endpoint_auth_method", "none"), - "created_at": int(time.time()) + "created_at": int(time.time()), } registered_clients[client_id] = client_info logger.info(f"[REGISTER] Client registered: {client_id[:8]}...") - return JSONResponse({ - "client_id": client_id, - "client_secret": client_secret, - "client_name": client_info["client_name"], - "redirect_uris": client_info["redirect_uris"], - "grant_types": client_info["grant_types"], - "response_types": client_info["response_types"], - "token_endpoint_auth_method": client_info["token_endpoint_auth_method"] - }, status_code=201) + return JSONResponse( + { + "client_id": client_id, + "client_secret": client_secret, + "client_name": client_info["client_name"], + "redirect_uris": client_info["redirect_uris"], + "grant_types": client_info["grant_types"], + "response_types": client_info["response_types"], + "token_endpoint_auth_method": client_info["token_endpoint_auth_method"], + }, + status_code=201, + ) # ============== Authorization Flow ============== + @router.get("/authorize") async def authorize( request: Request, @@ -132,10 +142,12 @@ async def authorize( scope: str = "mcp:tools", state: str = "", code_challenge: str = "", - code_challenge_method: str = "S256" + code_challenge_method: str = "S256", ): """OAuth 2.0 Authorization Endpoint - redirects to login.""" - logger.info(f"[AUTHORIZE] Authorization request: client_id={client_id[:8] if client_id else 'none'}..., scope={scope}") + logger.info( + f"[AUTHORIZE] Authorization request: client_id={client_id[:8] if client_id else 'none'}..., scope={scope}" + ) if response_type != "code": return JSONResponse({"error": "unsupported_response_type"}, status_code=400) @@ -149,9 +161,11 @@ async def authorize( "code_challenge": code_challenge, "code_challenge_method": code_challenge_method, "created_at": int(time.time()), - "expires_at": int(time.time()) + 600 # 10 minutes + "expires_at": int(time.time()) + 600, # 10 minutes } - logger.info(f"[AUTHORIZE] Session created: {session_id[:8]}..., redirecting to login") + logger.info( + f"[AUTHORIZE] Session created: {session_id[:8]}..., redirecting to login" + ) # Redirect to login page return RedirectResponse(url=f"/login?session={session_id}", status_code=302) @@ -160,7 +174,9 @@ async def authorize( @router.get("/login") async def login_page(session: str = "", registered: str = ""): """Show login form.""" - logger.info(f"[LOGIN] Login page requested: session={session[:8] if session else 'none'}...") + logger.info( + f"[LOGIN] Login page requested: session={session[:8] if session else 'none'}..." + ) if not session or session not in pending_authorizations: return HTMLResponse("

<h2>Invalid or expired session</h2>
", status_code=400) @@ -168,21 +184,25 @@ async def login_page(session: str = "", registered: str = ""): auth_data = pending_authorizations[session] if time.time() > auth_data["expires_at"]: del pending_authorizations[session] - return HTMLResponse("

<h2>Session expired. Please try again.</h2>
", status_code=400) + return HTMLResponse( + "

<h2>Session expired. Please try again.</h2>
", status_code=400 + ) # Show success message if user just registered success_msg = "" if registered == "1": - success_msg = '
<div class="success">Account created successfully! Please sign in.</div>
' + success_msg = ( + '
<div class="success">Account created successfully! Please sign in.</div>
' + ) - return HTMLResponse(LOGIN_PAGE.format(session=session, error="", success=success_msg)) + return HTMLResponse( + LOGIN_PAGE.format(session=session, error="", success=success_msg) + ) @router.post("/login") async def login_submit( - session: str = Form(...), - email: str = Form(...), - password: str = Form(...) + session: str = Form(...), email: str = Form(...), password: str = Form(...) ): """Handle login form submission.""" if not session or session not in pending_authorizations: @@ -191,7 +211,9 @@ async def login_submit( auth_data = pending_authorizations[session] if time.time() > auth_data["expires_at"]: del pending_authorizations[session] - return HTMLResponse("

<h2>Session expired. Please try again.</h2>
", status_code=400) + return HTMLResponse( + "

<h2>Session expired. Please try again.</h2>
", status_code=400 + ) # Authenticate with Supabase if not _supabase: @@ -200,32 +222,41 @@ async def login_submit( return RedirectResponse(url=f"/consent?session={session}", status_code=302) try: - response = _supabase.auth.sign_in_with_password({ - "email": email, - "password": password - }) + response = _supabase.auth.sign_in_with_password( + {"email": email, "password": password} + ) if response.user: logger.info(f"[LOGIN] User authenticated: {response.user.email}") authenticated_sessions[session] = { "email": response.user.email, - "user_id": response.user.id + "user_id": response.user.id, } return RedirectResponse(url=f"/consent?session={session}", status_code=302) else: - logger.info(f"[LOGIN] Login failed: invalid credentials for session {session[:8]}...") + logger.info( + f"[LOGIN] Login failed: invalid credentials for session {session[:8]}..." + ) error_html = '
<div class="error">Invalid email or password</div>
' - return HTMLResponse(LOGIN_PAGE.format(session=session, error=error_html, success="")) + return HTMLResponse( + LOGIN_PAGE.format(session=session, error=error_html, success="") + ) except Exception as e: - logger.info(f"[LOGIN] Login failed: authentication error for session {session[:8]}...") + logger.info( + f"[LOGIN] Login failed: authentication error for session {session[:8]}..." + ) error_html = f'
<div class="error">Authentication failed: {str(e)}</div>
' - return HTMLResponse(LOGIN_PAGE.format(session=session, error=error_html, success="")) + return HTMLResponse( + LOGIN_PAGE.format(session=session, error=error_html, success="") + ) @router.get("/signup") async def signup_page(session: str = ""): """Show signup form.""" - logger.info(f"[SIGNUP] Signup page requested: session={session[:8] if session else 'none'}...") + logger.info( + f"[SIGNUP] Signup page requested: session={session[:8] if session else 'none'}..." + ) if not session or session not in pending_authorizations: return HTMLResponse("

<h2>Invalid or expired session</h2>
", status_code=400) @@ -233,7 +264,9 @@ async def signup_page(session: str = ""): auth_data = pending_authorizations[session] if time.time() > auth_data["expires_at"]: del pending_authorizations[session] - return HTMLResponse("

<h2>Session expired. Please try again.</h2>
", status_code=400) + return HTMLResponse( + "

<h2>Session expired. Please try again.</h2>
", status_code=400 + ) return HTMLResponse(SIGNUP_PAGE.format(session=session, error="")) @@ -243,7 +276,7 @@ async def signup_submit( session: str = Form(...), email: str = Form(...), password: str = Form(...), - confirm_password: str = Form(...) + confirm_password: str = Form(...), ): """Handle signup form submission.""" if not session or session not in pending_authorizations: @@ -252,7 +285,9 @@ async def signup_submit( auth_data = pending_authorizations[session] if time.time() > auth_data["expires_at"]: del pending_authorizations[session] - return HTMLResponse("

<h2>Session expired. Please try again.</h2>
", status_code=400) + return HTMLResponse( + "

<h2>Session expired. Please try again.</h2>", status_code=400 + ) # Validate passwords match if password != confirm_password: @@ -267,34 +302,44 @@ # Create account with Supabase if not _supabase: # Fallback: just redirect to login if Supabase not configured - return RedirectResponse(url=f"/login?session={session}&registered=1", status_code=302) + return RedirectResponse( + url=f"/login?session={session}&registered=1", status_code=302 + ) try: - response = _supabase.auth.sign_up({ - "email": email, - "password": password - }) + response = _supabase.auth.sign_up({"email": email, "password": password}) if response.user: logger.info(f"[SIGNUP] Account created: {email}") - return RedirectResponse(url=f"/login?session={session}&registered=1", status_code=302) + return RedirectResponse( + url=f"/login?session={session}&registered=1", status_code=302 + ) else: - logger.info(f"[SIGNUP] Account creation failed for session: {session[:8]}...") + logger.info( + f"[SIGNUP] Account creation failed for session: {session[:8]}..." + ) error_html = '
<div class="error">Failed to create account</div>
' return HTMLResponse(SIGNUP_PAGE.format(session=session, error=error_html)) except Exception as e: error_msg = str(e) if "already registered" in error_msg.lower(): - logger.info(f"[SIGNUP] Signup failed: email already exists for session: {session[:8]}...") - error_html = '
<div class="error">An account with this email already exists</div>
' + logger.info( + f"[SIGNUP] Signup failed: email already exists for session: {session[:8]}..." + ) + error_html = ( + '
<div class="error">An account with this email already exists</div>
' + ) else: - logger.info(f"[SIGNUP] Signup failed: {error_msg[:50]} for session: {session[:8]}...") + logger.info( + f"[SIGNUP] Signup failed: {error_msg[:50]} for session: {session[:8]}..." + ) error_html = f'
<div class="error">Signup failed: {error_msg}</div>
' return HTMLResponse(SIGNUP_PAGE.format(session=session, error=error_html)) # ============== Consent ============== + @router.get("/consent") async def consent_page(session: str = ""): """Show consent/authorization page.""" @@ -306,17 +351,15 @@ async def consent_page(session: str = ""): user_info = authenticated_sessions[session] logger.info(f"[CONSENT] Consent page shown to user: {user_info.get('email')}") - return HTMLResponse(CONSENT_PAGE.format( - session=session, - user_email=user_info.get("email", "Unknown") - )) + return HTMLResponse( + CONSENT_PAGE.format( + session=session, user_email=user_info.get("email", "Unknown") + ) + ) @router.post("/consent") -async def consent_submit( - session: str = Form(...), - action: str = Form(...) -): +async def consent_submit(session: str = Form(...), action: str = Form(...)): """Handle consent form submission.""" from urllib.parse import urlencode @@ -337,7 +380,9 @@ async def consent_submit( params = {"error": "access_denied", "error_description": "User denied access"} if state: params["state"] = state - return RedirectResponse(url=f"{redirect_uri}?{urlencode(params)}", status_code=302) + return RedirectResponse( + url=f"{redirect_uri}?{urlencode(params)}", status_code=302 + ) # User approved - generate authorization code user_info = authenticated_sessions.get(session, {}) @@ -353,9 +398,11 @@ async def consent_submit( "user_id": user_info.get("user_id"), "user_email": user_info.get("email"), "created_at": int(time.time()), - "expires_at": int(time.time()) + 600 # 10 minutes + "expires_at": int(time.time()) + 600, # 10 minutes } - logger.info(f"[CONSENT] Authorization code issued for client: {auth_data['client_id'][:8]}...") + logger.info( + f"[CONSENT] Authorization code issued for client: {auth_data['client_id'][:8]}..." + ) # Clean up session data del pending_authorizations[session] @@ -371,6 +418,7 @@ async def consent_submit( # ============== Token Endpoint ============== + @router.post("/token") async def token( request: Request, @@ -380,7 +428,7 @@ async def token( client_id: str = Form(None), client_secret: str = Form(None), code_verifier: str = Form(None), - refresh_token: str = Form(None) + refresh_token: str = Form(None), ): """OAuth 2.0 Token Endpoint.""" # Handle form data or JSON @@ -389,15 +437,15 @@ async def token( data = await request.json() grant_type = data.get("grant_type") code = data.get("code") - redirect_uri = data.get("redirect_uri") client_id = data.get("client_id") - client_secret = data.get("client_secret") code_verifier = data.get("code_verifier") refresh_token = data.get("refresh_token") - except: + except Exception: return JSONResponse({"error": "invalid_request"}, status_code=400) - logger.info(f"[TOKEN] Token request: grant_type={grant_type}, client_id={client_id[:8] if client_id else 'none'}...") + logger.info( + f"[TOKEN] Token request: grant_type={grant_type}, client_id={client_id[:8] if client_id else 'none'}..." 
+ ) if grant_type == "authorization_code": if not code or code not in authorization_codes: @@ -410,17 +458,30 @@ async def token( if time.time() > auth_data["expires_at"]: del authorization_codes[code] logger.info("[TOKEN] Token request failed: authorization code expired") - return JSONResponse({"error": "invalid_grant", "error_description": "Code expired"}, status_code=400) + return JSONResponse( + {"error": "invalid_grant", "error_description": "Code expired"}, + status_code=400, + ) # Verify PKCE if auth_data.get("code_challenge") and code_verifier: - expected = base64.urlsafe_b64encode( - hashlib.sha256(code_verifier.encode()).digest() - ).rstrip(b"=").decode() + expected = ( + base64.urlsafe_b64encode( + hashlib.sha256(code_verifier.encode()).digest() + ) + .rstrip(b"=") + .decode() + ) if expected != auth_data["code_challenge"]: logger.info("[TOKEN] Token request failed: PKCE verification failed") - return JSONResponse({"error": "invalid_grant", "error_description": "PKCE verification failed"}, status_code=400) + return JSONResponse( + { + "error": "invalid_grant", + "error_description": "PKCE verification failed", + }, + status_code=400, + ) # Generate JWT tokens (stateless - no storage needed) user_id = auth_data.get("user_id") or "" @@ -434,7 +495,7 @@ async def token( client_id=client_id or "", scope=scope, issuer=_server_url, - expires_in=expires_in + expires_in=expires_in, ) new_refresh_token = create_refresh_token( @@ -442,7 +503,7 @@ async def token( user_email=user_email, client_id=client_id or "", scope=scope, - issuer=_server_url + issuer=_server_url, ) logger.info(f"[TOKEN] JWT access token created for user: {user_email}") @@ -450,13 +511,15 @@ async def token( # Clean up used code del authorization_codes[code] - return JSONResponse({ - "access_token": new_access_token, - "token_type": "Bearer", - "expires_in": expires_in, - "refresh_token": new_refresh_token, - "scope": auth_data["scope"] - }) + return JSONResponse( + { + "access_token": new_access_token, + "token_type": "Bearer", + "expires_in": expires_in, + "refresh_token": new_refresh_token, + "scope": auth_data["scope"], + } + ) elif grant_type == "refresh_token": # Verify the refresh token (JWT-based, stateless) @@ -464,7 +527,13 @@ async def token( if not token_data: logger.info("[TOKEN] Refresh token failed: invalid or expired token") - return JSONResponse({"error": "invalid_grant", "error_description": "Invalid or expired refresh token"}, status_code=400) + return JSONResponse( + { + "error": "invalid_grant", + "error_description": "Invalid or expired refresh token", + }, + status_code=400, + ) # Extract user info from the verified token user_id = token_data.get("sub", "") @@ -479,7 +548,7 @@ async def token( client_id=client_id or "", scope=scope, issuer=_server_url, - expires_in=expires_in + expires_in=expires_in, ) new_refresh_token = create_refresh_token( @@ -487,17 +556,21 @@ async def token( user_email=user_email, client_id=client_id or "", scope=scope, - issuer=_server_url + issuer=_server_url, ) - logger.info(f"[TOKEN] JWT refresh successful for user: {user_email}, client: {client_id[:8] if client_id else 'none'}...") + logger.info( + f"[TOKEN] JWT refresh successful for user: {user_email}, client: {client_id[:8] if client_id else 'none'}..." 
+        )
 
-        return JSONResponse({
-            "access_token": new_access_token,
-            "token_type": "Bearer",
-            "expires_in": expires_in,
-            "refresh_token": new_refresh_token,
-            "scope": scope
-        })
+        return JSONResponse(
+            {
+                "access_token": new_access_token,
+                "token_type": "Bearer",
+                "expires_in": expires_in,
+                "refresh_token": new_refresh_token,
+                "scope": scope,
+            }
+        )
 
     return JSONResponse({"error": "unsupported_grant_type"}, status_code=400)
diff --git a/oauth/jwt_utils.py b/oauth/jwt_utils.py
index df0b12a..9b79ef4 100644
--- a/oauth/jwt_utils.py
+++ b/oauth/jwt_utils.py
@@ -73,7 +73,7 @@ def create_access_token(
     client_id: str,
     scope: str,
     issuer: str,
-    expires_in: int = ACCESS_TOKEN_EXPIRE_SECONDS
+    expires_in: int = ACCESS_TOKEN_EXPIRE_SECONDS,
 ) -> str:
     """Create a JWT access token.
 
@@ -94,14 +94,14 @@ def create_access_token(
     now = int(time.time())
 
     payload = {
-        "sub": user_id,           # Subject (user ID) - standard claim
-        "email": user_email,      # User email
-        "client_id": client_id,   # OAuth client
-        "scope": scope,           # OAuth scope
-        "iss": issuer,            # Issuer - standard claim
-        "iat": now,               # Issued at - standard claim
-        "exp": now + expires_in,  # Expiration - standard claim
-        "type": "access"          # Token type
+        "sub": user_id,  # Subject (user ID) - standard claim
+        "email": user_email,  # User email
+        "client_id": client_id,  # OAuth client
+        "scope": scope,  # OAuth scope
+        "iss": issuer,  # Issuer - standard claim
+        "iat": now,  # Issued at - standard claim
+        "exp": now + expires_in,  # Expiration - standard claim
+        "type": "access",  # Token type
     }
 
     token = jwt.encode(payload, secret, algorithm=JWT_ALGORITHM)
@@ -114,7 +114,7 @@ def create_refresh_token(
     client_id: str,
     scope: str,
     issuer: str,
-    expires_in: int = REFRESH_TOKEN_EXPIRE_SECONDS
+    expires_in: int = REFRESH_TOKEN_EXPIRE_SECONDS,
 ) -> str:
     """Create a JWT refresh token.
 
@@ -135,14 +135,14 @@ def create_refresh_token(
     now = int(time.time())
 
     payload = {
-        "sub": user_id,           # Subject (user ID)
-        "email": user_email,      # User email
-        "client_id": client_id,   # OAuth client
-        "scope": scope,           # OAuth scope
-        "iss": issuer,            # Issuer
-        "iat": now,               # Issued at
-        "exp": now + expires_in,  # Expiration
-        "type": "refresh"         # Token type
+        "sub": user_id,  # Subject (user ID)
+        "email": user_email,  # User email
+        "client_id": client_id,  # OAuth client
+        "scope": scope,  # OAuth scope
+        "iss": issuer,  # Issuer
+        "iat": now,  # Issued at
+        "exp": now + expires_in,  # Expiration
+        "type": "refresh",  # Token type
     }
 
     token = jwt.encode(payload, secret, algorithm=JWT_ALGORITHM)
@@ -170,14 +170,11 @@ def verify_access_token(token: str, issuer: str = None) -> Optional[dict]:
                 secret,
                 algorithms=[JWT_ALGORITHM],
                 options=options,
-                issuer=issuer
+                issuer=issuer,
             )
         else:
             payload = jwt.decode(
-                token,
-                secret,
-                algorithms=[JWT_ALGORITHM],
-                options=options
+                token, secret, algorithms=[JWT_ALGORITHM], options=options
             )
 
         # Verify it's an access token
@@ -215,14 +212,11 @@ def verify_refresh_token(token: str, issuer: str = None) -> Optional[dict]:
                 secret,
                 algorithms=[JWT_ALGORITHM],
                 options=options,
-                issuer=issuer
+                issuer=issuer,
             )
         else:
             payload = jwt.decode(
-                token,
-                secret,
-                algorithms=[JWT_ALGORITHM],
-                options=options
+                token, secret, algorithms=[JWT_ALGORITHM], options=options
             )
 
         # Verify it's a refresh token
diff --git a/oauth/middleware.py b/oauth/middleware.py
index e01f00b..8f84742 100644
--- a/oauth/middleware.py
+++ b/oauth/middleware.py
@@ -28,7 +28,9 @@
 def get_server_url() -> str:
     """Get the server URL for OAuth metadata."""
-    return _config.tunnel_url or os.getenv("SERVER_URL", "https://simplemcpserver-production-e610.up.railway.app")
+    return _config.tunnel_url or os.getenv(
+        "SERVER_URL", "https://simplemcpserver-production-e610.up.railway.app"
+    )
 
 
 async def check_shared_access(robot_name: str, user_id: str) -> bool:
@@ -37,7 +39,7 @@ async def check_shared_access(robot_name: str, user_id: str) -> bool:
         async with httpx.AsyncClient(timeout=5.0) as client:
             response = await client.post(
                 f"{ROBOTMCP_CLOUD_URL}/api/check-access",
-                params={"robot_name": robot_name, "user_id": user_id}
+                params={"robot_name": robot_name, "user_id": user_id},
             )
             if response.status_code == 200:
                 data = response.json()
@@ -58,9 +60,14 @@ async def dispatch(self, request: Request, call_next):
         if not auth_header.startswith("Bearer "):
             logger.info("[AUTH] Request rejected: no Bearer token")
             return JSONResponse(
-                {"error": "unauthorized", "error_description": "Missing or invalid Authorization header"},
+                {
+                    "error": "unauthorized",
+                    "error_description": "Missing or invalid Authorization header",
+                },
                 status_code=401,
-                headers={"WWW-Authenticate": f'Bearer resource_metadata="{server_url}/.well-known/oauth-protected-resource"'}
+                headers={
+                    "WWW-Authenticate": f'Bearer resource_metadata="{server_url}/.well-known/oauth-protected-resource"'
+                },
             )
 
         token = auth_header[7:]
@@ -71,9 +78,14 @@ async def dispatch(self, request: Request, call_next):
 
         if not token_data:
             logger.info("[AUTH] Request rejected: invalid or expired token")
             return JSONResponse(
-                {"error": "unauthorized", "error_description": "Invalid or expired token"},
+                {
+                    "error": "unauthorized",
+                    "error_description": "Invalid or expired token",
+                },
                 status_code=401,
-                headers={"WWW-Authenticate": f'Bearer resource_metadata="{server_url}/.well-known/oauth-protected-resource"'}
+                headers={
+                    "WWW-Authenticate": f'Bearer resource_metadata="{server_url}/.well-known/oauth-protected-resource"'
+                },
             )
 
         # Check authorization (creator or shared member)
@@ -88,11 +100,18 @@ async def dispatch(self, request: Request, call_next):
         # Check if user is a shared member via robotmcp-cloud API
         if _config.robot_name:
             if await check_shared_access(_config.robot_name, connecting_user_id):
-                logger.info(f"[AUTH] Request authorized (shared member): {token_data.get('email')}")
+                logger.info(
+                    f"[AUTH] Request authorized (shared member): {token_data.get('email')}"
+                )
                 return await call_next(request)
 
-        logger.warning(f"[AUTH] Access denied: user {connecting_user_id} is not authorized")
+        logger.warning(
+            f"[AUTH] Access denied: user {connecting_user_id} is not authorized"
+        )
         return JSONResponse(
-            {"error": "forbidden", "error_description": "Access denied: not authorized for this server"},
-            status_code=403
+            {
+                "error": "forbidden",
+                "error_description": "Access denied: not authorized for this server",
+            },
+            status_code=403,
         )
diff --git a/setup.py b/setup.py
index b99561f..d4c6129 100644
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,5 @@
 """Setup flow for simple-mcp-server (browser-based login)."""
+
 import os
 import re
 import secrets
@@ -26,7 +27,7 @@ def is_wsl() -> bool:
             version = f.read().lower()
             if "microsoft" in version or "wsl" in version:
                 return True
-    except:
+    except Exception:
         pass
     # Also check WSL_DISTRO_NAME environment variable
     if os.environ.get("WSL_DISTRO_NAME"):
@@ -43,7 +44,7 @@ def open_browser(url: str) -> None:
             result = subprocess.run(
                 ["powershell.exe", "-Command", f'Start-Process "{url}"'],
                 capture_output=True,
-                timeout=5
+                timeout=5,
             )
             if result.returncode == 0:
                 return
@@ -52,11 +53,7 @@ def open_browser(url: str) -> None:
 
         # Fallback: try wslview (from wslu package)
         try:
-            result = subprocess.run(
-                ["wslview", url],
-                capture_output=True,
-                timeout=5
-            )
+            result = subprocess.run(["wslview", url], capture_output=True, timeout=5)
             if result.returncode == 0:
                 return
         except (FileNotFoundError, subprocess.TimeoutExpired):
@@ -65,9 +62,7 @@ def open_browser(url: str) -> None:
 
         # Last resort: cmd.exe with quoted URL
         try:
             subprocess.run(
-                ["cmd.exe", "/c", "start", "", url],
-                capture_output=True,
-                timeout=5
+                ["cmd.exe", "/c", "start", "", url], capture_output=True, timeout=5
             )
             return
         except (FileNotFoundError, subprocess.TimeoutExpired):
@@ -80,7 +75,7 @@ def open_browser(url: str) -> None:
         # Suppress stderr temporarily to hide gio errors
         old_stderr = sys.stderr
         try:
-            sys.stderr = open(os.devnull, 'w')
+            sys.stderr = open(os.devnull, "w")
             webbrowser.open(url)
         finally:
             sys.stderr.close()
@@ -101,18 +96,21 @@ def get_wsl_ip() -> str:
             ips = result.stdout.strip().split()
             if ips:
                 return ips[0]
-    except:
+    except Exception:
         pass
 
     # Fallback: try ip addr to get eth0 IP
     try:
-        result = subprocess.run(["ip", "addr", "show", "eth0"], capture_output=True, text=True)
+        result = subprocess.run(
+            ["ip", "addr", "show", "eth0"], capture_output=True, text=True
+        )
        
         if result.returncode == 0:
             import re
-            match = re.search(r'inet (\d+\.\d+\.\d+\.\d+)', result.stdout)
+
+            match = re.search(r"inet (\d+\.\d+\.\d+\.\d+)", result.stdout)
             if match:
                 return match.group(1)
-    except:
+    except Exception:
         pass
 
     return ""
@@ -217,7 +215,7 @@ def validate_robot_name(name: str) -> tuple[bool, str]:
         return False, "Robot name must be at least 3 characters"
     if len(name) > 32:
         return False, "Robot name must be at most 32 characters"
-    if not re.match(r'^[a-z0-9]+(-[a-z0-9]+)*$', name):
+    if not re.match(r"^[a-z0-9]+(-[a-z0-9]+)*$", name):
         return False, "Use only lowercase letters, numbers, and hyphens"
     return True, ""
@@ -249,9 +247,7 @@ def fetch_servers(access_token: str) -> dict:
     """
     try:
         response = requests.get(
-            f"{SERVER_URL}/servers",
-            params={"access_token": access_token},
-            timeout=30
+            f"{SERVER_URL}/servers", params={"access_token": access_token}, timeout=30
         )
         return response.json()
     except requests.RequestException as e:
@@ -274,7 +270,9 @@ def select_server(owned: list, shared: list) -> dict | None:
         for i, server in enumerate(owned):
             all_servers.append(server)
             status = "active" if server.get("is_active", True) else "inactive"
-            print(f" [{len(all_servers)}] {server['robot_name']}.robotmcp.ai ({status})")
+            print(
+                f" [{len(all_servers)}] {server['robot_name']}.robotmcp.ai ({status})"
+            )
 
     if shared:
         print("\n--- Shared With You ---")
@@ -282,7 +280,7 @@ def select_server(owned: list, shared: list) -> dict | None:
             all_servers.append(server)
             print(f" [{len(all_servers)}] {server['robot_name']}.robotmcp.ai")
 
-    print(f"\n [0] Create a new server")
+    print("\n [0] Create a new server")
     print()
 
     while True:
@@ -300,7 +298,9 @@ def select_server(owned: list, shared: list) -> dict | None:
             print(" Please enter a number")
 
 
-def create_tunnel(robot_name: str, user_id: str, access_token: str, force: bool = False) -> dict:
+def create_tunnel(
+    robot_name: str, user_id: str, access_token: str, force: bool = False
+) -> dict:
     """Call Railway API to create Cloudflare tunnel.
 
     Args:
@@ -322,9 +322,9 @@ def create_tunnel(robot_name: str, user_id: str, access_token: str, force: bool
                 "robot_name": robot_name,
                 "user_id": user_id,
                 "access_token": access_token,
-                "force": "true" if force else "false"
+                "force": "true" if force else "false",
             },
-            timeout=60
+            timeout=60,
         )
         return response.json()
     except requests.RequestException as e:
@@ -354,7 +354,9 @@ def run_login_flow() -> bool:
         callback_host = "127.0.0.1"
 
     # Build login URL with the appropriate callback host
-    login_url = f"{SERVER_URL}/cli-login?session={session_id}&port={port}&host={callback_host}"
+    login_url = (
+        f"{SERVER_URL}/cli-login?session={session_id}&port={port}&host={callback_host}"
+    )
 
     print("Opening browser for login...")
     print(f"If browser doesn't open, visit:\n {login_url}\n")
@@ -394,7 +396,7 @@ def run_login_flow() -> bool:
         refresh_token=result.get("refresh_token"),
     )
     print(f"\n[OK] Logged in as: {result['email']}")
-    print(f" Config saved to: ~/.simple-mcp-server/config.json")
+    print(" Config saved to: ~/.simple-mcp-server/config.json")
 
     # Fetch existing servers
     print("\nChecking for existing servers...")
@@ -413,17 +415,19 @@ def run_login_flow() -> bool:
                 update_config_tunnel(
                     robot_name=selected["robot_name"],
                     tunnel_token=selected["tunnel_token"],
-                    tunnel_url=selected["tunnel_url"]
+                    tunnel_url=selected["tunnel_url"],
                 )
                 print(f"\n[OK] Selected server: {selected['tunnel_url']}")
-                print(f" Tunnel token saved to config.\n")
+                print(" Tunnel token saved to config.\n")
                 return True
             # else: user wants to create new server, continue below
         else:
             print(" No existing servers found.")
     else:
         # Failed to fetch servers - continue with new server flow
-        print(f" Could not fetch servers: {servers_result.get('error', 'Unknown error')}")
+        print(
+            f" Could not fetch servers: {servers_result.get('error', 'Unknown error')}"
+        )
 
     print()  # Spacing before robot name prompt
 
@@ -435,48 +439,57 @@ def run_login_flow() -> bool:
         tunnel_result = create_tunnel(
             robot_name=robot_name,
             user_id=result["user_id"],
-            access_token=result["access_token"]
+            access_token=result["access_token"],
         )
 
         if tunnel_result.get("success"):
             update_config_tunnel(
                 robot_name=robot_name,
                 tunnel_token=tunnel_result["tunnel_token"],
-                tunnel_url=tunnel_result["tunnel_url"]
+                tunnel_url=tunnel_result["tunnel_url"],
             )
             print(f"[OK] Tunnel created: {tunnel_result['tunnel_url']}")
-            print(f" Tunnel token saved to config.\n")
+            print(" Tunnel token saved to config.\n")
             return True
         else:
             error = tunnel_result.get("error", "Unknown error")
             print(f"[X] Tunnel creation failed: {error}")
 
             # Check if name is taken
-            if "already taken" in error.lower() or "already exists" in error.lower():
+            if (
+                "already taken" in error.lower()
+                or "already exists" in error.lower()
+            ):
                 # Check if owned by same user - offer to reuse
                 if tunnel_result.get("owned_by_user"):
-                    print(f"\n You already own the tunnel '{robot_name}.robotmcp.ai'.")
+                    print(
+                        f"\n You already own the tunnel '{robot_name}.robotmcp.ai'."
+                    )
                     reuse = input(" Reuse this tunnel? (y/n): ").strip().lower()
-                    if reuse == 'y':
+                    if reuse == "y":
                         print(f"\nReusing tunnel {robot_name}.robotmcp.ai...")
                         # Retry with force=True to get existing tunnel
                         tunnel_result = create_tunnel(
                             robot_name=robot_name,
                             user_id=result["user_id"],
                             access_token=result["access_token"],
-                            force=True
+                            force=True,
                         )
                         if tunnel_result.get("success"):
                             update_config_tunnel(
                                 robot_name=robot_name,
                                 tunnel_token=tunnel_result["tunnel_token"],
-                                tunnel_url=tunnel_result["tunnel_url"]
+                                tunnel_url=tunnel_result["tunnel_url"],
                             )
-                            print(f"[OK] Tunnel reused: {tunnel_result['tunnel_url']}")
-                            print(f" Tunnel token saved to config.\n")
+                            print(
+                                f"[OK] Tunnel reused: {tunnel_result['tunnel_url']}"
+                            )
+                            print(" Tunnel token saved to config.\n")
                             return True
                         else:
-                            print(f"[X] Failed to reuse tunnel: {tunnel_result.get('error')}")
+                            print(
+                                f"[X] Failed to reuse tunnel: {tunnel_result.get('error')}"
+                            )
                             print(" Please try a different name.\n")
                             continue
                 else:
diff --git a/sse.py b/sse.py
index d7a1b29..f633525 100644
--- a/sse.py
+++ b/sse.py
@@ -53,15 +53,14 @@ def unauthorized_response(error_description: str) -> JSONResponse:
         status_code=401,
         headers={
             "WWW-Authenticate": f'Bearer resource_metadata="{_server_url}/.well-known/oauth-protected-resource"'
-        }
+        },
     )
 
 
 def forbidden_response(error_description: str) -> JSONResponse:
     """Return 403 Forbidden response for unauthorized access."""
     return JSONResponse(
-        {"error": "forbidden", "error_description": error_description},
-        status_code=403
+        {"error": "forbidden", "error_description": error_description}, status_code=403
     )
 
 
@@ -83,13 +82,14 @@ async def check_authorization(token_data: dict) -> bool:
     robot_name = _local_config.robot_name if _local_config else None
     if robot_name:
         if await check_shared_access(robot_name, connecting_user_id):
-            logger.info(f"[SSE] Request authorized (shared member): {token_data.get('email')}")
+            logger.info(
+                f"[SSE] Request authorized (shared member): {token_data.get('email')}"
+            )
             return True
 
     logger.warning(f"[SSE] Access denied: user {connecting_user_id} is not authorized")
     raise HTTPException(
-        status_code=403,
-        detail="Access denied: not authorized for this server"
+        status_code=403, detail="Access denied: not authorized for this server"
     )
diff --git a/submodule_deps.py b/submodule_deps.py
index 8a788d9..34e9b8e 100644
--- a/submodule_deps.py
+++ b/submodule_deps.py
@@ -3,6 +3,7 @@
 This module scans .gitmodules for submodules with pyproject.toml files
 and ensures their dependencies are installed at server startup.
""" + import configparser import subprocess import sys @@ -27,13 +28,15 @@ def parse_gitmodules(root: Path) -> list[dict]: for section in config.sections(): # Sections are like: submodule "ros-mcp-server" if section.startswith('submodule "') and section.endswith('"'): - name = section[len('submodule "'):-1] - path = config.get(section, 'path', fallback=None) + name = section[len('submodule "') : -1] + path = config.get(section, "path", fallback=None) if path: - submodules.append({ - 'name': name, - 'path': path, - }) + submodules.append( + { + "name": name, + "path": path, + } + ) return submodules @@ -55,9 +58,11 @@ def get_package_name_from_pyproject(pyproject_path: Path) -> str | None: def is_package_installed(package_name: str) -> bool: """Check if a package is installed.""" - installed = {dist.metadata['Name'].lower() for dist in distributions()} - return package_name.lower().replace('_', '-') in installed or \ - package_name.lower().replace('-', '_') in installed + installed = {dist.metadata["Name"].lower() for dist in distributions()} + return ( + package_name.lower().replace("_", "-") in installed + or package_name.lower().replace("-", "_") in installed + ) def install_submodule(submodule_path: Path, verbose: bool = True) -> bool: @@ -79,7 +84,7 @@ def install_submodule(submodule_path: Path, verbose: bool = True) -> bool: cmd, capture_output=True, text=True, - timeout=120 # 2 minute timeout + timeout=120, # 2 minute timeout ) if result.returncode == 0: return True @@ -93,7 +98,9 @@ def install_submodule(submodule_path: Path, verbose: bool = True) -> bool: return False -def discover_and_install_submodules(root: Path | None = None, verbose: bool = True) -> dict: +def discover_and_install_submodules( + root: Path | None = None, verbose: bool = True +) -> dict: """Discover git submodules with pyproject.toml and install missing ones. 
     Args:
@@ -107,10 +114,10 @@ def discover_and_install_submodules(root: Path | None = None, verbose: bool = Tr
         root = Path(__file__).parent
 
     result = {
-        'found': [],
-        'installed': [],
-        'failed': [],
-        'already_installed': [],
+        "found": [],
+        "installed": [],
+        "failed": [],
+        "already_installed": [],
     }
 
     # Parse .gitmodules
@@ -120,16 +127,18 @@ def discover_and_install_submodules(root: Path | None = None, verbose: bool = Tr
         return result
 
     if verbose:
-        print(f"\nDiscovering submodule dependencies...")
+        print("\nDiscovering submodule dependencies...")
 
     for submodule in submodules:
-        submodule_path = root / submodule['path']
+        submodule_path = root / submodule["path"]
         pyproject_path = submodule_path / "pyproject.toml"
 
         # Check if submodule directory exists
         if not submodule_path.exists():
             if verbose:
-                print(f" [SKIP] {submodule['name']}: directory not found (run 'git submodule update --init')")
+                print(
+                    f" [SKIP] {submodule['name']}: directory not found (run 'git submodule update --init')"
+                )
             continue
 
         # Check if it has pyproject.toml
@@ -138,7 +147,7 @@ def discover_and_install_submodules(root: Path | None = None, verbose: bool = Tr
                 print(f" [SKIP] {submodule['name']}: no pyproject.toml found")
             continue
 
-        result['found'].append(submodule['name'])
+        result["found"].append(submodule["name"])
 
         # Get package name from pyproject.toml
         package_name = get_package_name_from_pyproject(pyproject_path)
@@ -149,7 +158,7 @@ def discover_and_install_submodules(root: Path | None = None, verbose: bool = Tr
 
         # Check if already installed
         if is_package_installed(package_name):
-            result['already_installed'].append(submodule['name'])
+            result["already_installed"].append(submodule["name"])
             if verbose:
                 print(f" [OK] {submodule['name']} ({package_name}): already installed")
             continue
@@ -159,15 +168,15 @@ def discover_and_install_submodules(root: Path | None = None, verbose: bool = Tr
             print(f" [INSTALLING] {submodule['name']} ({package_name})...")
 
         if install_submodule(submodule_path, verbose):
-            result['installed'].append(submodule['name'])
+            result["installed"].append(submodule["name"])
             if verbose:
                 print(f" [OK] {submodule['name']}: installed successfully")
         else:
-            result['failed'].append(submodule['name'])
+            result["failed"].append(submodule["name"])
             if verbose:
                 print(f" [FAILED] {submodule['name']}: installation failed")
 
-    if verbose and (result['installed'] or result['failed']):
+    if verbose and (result["installed"] or result["failed"]):
         print()  # Extra newline after installation
 
     return result
@@ -184,7 +193,7 @@ def ensure_submodule_deps(root: Path | None = None) -> bool:
     result = discover_and_install_submodules(root, verbose=True)
 
     # Return False if any installations failed
-    return len(result['failed']) == 0
+    return len(result["failed"]) == 0
 
 
 if __name__ == "__main__":
diff --git a/submodule_integration.py b/submodule_integration.py
index c23986a..3a1882d 100644
--- a/submodule_integration.py
+++ b/submodule_integration.py
@@ -15,6 +15,7 @@
 - /resources/__init__.py with register_all_resources(mcp, ...)
 - /prompts/__init__.py with register_all_prompts(mcp, ...)
""" + import importlib import inspect import logging @@ -118,7 +119,9 @@ def _call_register_function( func(*args, **kwargs) return True except Exception as e: - logger.error(f"[INTEGRATION] Failed to call {func.__name__} for {submodule_name}: {e}") + logger.error( + f"[INTEGRATION] Failed to call {func.__name__} for {submodule_name}: {e}" + ) return False @@ -144,7 +147,9 @@ def _discover_and_register_submodule( if "register_function" in integration_config: func = _get_function_from_path(integration_config["register_function"]) if func: - logger.info(f"[INTEGRATION] Using custom register function for {submodule_name}") + logger.info( + f"[INTEGRATION] Using custom register function for {submodule_name}" + ) if _call_register_function(func, mcp, config, submodule_name): result["tools"] = result["resources"] = result["prompts"] = True return result @@ -153,7 +158,9 @@ def _discover_and_register_submodule( integration_module = _import_module_safe(f"{package_name}.integration") if integration_module and hasattr(integration_module, "register"): logger.info(f"[INTEGRATION] Found {package_name}.integration.register()") - if _call_register_function(integration_module.register, mcp, config, submodule_name): + if _call_register_function( + integration_module.register, mcp, config, submodule_name + ): result["tools"] = result["resources"] = result["prompts"] = True return result @@ -169,7 +176,9 @@ def _discover_and_register_submodule( # Look for register_all_resources in resources/__init__.py resources_module = _import_module_safe(f"{package_name}.resources") if resources_module and hasattr(resources_module, "register_all_resources"): - logger.info(f"[INTEGRATION] Found {package_name}.resources.register_all_resources()") + logger.info( + f"[INTEGRATION] Found {package_name}.resources.register_all_resources()" + ) result["resources"] = _call_register_function( resources_module.register_all_resources, mcp, config, submodule_name ) @@ -177,7 +186,9 @@ def _discover_and_register_submodule( # Look for register_all_prompts in prompts/__init__.py prompts_module = _import_module_safe(f"{package_name}.prompts") if prompts_module and hasattr(prompts_module, "register_all_prompts"): - logger.info(f"[INTEGRATION] Found {package_name}.prompts.register_all_prompts()") + logger.info( + f"[INTEGRATION] Found {package_name}.prompts.register_all_prompts()" + ) result["prompts"] = _call_register_function( prompts_module.register_all_prompts, mcp, config, submodule_name ) @@ -231,7 +242,9 @@ def discover_and_register_all( # Check if it has pyproject.toml if not pyproject_path.exists(): - logger.debug(f"[INTEGRATION] Submodule {submodule['name']} has no pyproject.toml") + logger.debug( + f"[INTEGRATION] Submodule {submodule['name']} has no pyproject.toml" + ) continue # Load pyproject.toml @@ -244,7 +257,9 @@ def discover_and_register_all( # Get package name package_name = get_package_name_from_pyproject(pyproject_path) if not package_name: - logger.warning(f"[INTEGRATION] Could not determine package name for {submodule['name']}") + logger.warning( + f"[INTEGRATION] Could not determine package name for {submodule['name']}" + ) continue # Normalize package name (replace - with _) @@ -261,7 +276,9 @@ def discover_and_register_all( # Copy config for this submodule submodule_config = config.copy() - logger.info(f"[INTEGRATION] Registering {submodule['name']} (package: {package_name})") + logger.info( + f"[INTEGRATION] Registering {submodule['name']} (package: {package_name})" + ) # Discover and register 
results[submodule["name"]] = _discover_and_register_submodule( @@ -273,10 +290,10 @@ def discover_and_register_all( ) # Log summary - registered_count = sum( - 1 for r in results.values() if any(r.values()) + registered_count = sum(1 for r in results.values() if any(r.values())) + logger.info( + f"[INTEGRATION] Registered {registered_count}/{len(results)} submodule(s)" ) - logger.info(f"[INTEGRATION] Registered {registered_count}/{len(results)} submodule(s)") return results