48 changes: 41 additions & 7 deletions crates/retrochat-cli/src/commands/analytics.rs
@@ -3,10 +3,11 @@ use clap::Subcommand;
 use std::sync::Arc;
 
 use retrochat_core::database::DatabaseManager;
-use retrochat_core::env::apis as env_vars;
+use retrochat_core::env::{apis as env_vars, llm as env_llm};
 use retrochat_core::models::OperationStatus;
 use retrochat_core::services::{
     google_ai::{GoogleAiClient, GoogleAiConfig},
+    llm::{LlmClientFactory, LlmConfig, LlmProvider},
     AnalyticsRequestService,
 };

@@ -67,21 +68,54 @@
 
 pub async fn handle_execute_command(
     session_id: Option<String>,
+    provider: Option<String>,
+    model: Option<String>,
     custom_prompt: Option<String>,
     all: bool,
     background: bool,
 ) -> Result<()> {
     let db_path = retrochat_core::database::config::get_default_db_path()?;
     let db_manager = Arc::new(DatabaseManager::new(&db_path).await?);
 
-    // Initialize Google AI client
-    let api_key = std::env::var(env_vars::GOOGLE_AI_API_KEY)
-        .context("GOOGLE_AI_API_KEY environment variable is required")?;
+    // Determine LLM provider from --provider flag or environment variable
+    let llm_provider: LlmProvider = if let Some(p) = provider.as_deref() {
+        p.parse::<LlmProvider>()
+            .map_err(|e| anyhow::anyhow!("{e}"))?
+    } else if let Ok(p) = std::env::var(env_llm::RETROCHAT_LLM_PROVIDER) {
+        p.parse::<LlmProvider>()
+            .map_err(|e| anyhow::anyhow!("{e}"))?
+    } else {
+        LlmProvider::GoogleAi
+    };
 
-    let config = GoogleAiConfig::new(api_key);
-    let google_ai_client = GoogleAiClient::new(config)?;
+    // Build LLM config
+    let mut config = match llm_provider {
+        LlmProvider::GoogleAi => {
+            let api_key = std::env::var(env_vars::GOOGLE_AI_API_KEY).context(
+                "GOOGLE_AI_API_KEY environment variable is required for google-ai provider",
+            )?;
+            LlmConfig::google_ai(api_key)
+        }
+        LlmProvider::ClaudeCode => LlmConfig::claude_code(),
+        LlmProvider::GeminiCli => LlmConfig::gemini_cli(),
+    };
 
-    let service = AnalyticsRequestService::new(db_manager, google_ai_client);
+    // Apply model if specified
+    if let Some(m) = model {
+        config = config.with_model(m);
+    }
+
+    // Create LLM client
+    let llm_client = LlmClientFactory::create(config).context("Failed to create LLM client")?;
+
+    // Display provider info
+    println!(
+        "Using LLM provider: {} (model: {})",
+        llm_client.provider_name(),
+        llm_client.model_name()
+    );
+
+    let service = AnalyticsRequestService::new_with_llm(db_manager, llm_client);
 
     if all {
         execute_analysis_for_all_sessions(&service, custom_prompt, background).await
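This hunk (and the summarize path below) resolves the provider via `p.parse::<LlmProvider>()`, which presupposes a `FromStr` impl on `LlmProvider`. That impl is outside this diff; the following is a minimal sketch of the shape it presumably has, assuming the three provider names documented in `env.rs` (an illustration, not the crate's actual code):

```rust
use std::str::FromStr;

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LlmProvider {
    GoogleAi,
    ClaudeCode,
    GeminiCli,
}

impl FromStr for LlmProvider {
    type Err = String;

    // Maps the user-facing names to variants; an unknown name yields the
    // error string that the call sites wrap with `anyhow::anyhow!("{e}")`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s.trim().to_ascii_lowercase().as_str() {
            "google-ai" => Ok(Self::GoogleAi),
            "claude-code" => Ok(Self::ClaudeCode),
            "gemini-cli" => Ok(Self::GeminiCli),
            other => Err(format!(
                "unknown LLM provider '{other}'; expected google-ai, claude-code, or gemini-cli"
            )),
        }
    }
}
```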
51 changes: 43 additions & 8 deletions crates/retrochat-cli/src/commands/mod.rs
@@ -167,6 +167,14 @@ pub enum AnalysisCommands {
     Run {
         /// Session ID to analyze (if not provided, will prompt for selection)
         session_id: Option<String>,
+        /// LLM provider to use for analysis (default: google-ai)
+        ///
+        /// Options: google-ai, claude-code, gemini-cli
+        #[arg(long, short = 'P')]
+        provider: Option<String>,
+        /// Model to use (provider-specific)
+        #[arg(long, short = 'm')]
+        model: Option<String>,
         /// Custom prompt for analysis
         #[arg(long)]
         custom_prompt: Option<String>,
@@ -219,6 +227,12 @@ pub enum SummarizeCommands {
         /// Summarize all sessions
         #[arg(long)]
         all: bool,
+        /// LLM provider: google-ai (default), claude-code, gemini-cli
+        #[arg(long, short = 'P')]
+        provider: Option<String>,
+        /// Model identifier (provider-specific)
+        #[arg(long, short = 'm')]
+        model: Option<String>,
     },
 
     /// Generate session-level summaries from turn summaries
@@ -228,6 +242,12 @@ pub enum SummarizeCommands {
         /// Summarize all sessions
         #[arg(long)]
         all: bool,
+        /// LLM provider: google-ai (default), claude-code, gemini-cli
+        #[arg(long, short = 'P')]
+        provider: Option<String>,
+        /// Model identifier (provider-specific)
+        #[arg(long, short = 'm')]
+        model: Option<String>,
     },
 
     /// Show summarization status for all sessions
@@ -310,12 +330,21 @@ pub async fn run_command(command: Commands) -> anyhow::Result<()> {
         Commands::Analysis { command } => match command {
             AnalysisCommands::Run {
                 session_id,
+                provider,
+                model,
                 custom_prompt,
                 all,
                 background,
             } => {
-                self::analytics::handle_execute_command(session_id, custom_prompt, all, background)
-                    .await
+                self::analytics::handle_execute_command(
+                    session_id,
+                    provider,
+                    model,
+                    custom_prompt,
+                    all,
+                    background,
+                )
+                .await
             }
 
             AnalysisCommands::Show { session_id, all } => {
@@ -337,12 +366,18 @@ pub async fn run_command(command: Commands) -> anyhow::Result<()> {
         // Hierarchical Summarization
         // ═══════════════════════════════════════════════════
         Commands::Summarize { command } => match command {
-            SummarizeCommands::Turns { session_id, all } => {
-                self::summarize::handle_summarize_turns(session_id, all).await
-            }
-            SummarizeCommands::Sessions { session_id, all } => {
-                self::summarize::handle_summarize_sessions(session_id, all).await
-            }
+            SummarizeCommands::Turns {
+                session_id,
+                all,
+                provider,
+                model,
+            } => self::summarize::handle_summarize_turns(session_id, all, provider, model).await,
+            SummarizeCommands::Sessions {
+                session_id,
+                all,
+                provider,
+                model,
+            } => self::summarize::handle_summarize_sessions(session_id, all, provider, model).await,
             SummarizeCommands::Status => self::summarize::handle_summarize_status().await,
         },
 
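With the wiring above, flags and handlers line up end to end; an invocation would look like `retrochat analysis run --provider claude-code` or `retrochat summarize turns --all -P gemini-cli -m <model>` (the binary name `retrochat` is assumed here, as it does not appear in this diff).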
100 changes: 77 additions & 23 deletions crates/retrochat-cli/src/commands/summarize.rs
@@ -3,25 +3,80 @@ use std::sync::Arc;
 use uuid::Uuid;
 
 use retrochat_core::database::DatabaseManager;
-use retrochat_core::env::apis as env_vars;
-use retrochat_core::services::{
-    google_ai::{GoogleAiClient, GoogleAiConfig},
-    SessionSummarizer, TurnDetector, TurnSummarizer,
-};
+use retrochat_core::env::{apis as env_apis, llm as env_llm};
+use retrochat_core::services::llm::{LlmClientFactory, LlmConfig, LlmProvider};
+use retrochat_core::services::{SessionSummarizer, TurnDetector, TurnSummarizer};
 
+/// Create an LLM client based on provider/model flags or environment variables
+fn create_llm_client(
+    provider: Option<String>,
+    model: Option<String>,
+) -> Result<Arc<dyn retrochat_core::services::llm::LlmClient>> {
+    // Determine provider from flag, env var, or default
+    let llm_provider: LlmProvider = if let Some(p) = provider.as_deref() {
+        p.parse::<LlmProvider>()
+            .map_err(|e| anyhow::anyhow!("{e}"))?
+    } else if let Ok(p) = std::env::var(env_llm::RETROCHAT_LLM_PROVIDER) {
+        p.parse::<LlmProvider>()
+            .map_err(|e| anyhow::anyhow!("{e}"))?
+    } else {
+        LlmProvider::GoogleAi
+    };
+
+    // Determine model from flag or env var
+    let model_name = model.or_else(|| std::env::var(env_llm::RETROCHAT_LLM_MODEL).ok());
+
+    // Build config based on provider
+    let mut config = match llm_provider {
+        LlmProvider::GoogleAi => {
+            let api_key = std::env::var(env_apis::GOOGLE_AI_API_KEY).context(
+                "GOOGLE_AI_API_KEY environment variable is required for google-ai provider",
+            )?;
+            LlmConfig::google_ai(api_key)
+        }
+        LlmProvider::ClaudeCode => {
+            let mut cfg = LlmConfig::claude_code();
+            if let Ok(path) = std::env::var(env_llm::CLAUDE_CODE_PATH) {
+                cfg = cfg.with_cli_path(path);
+            }
+            cfg
+        }
+        LlmProvider::GeminiCli => {
+            let mut cfg = LlmConfig::gemini_cli();
+            if let Ok(path) = std::env::var(env_llm::GEMINI_CLI_PATH) {
+                cfg = cfg.with_cli_path(path);
+            }
+            cfg
+        }
+    };
+
+    if let Some(m) = model_name {
+        config = config.with_model(m);
+    }
+
+    let client = LlmClientFactory::create(config)?;
+
+    println!(
+        "Using LLM provider: {} (model: {})",
+        client.provider_name(),
+        client.model_name()
+    );
+
+    Ok(client)
+}
+
 /// Handle the summarize turns command
-pub async fn handle_summarize_turns(session_id: Option<String>, all: bool) -> Result<()> {
+pub async fn handle_summarize_turns(
+    session_id: Option<String>,
+    all: bool,
+    provider: Option<String>,
+    model: Option<String>,
+) -> Result<()> {
     let db_path = retrochat_core::database::config::get_default_db_path()?;
     let db_manager = Arc::new(DatabaseManager::new(&db_path).await?);
 
-    // Initialize Google AI client
-    let api_key = std::env::var(env_vars::GOOGLE_AI_API_KEY)
-        .context("GOOGLE_AI_API_KEY environment variable is required")?;
-
-    let config = GoogleAiConfig::new(api_key);
-    let ai_client = GoogleAiClient::new(config)?;
-
-    let summarizer = TurnSummarizer::new(&db_manager, ai_client);
+    let llm_client = create_llm_client(provider, model)?;
+    let summarizer = TurnSummarizer::new(&db_manager, llm_client);
 
     if all {
         summarize_all_sessions_turns(&db_manager, &summarizer).await
@@ -84,18 +139,17 @@ async fn summarize_all_sessions_turns(
 }
 
 /// Handle the summarize sessions command
-pub async fn handle_summarize_sessions(session_id: Option<String>, all: bool) -> Result<()> {
+pub async fn handle_summarize_sessions(
+    session_id: Option<String>,
+    all: bool,
+    provider: Option<String>,
+    model: Option<String>,
+) -> Result<()> {
     let db_path = retrochat_core::database::config::get_default_db_path()?;
     let db_manager = Arc::new(DatabaseManager::new(&db_path).await?);
 
-    // Initialize Google AI client
-    let api_key = std::env::var(env_vars::GOOGLE_AI_API_KEY)
-        .context("GOOGLE_AI_API_KEY environment variable is required")?;
-
-    let config = GoogleAiConfig::new(api_key);
-    let ai_client = GoogleAiClient::new(config)?;
-
-    let summarizer = SessionSummarizer::new(&db_manager, ai_client);
+    let llm_client = create_llm_client(provider, model)?;
+    let summarizer = SessionSummarizer::new(&db_manager, llm_client);
 
     if all {
         summarize_all_sessions(&db_manager, &summarizer).await
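The `create_llm_client` helper leans on a small builder surface: the `LlmConfig::google_ai`/`claude_code`/`gemini_cli` constructors plus `with_model` and `with_cli_path`, handed to `LlmClientFactory::create`. Those definitions live in `retrochat_core::services::llm` and are not shown in this diff; below is a minimal sketch of what the call sites imply, with field names and defaults assumed (reusing the `LlmProvider` enum sketched earlier):

```rust
// Sketch only: field names and defaults are inferred from the call sites
// in this PR, not taken from the actual retrochat_core definitions.
#[derive(Debug, Clone)]
pub struct LlmConfig {
    pub provider: LlmProvider,
    pub api_key: Option<String>,  // used by google-ai only
    pub model: Option<String>,    // None falls back to a provider default
    pub cli_path: Option<String>, // claude-code / gemini-cli binary override
}

impl LlmConfig {
    pub fn google_ai(api_key: String) -> Self {
        Self { provider: LlmProvider::GoogleAi, api_key: Some(api_key), model: None, cli_path: None }
    }

    pub fn claude_code() -> Self {
        Self { provider: LlmProvider::ClaudeCode, api_key: None, model: None, cli_path: None }
    }

    pub fn gemini_cli() -> Self {
        Self { provider: LlmProvider::GeminiCli, api_key: None, model: None, cli_path: None }
    }

    // Builder-style setters, consuming and returning Self so calls chain.
    pub fn with_model(mut self, model: impl Into<String>) -> Self {
        self.model = Some(model.into());
        self
    }

    pub fn with_cli_path(mut self, path: impl Into<String>) -> Self {
        self.cli_path = Some(path.into());
        self
    }
}
```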
16 changes: 16 additions & 0 deletions crates/retrochat-core/src/env.rs
@@ -44,3 +44,19 @@ pub mod database {
     /// Database file path (overrides default ~/.retrochat/retrochat.db)
     pub const RETROCHAT_DB: &str = "RETROCHAT_DB";
 }
+
+/// LLM provider configuration
+pub mod llm {
+    /// LLM provider to use for analysis
+    /// Options: "google-ai", "claude-code", "gemini-cli"
+    pub const RETROCHAT_LLM_PROVIDER: &str = "RETROCHAT_LLM_PROVIDER";
+
+    /// Model identifier for the selected provider
+    pub const RETROCHAT_LLM_MODEL: &str = "RETROCHAT_LLM_MODEL";
+
+    /// Custom path to Claude Code CLI binary (defaults to "claude" in PATH)
+    pub const CLAUDE_CODE_PATH: &str = "CLAUDE_CODE_PATH";
+
+    /// Custom path to Gemini CLI binary (defaults to "gemini" in PATH)
+    pub const GEMINI_CLI_PATH: &str = "GEMINI_CLI_PATH";
+}
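Resolution order, as implemented in the hunks above: an explicit `--provider`/`-P` flag wins, then `RETROCHAT_LLM_PROVIDER`, then the `google-ai` default; a `--model`/`-m` flag likewise takes precedence over `RETROCHAT_LLM_MODEL`. One asymmetry worth noting: the `RETROCHAT_LLM_MODEL` and CLI-path fallbacks are applied only in the `create_llm_client` helper in `summarize.rs`, while the analytics path reads just the provider env var and the `--model` flag.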