notedeck

One damus client to rule them all
git clone git://jb55.com/notedeck
Log | Files | Refs | README | LICENSE

config.rs (12086B)


      1 use crate::backend::BackendType;
      2 use async_openai::config::OpenAIConfig;
      3 use serde::{Deserialize, Serialize};
      4 use std::env;
      5 
      6 /// Check if a binary exists on the system PATH.
      7 pub fn has_binary_on_path(binary: &str) -> bool {
      8     env::var_os("PATH")
      9         .map(|paths| env::split_paths(&paths).any(|dir| dir.join(binary).is_file()))
     10         .unwrap_or(false)
     11         || env::var_os("PATH")
     12             .map(|paths| {
     13                 env::split_paths(&paths).any(|dir| dir.join(format!("{}.exe", binary)).is_file())
     14             })
     15             .unwrap_or(false)
     16 }
     17 
     18 /// Detect which agentic backends are available based on binaries in PATH.
     19 pub fn available_agentic_backends() -> Vec<BackendType> {
     20     let mut backends = Vec::new();
     21     if has_binary_on_path("claude") {
     22         backends.push(BackendType::Claude);
     23     }
     24     if has_binary_on_path("codex") {
     25         backends.push(BackendType::Codex);
     26     }
     27     backends
     28 }
     29 
/// AI interaction mode - determines UI complexity and feature set.
///
/// Derived from the configured backend via `ModelConfig::ai_mode`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AiMode {
    /// Simple chat interface (OpenAI-style) - no permissions, no CWD, no scene view
    Chat,
    /// Full IDE with permissions, sessions, scene view, etc. (Claude backend)
    Agentic,
}
     38 
/// Available AI providers for Dave
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, Deserialize)]
pub enum AiProvider {
    /// Hosted OpenAI API. The default provider; requires an API key.
    #[default]
    OpenAI,
    /// Anthropic Claude API; requires an API key.
    Anthropic,
    /// Local Ollama server (OpenAI-compatible endpoint on localhost); no API key.
    Ollama,
    /// Codex agentic backend; no API key required.
    Codex,
}
     48 
     49 impl AiProvider {
     50     pub const ALL: [AiProvider; 4] = [
     51         AiProvider::OpenAI,
     52         AiProvider::Anthropic,
     53         AiProvider::Ollama,
     54         AiProvider::Codex,
     55     ];
     56 
     57     pub fn name(&self) -> &'static str {
     58         match self {
     59             AiProvider::OpenAI => "OpenAI",
     60             AiProvider::Anthropic => "Anthropic",
     61             AiProvider::Ollama => "Ollama",
     62             AiProvider::Codex => "Codex",
     63         }
     64     }
     65 
     66     pub fn default_model(&self) -> &'static str {
     67         match self {
     68             AiProvider::OpenAI => "gpt-5.2",
     69             AiProvider::Anthropic => "claude-sonnet-4-20250514",
     70             AiProvider::Ollama => "hhao/qwen2.5-coder-tools:latest",
     71             AiProvider::Codex => "gpt-5.3-codex",
     72         }
     73     }
     74 
     75     pub fn default_endpoint(&self) -> Option<&'static str> {
     76         match self {
     77             AiProvider::OpenAI | AiProvider::Codex => None,
     78             AiProvider::Anthropic => Some("https://api.anthropic.com/v1"),
     79             AiProvider::Ollama => Some("http://localhost:11434/v1"),
     80         }
     81     }
     82 
     83     pub fn requires_api_key(&self) -> bool {
     84         match self {
     85             AiProvider::OpenAI | AiProvider::Anthropic => true,
     86             AiProvider::Ollama | AiProvider::Codex => false,
     87         }
     88     }
     89 
     90     pub fn available_models(&self) -> &'static [&'static str] {
     91         match self {
     92             AiProvider::OpenAI => &["gpt-5.2"],
     93             AiProvider::Anthropic => &[
     94                 "claude-sonnet-4-20250514",
     95                 "claude-opus-4-20250514",
     96                 "claude-3-5-sonnet-20241022",
     97                 "claude-3-5-haiku-20241022",
     98             ],
     99             AiProvider::Ollama => &[
    100                 "hhao/qwen2.5-coder-tools:latest",
    101                 "llama3.2:latest",
    102                 "mistral:latest",
    103                 "codellama:latest",
    104             ],
    105             AiProvider::Codex => &[
    106                 "gpt-5.3-codex",
    107                 "gpt-5.2-codex",
    108                 "gpt-5-codex",
    109                 "gpt-5-codex-mini",
    110                 "codex-mini-latest",
    111             ],
    112         }
    113     }
    114 }
    115 
/// User-configurable settings for Dave AI
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct DaveSettings {
    /// Which AI provider to use.
    pub provider: AiProvider,
    /// Model identifier for the chosen provider (e.g. a default from
    /// `AiProvider::default_model`).
    pub model: String,
    /// API base URL override; `None` falls back to the provider default.
    pub endpoint: Option<String>,
    /// API key, for providers that require one.
    pub api_key: Option<String>,
    /// Relay URL; mirrors the `DAVE_RELAY` env var used by `ModelConfig`.
    pub pns_relay: Option<String>,
}
    125 
    126 impl Default for DaveSettings {
    127     fn default() -> Self {
    128         DaveSettings {
    129             provider: AiProvider::default(),
    130             model: AiProvider::default().default_model().to_string(),
    131             endpoint: None,
    132             api_key: None,
    133             pns_relay: None,
    134         }
    135     }
    136 }
    137 
    138 impl DaveSettings {
    139     /// Create settings with provider defaults applied
    140     pub fn with_provider(provider: AiProvider) -> Self {
    141         DaveSettings {
    142             provider,
    143             model: provider.default_model().to_string(),
    144             endpoint: provider.default_endpoint().map(|s| s.to_string()),
    145             api_key: None,
    146             pns_relay: None,
    147         }
    148     }
    149 
    150     /// Create settings from an existing ModelConfig (preserves env var values)
    151     pub fn from_model_config(config: &ModelConfig) -> Self {
    152         let provider = match config.backend {
    153             BackendType::OpenAI | BackendType::Remote => AiProvider::OpenAI,
    154             BackendType::Claude => AiProvider::Anthropic,
    155             BackendType::Codex => AiProvider::Codex,
    156         };
    157 
    158         let api_key = match provider {
    159             AiProvider::Anthropic => config.anthropic_api_key.clone(),
    160             _ => config.api_key().map(|s| s.to_string()),
    161         };
    162 
    163         DaveSettings {
    164             provider,
    165             model: config.model().to_string(),
    166             endpoint: config
    167                 .endpoint()
    168                 .map(|s| s.to_string())
    169                 .or_else(|| provider.default_endpoint().map(|s| s.to_string())),
    170             api_key,
    171             pns_relay: config.pns_relay.clone(),
    172         }
    173     }
    174 }
    175 
/// Runtime model configuration, resolved from environment variables (see
/// the `Default` impl) or from user [`DaveSettings`].
#[derive(Debug)]
pub struct ModelConfig {
    /// True when running on the embedded trial API key (no user key supplied).
    pub trial: bool,
    /// Which backend implementation to talk to.
    pub backend: BackendType,
    /// API base URL override (`DAVE_ENDPOINT`); `None` uses the client default.
    endpoint: Option<String>,
    /// Model identifier sent to the backend (`DAVE_MODEL` or a per-backend default).
    model: String,
    /// API key for OpenAI-compatible backends (`DAVE_API_KEY` / `OPENAI_API_KEY`).
    api_key: Option<String>,
    /// Anthropic-specific key (`ANTHROPIC_API_KEY` / `CLAUDE_API_KEY`).
    pub anthropic_api_key: Option<String>,
    /// Relay URL (`DAVE_RELAY`).
    pub pns_relay: Option<String>,
}
    186 
    187 // short-term trial key for testing
    188 const DAVE_TRIAL: &str = unsafe {
    189     std::str::from_utf8_unchecked(&[
    190         0x73, 0x6b, 0x2d, 0x70, 0x72, 0x6f, 0x6a, 0x2d, 0x54, 0x6b, 0x61, 0x48, 0x46, 0x32, 0x73,
    191         0x72, 0x43, 0x59, 0x73, 0x5a, 0x62, 0x33, 0x6f, 0x6b, 0x43, 0x75, 0x61, 0x78, 0x39, 0x57,
    192         0x76, 0x72, 0x41, 0x46, 0x67, 0x5f, 0x39, 0x58, 0x78, 0x35, 0x65, 0x37, 0x4b, 0x53, 0x36,
    193         0x76, 0x32, 0x32, 0x51, 0x30, 0x67, 0x48, 0x61, 0x58, 0x6b, 0x67, 0x6e, 0x4e, 0x4d, 0x63,
    194         0x7a, 0x69, 0x72, 0x5f, 0x44, 0x57, 0x6e, 0x7a, 0x43, 0x77, 0x52, 0x50, 0x4e, 0x50, 0x39,
    195         0x6b, 0x5a, 0x79, 0x75, 0x57, 0x4c, 0x35, 0x54, 0x33, 0x42, 0x6c, 0x62, 0x6b, 0x46, 0x4a,
    196         0x72, 0x66, 0x49, 0x4b, 0x31, 0x77, 0x4f, 0x67, 0x31, 0x6a, 0x37, 0x54, 0x57, 0x42, 0x5a,
    197         0x67, 0x66, 0x49, 0x75, 0x30, 0x51, 0x48, 0x4e, 0x31, 0x70, 0x6a, 0x72, 0x37, 0x4b, 0x38,
    198         0x55, 0x54, 0x6d, 0x34, 0x50, 0x6f, 0x65, 0x47, 0x39, 0x61, 0x35, 0x79, 0x6c, 0x78, 0x45,
    199         0x4f, 0x6f, 0x74, 0x43, 0x47, 0x42, 0x36, 0x65, 0x7a, 0x59, 0x5a, 0x37, 0x70, 0x54, 0x38,
    200         0x63, 0x44, 0x75, 0x66, 0x75, 0x36, 0x52, 0x4d, 0x6b, 0x6c, 0x2d, 0x44, 0x51, 0x41,
    201     ])
    202 };
    203 
    204 impl Default for ModelConfig {
    205     fn default() -> Self {
    206         let api_key = std::env::var("DAVE_API_KEY")
    207             .ok()
    208             .or(std::env::var("OPENAI_API_KEY").ok());
    209 
    210         let anthropic_api_key = std::env::var("ANTHROPIC_API_KEY")
    211             .ok()
    212             .or(std::env::var("CLAUDE_API_KEY").ok());
    213 
    214         // Determine backend: explicit env var takes precedence, otherwise auto-detect
    215         let backend = if let Ok(backend_str) = std::env::var("DAVE_BACKEND") {
    216             match backend_str.to_lowercase().as_str() {
    217                 "claude" | "anthropic" => BackendType::Claude,
    218                 "openai" => BackendType::OpenAI,
    219                 "codex" => BackendType::Codex,
    220                 _ => {
    221                     tracing::warn!(
    222                         "Unknown DAVE_BACKEND value: {}, defaulting to OpenAI",
    223                         backend_str
    224                     );
    225                     BackendType::OpenAI
    226                 }
    227             }
    228         } else {
    229             // Auto-detect: prefer agentic backends if their CLI binary is on PATH,
    230             // then fall back to API-key detection, then OpenAI (with trial key).
    231             if has_binary_on_path("claude") {
    232                 BackendType::Claude
    233             } else if has_binary_on_path("codex") {
    234                 BackendType::Codex
    235             } else if anthropic_api_key.is_some() {
    236                 BackendType::Claude
    237             } else {
    238                 BackendType::OpenAI
    239             }
    240         };
    241 
    242         // trial mode?
    243         let trial = api_key.is_none() && backend == BackendType::OpenAI;
    244         let api_key = if backend == BackendType::OpenAI {
    245             api_key.or(Some(DAVE_TRIAL.to_string()))
    246         } else {
    247             api_key
    248         };
    249 
    250         let model = std::env::var("DAVE_MODEL")
    251             .ok()
    252             .unwrap_or_else(|| match backend {
    253                 BackendType::OpenAI => "gpt-4.1-mini".to_string(),
    254                 BackendType::Claude => "claude-sonnet-4.5".to_string(),
    255                 BackendType::Codex => AiProvider::Codex.default_model().to_string(),
    256                 BackendType::Remote => String::new(),
    257             });
    258 
    259         ModelConfig {
    260             trial,
    261             backend,
    262             endpoint: std::env::var("DAVE_ENDPOINT").ok(),
    263             model,
    264             api_key,
    265             anthropic_api_key,
    266             pns_relay: std::env::var("DAVE_RELAY").ok(),
    267         }
    268     }
    269 }
    270 
    271 impl ModelConfig {
    272     pub fn ai_mode(&self) -> AiMode {
    273         match self.backend {
    274             BackendType::Claude | BackendType::Codex => AiMode::Agentic,
    275             BackendType::OpenAI | BackendType::Remote => AiMode::Chat,
    276         }
    277     }
    278 
    279     pub fn model(&self) -> &str {
    280         &self.model
    281     }
    282 
    283     pub fn endpoint(&self) -> Option<&str> {
    284         self.endpoint.as_deref()
    285     }
    286 
    287     pub fn api_key(&self) -> Option<&str> {
    288         self.api_key.as_deref()
    289     }
    290 
    291     pub fn ollama() -> Self {
    292         ModelConfig {
    293             trial: false,
    294             backend: BackendType::OpenAI, // Ollama uses OpenAI-compatible API
    295             endpoint: std::env::var("OLLAMA_HOST").ok().map(|h| h + "/v1"),
    296             model: "hhao/qwen2.5-coder-tools:latest".to_string(),
    297             api_key: None,
    298             anthropic_api_key: None,
    299             pns_relay: None,
    300         }
    301     }
    302 
    303     /// Create a ModelConfig from DaveSettings
    304     pub fn from_settings(settings: &DaveSettings) -> Self {
    305         // If settings have an API key, we're not in trial mode
    306         // For Ollama, trial is always false since no key is required
    307         let trial = settings.provider.requires_api_key() && settings.api_key.is_none();
    308 
    309         let backend = match settings.provider {
    310             AiProvider::OpenAI | AiProvider::Ollama => BackendType::OpenAI,
    311             AiProvider::Anthropic => BackendType::Claude,
    312             AiProvider::Codex => BackendType::Codex,
    313         };
    314 
    315         let anthropic_api_key = if settings.provider == AiProvider::Anthropic {
    316             settings.api_key.clone()
    317         } else {
    318             None
    319         };
    320 
    321         let api_key = if settings.provider != AiProvider::Anthropic {
    322             settings.api_key.clone()
    323         } else {
    324             None
    325         };
    326 
    327         ModelConfig {
    328             trial,
    329             backend,
    330             endpoint: settings.endpoint.clone(),
    331             model: settings.model.clone(),
    332             api_key,
    333             anthropic_api_key,
    334             pns_relay: settings.pns_relay.clone(),
    335         }
    336     }
    337 
    338     /// Create a trial-mode config (uses embedded trial key with gpt-4.1-mini)
    339     pub fn trial() -> Self {
    340         ModelConfig {
    341             trial: true,
    342             backend: BackendType::OpenAI,
    343             endpoint: None,
    344             model: "gpt-4.1-mini".to_string(),
    345             api_key: Some(DAVE_TRIAL.to_string()),
    346             anthropic_api_key: None,
    347             pns_relay: None,
    348         }
    349     }
    350 
    351     pub fn to_api(&self) -> OpenAIConfig {
    352         let mut cfg = OpenAIConfig::new();
    353         if let Some(endpoint) = &self.endpoint {
    354             cfg = cfg.with_api_base(endpoint.to_owned());
    355         }
    356 
    357         if let Some(api_key) = &self.api_key {
    358             cfg = cfg.with_api_key(api_key.to_owned());
    359         }
    360 
    361         cfg
    362     }
    363 }