notedeck

One damus client to rule them all
git clone git://jb55.com/notedeck
Log | Files | Refs | README | LICENSE

config.rs (9489B)


      1 use crate::backend::BackendType;
      2 use async_openai::config::OpenAIConfig;
      3 
/// AI interaction mode - determines UI complexity and feature set.
///
/// Selected from the active backend via `ModelConfig::ai_mode()`:
/// the OpenAI backend maps to `Chat`, the Claude backend to `Agentic`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum AiMode {
    /// Simple chat interface (OpenAI-style) - no permissions, no CWD, no scene view
    Chat,
    /// Full IDE with permissions, sessions, scene view, etc. (Claude backend)
    Agentic,
}
     12 
     13 /// Available AI providers for Dave
     14 #[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
     15 pub enum AiProvider {
     16     #[default]
     17     OpenAI,
     18     Anthropic,
     19     Ollama,
     20 }
     21 
     22 impl AiProvider {
     23     pub const ALL: [AiProvider; 3] = [
     24         AiProvider::OpenAI,
     25         AiProvider::Anthropic,
     26         AiProvider::Ollama,
     27     ];
     28 
     29     pub fn name(&self) -> &'static str {
     30         match self {
     31             AiProvider::OpenAI => "OpenAI",
     32             AiProvider::Anthropic => "Anthropic",
     33             AiProvider::Ollama => "Ollama",
     34         }
     35     }
     36 
     37     pub fn default_model(&self) -> &'static str {
     38         match self {
     39             AiProvider::OpenAI => "gpt-4o",
     40             AiProvider::Anthropic => "claude-sonnet-4-20250514",
     41             AiProvider::Ollama => "hhao/qwen2.5-coder-tools:latest",
     42         }
     43     }
     44 
     45     pub fn default_endpoint(&self) -> Option<&'static str> {
     46         match self {
     47             AiProvider::OpenAI => None,
     48             AiProvider::Anthropic => Some("https://api.anthropic.com/v1"),
     49             AiProvider::Ollama => Some("http://localhost:11434/v1"),
     50         }
     51     }
     52 
     53     pub fn requires_api_key(&self) -> bool {
     54         match self {
     55             AiProvider::OpenAI | AiProvider::Anthropic => true,
     56             AiProvider::Ollama => false,
     57         }
     58     }
     59 
     60     pub fn available_models(&self) -> &'static [&'static str] {
     61         match self {
     62             AiProvider::OpenAI => &["gpt-4o", "gpt-4o-mini", "gpt-4-turbo", "gpt-3.5-turbo"],
     63             AiProvider::Anthropic => &[
     64                 "claude-sonnet-4-20250514",
     65                 "claude-opus-4-20250514",
     66                 "claude-3-5-sonnet-20241022",
     67                 "claude-3-5-haiku-20241022",
     68             ],
     69             AiProvider::Ollama => &[
     70                 "hhao/qwen2.5-coder-tools:latest",
     71                 "llama3.2:latest",
     72                 "mistral:latest",
     73                 "codellama:latest",
     74             ],
     75         }
     76     }
     77 }
     78 
     79 /// User-configurable settings for Dave AI
     80 #[derive(Debug, Clone)]
     81 pub struct DaveSettings {
     82     pub provider: AiProvider,
     83     pub model: String,
     84     pub endpoint: Option<String>,
     85     pub api_key: Option<String>,
     86 }
     87 
     88 impl Default for DaveSettings {
     89     fn default() -> Self {
     90         DaveSettings {
     91             provider: AiProvider::default(),
     92             model: AiProvider::default().default_model().to_string(),
     93             endpoint: None,
     94             api_key: None,
     95         }
     96     }
     97 }
     98 
     99 impl DaveSettings {
    100     /// Create settings with provider defaults applied
    101     pub fn with_provider(provider: AiProvider) -> Self {
    102         DaveSettings {
    103             provider,
    104             model: provider.default_model().to_string(),
    105             endpoint: provider.default_endpoint().map(|s| s.to_string()),
    106             api_key: None,
    107         }
    108     }
    109 
    110     /// Create settings from an existing ModelConfig (preserves env var values)
    111     pub fn from_model_config(config: &ModelConfig) -> Self {
    112         let provider = match config.backend {
    113             BackendType::OpenAI => AiProvider::OpenAI,
    114             BackendType::Claude => AiProvider::Anthropic,
    115         };
    116 
    117         let api_key = match provider {
    118             AiProvider::Anthropic => config.anthropic_api_key.clone(),
    119             _ => config.api_key().map(|s| s.to_string()),
    120         };
    121 
    122         DaveSettings {
    123             provider,
    124             model: config.model().to_string(),
    125             endpoint: config
    126                 .endpoint()
    127                 .map(|s| s.to_string())
    128                 .or_else(|| provider.default_endpoint().map(|s| s.to_string())),
    129             api_key,
    130         }
    131     }
    132 }
    133 
/// Backend/model configuration, resolved from environment variables
/// (see `Default`) or from user settings (see `from_settings`).
#[derive(Debug)]
pub struct ModelConfig {
    // True when running on the built-in trial key (no user-supplied OpenAI key).
    pub trial: bool,
    pub backend: BackendType,
    // Optional API base URL override; read via `endpoint()`.
    endpoint: Option<String>,
    // Model identifier; read via `model()`.
    model: String,
    // Key for the OpenAI-compatible backend; read via `api_key()`.
    api_key: Option<String>,
    // Separate key for the Anthropic/Claude backend.
    pub anthropic_api_key: Option<String>,
}
    143 
    144 // short-term trial key for testing
    145 const DAVE_TRIAL: &str = unsafe {
    146     std::str::from_utf8_unchecked(&[
    147         0x73, 0x6b, 0x2d, 0x70, 0x72, 0x6f, 0x6a, 0x2d, 0x54, 0x6b, 0x61, 0x48, 0x46, 0x32, 0x73,
    148         0x72, 0x43, 0x59, 0x73, 0x5a, 0x62, 0x33, 0x6f, 0x6b, 0x43, 0x75, 0x61, 0x78, 0x39, 0x57,
    149         0x76, 0x72, 0x41, 0x46, 0x67, 0x5f, 0x39, 0x58, 0x78, 0x35, 0x65, 0x37, 0x4b, 0x53, 0x36,
    150         0x76, 0x32, 0x32, 0x51, 0x30, 0x67, 0x48, 0x61, 0x58, 0x6b, 0x67, 0x6e, 0x4e, 0x4d, 0x63,
    151         0x7a, 0x69, 0x72, 0x5f, 0x44, 0x57, 0x6e, 0x7a, 0x43, 0x77, 0x52, 0x50, 0x4e, 0x50, 0x39,
    152         0x6b, 0x5a, 0x79, 0x75, 0x57, 0x4c, 0x35, 0x54, 0x33, 0x42, 0x6c, 0x62, 0x6b, 0x46, 0x4a,
    153         0x72, 0x66, 0x49, 0x4b, 0x31, 0x77, 0x4f, 0x67, 0x31, 0x6a, 0x37, 0x54, 0x57, 0x42, 0x5a,
    154         0x67, 0x66, 0x49, 0x75, 0x30, 0x51, 0x48, 0x4e, 0x31, 0x70, 0x6a, 0x72, 0x37, 0x4b, 0x38,
    155         0x55, 0x54, 0x6d, 0x34, 0x50, 0x6f, 0x65, 0x47, 0x39, 0x61, 0x35, 0x79, 0x6c, 0x78, 0x45,
    156         0x4f, 0x6f, 0x74, 0x43, 0x47, 0x42, 0x36, 0x65, 0x7a, 0x59, 0x5a, 0x37, 0x70, 0x54, 0x38,
    157         0x63, 0x44, 0x75, 0x66, 0x75, 0x36, 0x52, 0x4d, 0x6b, 0x6c, 0x2d, 0x44, 0x51, 0x41,
    158     ])
    159 };
    160 
    161 impl Default for ModelConfig {
    162     fn default() -> Self {
    163         let api_key = std::env::var("DAVE_API_KEY")
    164             .ok()
    165             .or(std::env::var("OPENAI_API_KEY").ok());
    166 
    167         let anthropic_api_key = std::env::var("ANTHROPIC_API_KEY")
    168             .ok()
    169             .or(std::env::var("CLAUDE_API_KEY").ok());
    170 
    171         // Determine backend: explicit env var takes precedence, otherwise auto-detect
    172         let backend = if let Ok(backend_str) = std::env::var("DAVE_BACKEND") {
    173             match backend_str.to_lowercase().as_str() {
    174                 "claude" | "anthropic" => BackendType::Claude,
    175                 "openai" => BackendType::OpenAI,
    176                 _ => {
    177                     tracing::warn!(
    178                         "Unknown DAVE_BACKEND value: {}, defaulting to OpenAI",
    179                         backend_str
    180                     );
    181                     BackendType::OpenAI
    182                 }
    183             }
    184         } else {
    185             // Auto-detect: prefer Claude if key is available, otherwise OpenAI
    186             if anthropic_api_key.is_some() {
    187                 BackendType::Claude
    188             } else {
    189                 BackendType::OpenAI
    190             }
    191         };
    192 
    193         // trial mode?
    194         let trial = api_key.is_none() && backend == BackendType::OpenAI;
    195         let api_key = if backend == BackendType::OpenAI {
    196             api_key.or(Some(DAVE_TRIAL.to_string()))
    197         } else {
    198             api_key
    199         };
    200 
    201         let model = std::env::var("DAVE_MODEL")
    202             .ok()
    203             .unwrap_or_else(|| match backend {
    204                 BackendType::OpenAI => "gpt-4o".to_string(),
    205                 BackendType::Claude => "claude-sonnet-4.5".to_string(),
    206             });
    207 
    208         ModelConfig {
    209             trial,
    210             backend,
    211             endpoint: std::env::var("DAVE_ENDPOINT").ok(),
    212             model,
    213             api_key,
    214             anthropic_api_key,
    215         }
    216     }
    217 }
    218 
    219 impl ModelConfig {
    220     pub fn ai_mode(&self) -> AiMode {
    221         match self.backend {
    222             BackendType::Claude => AiMode::Agentic,
    223             BackendType::OpenAI => AiMode::Chat,
    224         }
    225     }
    226 
    227     pub fn model(&self) -> &str {
    228         &self.model
    229     }
    230 
    231     pub fn endpoint(&self) -> Option<&str> {
    232         self.endpoint.as_deref()
    233     }
    234 
    235     pub fn api_key(&self) -> Option<&str> {
    236         self.api_key.as_deref()
    237     }
    238 
    239     pub fn ollama() -> Self {
    240         ModelConfig {
    241             trial: false,
    242             backend: BackendType::OpenAI, // Ollama uses OpenAI-compatible API
    243             endpoint: std::env::var("OLLAMA_HOST").ok().map(|h| h + "/v1"),
    244             model: "hhao/qwen2.5-coder-tools:latest".to_string(),
    245             api_key: None,
    246             anthropic_api_key: None,
    247         }
    248     }
    249 
    250     /// Create a ModelConfig from DaveSettings
    251     pub fn from_settings(settings: &DaveSettings) -> Self {
    252         // If settings have an API key, we're not in trial mode
    253         // For Ollama, trial is always false since no key is required
    254         let trial = settings.provider.requires_api_key() && settings.api_key.is_none();
    255 
    256         let backend = match settings.provider {
    257             AiProvider::OpenAI | AiProvider::Ollama => BackendType::OpenAI,
    258             AiProvider::Anthropic => BackendType::Claude,
    259         };
    260 
    261         let anthropic_api_key = if settings.provider == AiProvider::Anthropic {
    262             settings.api_key.clone()
    263         } else {
    264             None
    265         };
    266 
    267         let api_key = if settings.provider != AiProvider::Anthropic {
    268             settings.api_key.clone()
    269         } else {
    270             None
    271         };
    272 
    273         ModelConfig {
    274             trial,
    275             backend,
    276             endpoint: settings.endpoint.clone(),
    277             model: settings.model.clone(),
    278             api_key,
    279             anthropic_api_key,
    280         }
    281     }
    282 
    283     pub fn to_api(&self) -> OpenAIConfig {
    284         let mut cfg = OpenAIConfig::new();
    285         if let Some(endpoint) = &self.endpoint {
    286             cfg = cfg.with_api_base(endpoint.to_owned());
    287         }
    288 
    289         if let Some(api_key) = &self.api_key {
    290             cfg = cfg.with_api_key(api_key.to_owned());
    291         }
    292 
    293         cfg
    294     }
    295 }