OpenRouter works as an OpenAI-compatible backend; the following invocation was verified: DAVE_API_KEY=$OPENROUTER_API_KEY DAVE_ENDPOINT=https://openrouter.ai/api/v1 DAVE_MODEL="google/gemini-2.0-flash-001" RUST_LOG=async_openai=debug,notedeck_dave=debug ./target/release/notedeck
50 lines · 1.2 KiB · Rust
use async_openai::config::OpenAIConfig;
|
|
|
|
/// Configuration for the LLM backend: which OpenAI-compatible endpoint
/// to talk to, which model to request, and the API key to send.
#[derive(Debug)]
pub struct ModelConfig {
    // Optional base URL of an OpenAI-compatible API
    // (e.g. OpenRouter, or a local Ollama server). `None` means the
    // async-openai default (api.openai.com) is used.
    endpoint: Option<String>,
    // Model identifier sent with each request (e.g. "gpt-4o").
    model: String,
    // API key for the endpoint, if it requires one.
    api_key: Option<String>,
}
|
|
|
|
impl Default for ModelConfig {
|
|
fn default() -> Self {
|
|
ModelConfig {
|
|
endpoint: std::env::var("DAVE_ENDPOINT").ok(),
|
|
model: std::env::var("DAVE_MODEL")
|
|
.ok()
|
|
.unwrap_or("gpt-4o".to_string()),
|
|
api_key: std::env::var("DAVE_API_KEY")
|
|
.ok()
|
|
.or(std::env::var("OPENAI_API_KEY").ok()),
|
|
}
|
|
}
|
|
}
|
|
|
|
impl ModelConfig {
|
|
pub fn model(&self) -> &str {
|
|
&self.model
|
|
}
|
|
|
|
pub fn ollama() -> Self {
|
|
ModelConfig {
|
|
endpoint: std::env::var("OLLAMA_HOST").ok().map(|h| h + "/v1"),
|
|
model: "hhao/qwen2.5-coder-tools:latest".to_string(),
|
|
api_key: None,
|
|
}
|
|
}
|
|
|
|
pub fn to_api(&self) -> OpenAIConfig {
|
|
let mut cfg = OpenAIConfig::new();
|
|
if let Some(endpoint) = &self.endpoint {
|
|
cfg = cfg.with_api_base(endpoint.to_owned());
|
|
}
|
|
|
|
if let Some(api_key) = &self.api_key {
|
|
cfg = cfg.with_api_key(api_key.to_owned());
|
|
}
|
|
|
|
cfg
|
|
}
|
|
}
|