Compare commits

..

5 Commits

Author SHA1 Message Date
aa0 ffa2bc9bd2 Remove commented code in `get_last_vqd`. 2024-08-05 06:54:25 +01:00
Ahmad-A0 c49bbd2692 Update README and config.rs to mark GPT4o as the newer model 2024-08-04 02:20:51 +01:00
Ahmad-A0 f0d3ef36e2 Disable retrieving the vqd, as it seems to break things 2024-08-04 02:12:50 +01:00
Ahmad-A0 (aider) 146ea25974 fix: Update GPT4OMini model identifier 2024-08-04 01:41:58 +01:00
Ahmad-A0 (aider) 1d9a05fb51 feat: Add gpt-4o-mini model to config 2024-08-04 01:41:15 +01:00
3 changed files with 8 additions and 10 deletions

View File

@@ -45,7 +45,7 @@ you can set their paths and filenames via `HEY_CONFIG_PATH`, `HEY_CONFIG_FILENAM
## config file reference ## config file reference
```toml ```toml
model = "Claude" # or "GPT3" model = "Claude" # or "GPT4OMini"
tos = false # whether if you agree to ddg chat tos tos = false # whether if you agree to ddg chat tos
``` ```

View File

@@ -76,10 +76,6 @@ impl Cache {
} }
pub fn get_last_vqd<'a, T: From<&'a String>>(self: &'a Self) -> Option<T> { pub fn get_last_vqd<'a, T: From<&'a String>>(self: &'a Self) -> Option<T> {
if self.last_vqd_time - (chrono::Local::now().timestamp_millis() as u64) < 60000 {
Some((&self.last_vqd).into())
} else {
None None
} }
} }
}

View File

@@ -8,10 +8,11 @@ pub enum Model {
// outdated // outdated
Claude12, Claude12,
GPT35, GPT35,
GPT3,
// current // current
Claude, Claude,
GPT3, GPT4OMini,
Llama, Llama,
Mixtral Mixtral
} }
@@ -25,7 +26,8 @@ impl ToString for Model {
Self::Claude => String::from("claude-3-haiku-20240307"), Self::Claude => String::from("claude-3-haiku-20240307"),
Self::GPT3 => String::from("gpt-3.5-turbo-0125"), Self::GPT3 => String::from("gpt-3.5-turbo-0125"),
Self::Llama => String::from("meta-llama/Llama-3-70b-chat-hf"), Self::Llama => String::from("meta-llama/Llama-3-70b-chat-hf"),
Self::Mixtral => String::from("mistralai/Mixtral-8x7B-Instruct-v0.1") Self::Mixtral => String::from("mistralai/Mixtral-8x7B-Instruct-v0.1"),
Self::GPT4OMini => String::from("gpt-4o-mini")
} }
} }
} }