diff --git a/README.md b/README.md index 4615408..174e719 100644 --- a/README.md +++ b/README.md @@ -29,8 +29,7 @@ Download it from [GitHub Releases](https://github.com/sigoden/aichat/releases), - Supports multiple AIs, including openai and localai. - Support chat and command modes -- Predefine model [roles](#roles) -- Use GPT prompt easily +- Use [roles](#roles) - Powerful [Chat REPL](#chat-repl) - Context-aware conversation/session - Syntax highlighting markdown and 200 other languages @@ -91,9 +90,7 @@ clients: # Setup AIs ### Roles -We can let ChatGPT play a certain role through `prompt` to have it better generate what we want. - -We can predefine a batch of roles in `roles.yaml`. +We can define a batch of roles in `roles.yaml`. > We can get the location of `roles.yaml` through the repl's `.info` command or cli's `--info` option. @@ -167,7 +164,6 @@ AIChat also provides `.edit` command for multi-lines editing. .info Print system-wide information .set Modify the configuration temporarily .model Choose a model -.prompt Add a GPT prompt .role Select a role .clear role Clear the currently selected role .session Start a session @@ -216,30 +212,6 @@ vi_keybindings true > .model localai:gpt4all-j ``` -### `.prompt` - Set GPT prompt - -When you set up a prompt, every message sent later will carry the prompt. - -``` -〉{ .prompt -I want you to translate the sentences I write into emojis. -I will write the sentence, and you will express it with emojis. -I just want you to express it with emojis. -I want you to reply only with emojis. -} -Done - -P〉You are a genius -👉🧠💡👨‍🎓 - -P〉I'm embarrassed -🙈😳 -``` - -`.prompt` actually creates a temporary role internally, so **run `.clear role` to clear the prompt**. - -When you are satisfied with the prompt, add it to `roles.yaml` for later use. 
- ### `.role` - Let the AI play a role Select a role: diff --git a/src/cli.rs b/src/cli.rs index 193babb..2901ed4 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -15,9 +15,6 @@ pub struct Cli { /// Select a role #[clap(short, long)] pub role: Option<String>, - /// Add a GPT prompt - #[clap(short, long)] - pub prompt: Option<String>, /// List sessions #[clap(long)] pub list_sessions: bool, diff --git a/src/config/mod.rs b/src/config/mod.rs index 4dcc2c9..0af432c 100644 --- a/src/config/mod.rs +++ b/src/config/mod.rs @@ -192,12 +192,6 @@ impl Config { None => { format!("# CHAT:[{timestamp}]\n{input}\n--------\n{output}\n--------\n\n",) } - Some(v) if v.is_temp() => { - format!( - "# CHAT:[{timestamp}]\n{}\n{input}\n--------\n{output}\n--------\n\n", - v.prompt - ) - } Some(v) => { format!( "# CHAT:[{timestamp}] ({})\n{input}\n--------\n{output}\n--------\n\n", @@ -262,15 +256,6 @@ impl Config { Ok(()) } - pub fn add_prompt(&mut self, prompt: &str) -> Result<()> { - let role = Role::new(prompt, self.temperature); - if let Some(session) = self.session.as_mut() { - session.update_role(Some(role.clone()))?; - } - self.role = Some(role); - Ok(()) - } - pub fn get_temperature(&self) -> Option<f64> { self.role .as_ref() diff --git a/src/config/role.rs b/src/config/role.rs index fee989e..87aca5e 100644 --- a/src/config/role.rs +++ b/src/config/role.rs @@ -3,7 +3,6 @@ use super::message::{Message, MessageRole}; use anyhow::{Context, Result}; use serde::{Deserialize, Serialize}; -const TEMP_ROLE_NAME: &str = "temp"; const INPUT_PLACEHOLDER: &str = "__INPUT__"; #[derive(Debug, Clone, Deserialize, Serialize)] @@ -17,18 +16,6 @@ pub struct Role { } impl Role { - pub fn new(prompt: &str, temperature: Option<f64>) -> Self { - Self { - name: TEMP_ROLE_NAME.into(), - prompt: prompt.into(), - temperature, - } - } - - pub fn is_temp(&self) -> bool { - self.name == TEMP_ROLE_NAME - } - pub fn info(&self) -> Result<String> { let output = serde_yaml::to_string(&self) .with_context(|| format!("Unable to show info about role 
{}", &self.name))?; diff --git a/src/main.rs b/src/main.rs index 6baf198..0d12731 100644 --- a/src/main.rs +++ b/src/main.rs @@ -75,9 +75,6 @@ fn main() -> Result<()> { if cli.no_highlight { config.write().highlight = false; } - if let Some(prompt) = &cli.prompt { - config.write().add_prompt(prompt)?; - } if cli.info { let info = if let Some(session) = &config.read().session { session.info()? diff --git a/src/repl/handler.rs b/src/repl/handler.rs index bafad14..ff6cf56 100644 --- a/src/repl/handler.rs +++ b/src/repl/handler.rs @@ -18,7 +18,6 @@ pub enum ReplCmd { SetModel(String), SetRole(String), UpdateConfig(String), - Prompt(String), ClearRole, ViewInfo, StartSession(Option<String>), @@ -79,10 +78,6 @@ impl ReplCmdHandler { self.config.write().clear_role()?; print_now!("\n"); } - ReplCmd::Prompt(prompt) => { - self.config.write().add_prompt(&prompt)?; - print_now!("\n"); - } ReplCmd::ViewInfo => { let output = self.config.read().info()?; print_now!("{}\n\n", output.trim_end()); diff --git a/src/repl/mod.rs b/src/repl/mod.rs index fadb81f..46d0cca 100644 --- a/src/repl/mod.rs +++ b/src/repl/mod.rs @@ -19,11 +19,10 @@ use lazy_static::lazy_static; use reedline::Signal; use std::rc::Rc; -pub const REPL_COMMANDS: [(&str, &str); 15] = [ +pub const REPL_COMMANDS: [(&str, &str); 14] = [ (".info", "Print system-wide information"), (".set", "Modify the configuration temporarily"), (".model", "Choose a model"), - (".prompt", "Add a GPT prompt"), (".role", "Select a role"), (".clear role", "Clear the currently selected role"), (".session", "Start a session"), @@ -134,14 +133,6 @@ impl Repl { handler.handle(ReplCmd::UpdateConfig(args.unwrap_or_default().to_string()))?; self.prompt.sync_config(); } - ".prompt" => { - let text = args.unwrap_or_default().to_string(); - if text.is_empty() { - print_now!("Usage: .prompt <text>.\n\n"); - } else { - handler.handle(ReplCmd::Prompt(text))?; - } - } ".session" => { handler.handle(ReplCmd::StartSession(args.map(|v| v.to_string())))?; }