feat: deprecate prompt, remove --prompt and .prompt (#176)

This commit is contained in:
sigoden 2023-10-30 10:36:08 +08:00 committed by GitHub
parent 44ea384ee5
commit b3e6879438
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 3 additions and 79 deletions

View File

@ -29,8 +29,7 @@ Download it from [GitHub Releases](https://github.com/sigoden/aichat/releases),
- Supports multiple AIs, including openai and localai.
- Supports chat and command modes
- Predefine model [roles](#roles)
- Use GPT prompt easily
- Use [roles](#roles)
- Powerful [Chat REPL](#chat-repl)
- Context-aware conversation/session
- Syntax highlighting markdown and 200 other languages
@ -91,9 +90,7 @@ clients: # Setup AIs
### Roles
We can let ChatGPT play a certain role through `prompt` so that it better generates what we want.
We can predefine a batch of roles in `roles.yaml`.
We can define a batch of roles in `roles.yaml`.
> We can get the location of `roles.yaml` through the repl's `.info` command or cli's `--info` option.
@ -167,7 +164,6 @@ AIChat also provides `.edit` command for multi-lines editing.
.info Print system-wide information
.set Modify the configuration temporarily
.model Choose a model
.prompt Add a GPT prompt
.role Select a role
.clear role Clear the currently selected role
.session Start a session
@ -216,30 +212,6 @@ vi_keybindings true
> .model localai:gpt4all-j
```
### `.prompt` - Set GPT prompt
When you set up a prompt, every message sent later will carry the prompt.
```
〉{ .prompt
I want you to translate the sentences I write into emojis.
I will write the sentence, and you will express it with emojis.
I just want you to express it with emojis.
I want you to reply only with emojis.
}
Done
〉You are a genius
👉🧠💡👨‍🎓
〉I'm embarrassed
🙈😳
```
`.prompt` actually creates a temporary role internally, so **run `.clear role` to clear the prompt**.
When you are satisfied with the prompt, add it to `roles.yaml` for later use.
### `.role` - Let the AI play a role
Select a role:

View File

@ -15,9 +15,6 @@ pub struct Cli {
/// Select a role
#[clap(short, long)]
pub role: Option<String>,
/// Add a GPT prompt
#[clap(short, long)]
pub prompt: Option<String>,
/// List sessions
#[clap(long)]
pub list_sessions: bool,

View File

@ -192,12 +192,6 @@ impl Config {
None => {
format!("# CHAT:[{timestamp}]\n{input}\n--------\n{output}\n--------\n\n",)
}
Some(v) if v.is_temp() => {
format!(
"# CHAT:[{timestamp}]\n{}\n{input}\n--------\n{output}\n--------\n\n",
v.prompt
)
}
Some(v) => {
format!(
"# CHAT:[{timestamp}] ({})\n{input}\n--------\n{output}\n--------\n\n",
@ -262,15 +256,6 @@ impl Config {
Ok(())
}
pub fn add_prompt(&mut self, prompt: &str) -> Result<()> {
let role = Role::new(prompt, self.temperature);
if let Some(session) = self.session.as_mut() {
session.update_role(Some(role.clone()))?;
}
self.role = Some(role);
Ok(())
}
pub fn get_temperature(&self) -> Option<f64> {
self.role
.as_ref()

View File

@ -3,7 +3,6 @@ use super::message::{Message, MessageRole};
use anyhow::{Context, Result};
use serde::{Deserialize, Serialize};
const TEMP_ROLE_NAME: &str = "temp";
const INPUT_PLACEHOLDER: &str = "__INPUT__";
#[derive(Debug, Clone, Deserialize, Serialize)]
@ -17,18 +16,6 @@ pub struct Role {
}
impl Role {
pub fn new(prompt: &str, temperature: Option<f64>) -> Self {
Self {
name: TEMP_ROLE_NAME.into(),
prompt: prompt.into(),
temperature,
}
}
pub fn is_temp(&self) -> bool {
self.name == TEMP_ROLE_NAME
}
pub fn info(&self) -> Result<String> {
let output = serde_yaml::to_string(&self)
.with_context(|| format!("Unable to show info about role {}", &self.name))?;

View File

@ -75,9 +75,6 @@ fn main() -> Result<()> {
if cli.no_highlight {
config.write().highlight = false;
}
if let Some(prompt) = &cli.prompt {
config.write().add_prompt(prompt)?;
}
if cli.info {
let info = if let Some(session) = &config.read().session {
session.info()?

View File

@ -18,7 +18,6 @@ pub enum ReplCmd {
SetModel(String),
SetRole(String),
UpdateConfig(String),
Prompt(String),
ClearRole,
ViewInfo,
StartSession(Option<String>),
@ -79,10 +78,6 @@ impl ReplCmdHandler {
self.config.write().clear_role()?;
print_now!("\n");
}
ReplCmd::Prompt(prompt) => {
self.config.write().add_prompt(&prompt)?;
print_now!("\n");
}
ReplCmd::ViewInfo => {
let output = self.config.read().info()?;
print_now!("{}\n\n", output.trim_end());

View File

@ -19,11 +19,10 @@ use lazy_static::lazy_static;
use reedline::Signal;
use std::rc::Rc;
pub const REPL_COMMANDS: [(&str, &str); 15] = [
pub const REPL_COMMANDS: [(&str, &str); 14] = [
(".info", "Print system-wide information"),
(".set", "Modify the configuration temporarily"),
(".model", "Choose a model"),
(".prompt", "Add a GPT prompt"),
(".role", "Select a role"),
(".clear role", "Clear the currently selected role"),
(".session", "Start a session"),
@ -134,14 +133,6 @@ impl Repl {
handler.handle(ReplCmd::UpdateConfig(args.unwrap_or_default().to_string()))?;
self.prompt.sync_config();
}
".prompt" => {
let text = args.unwrap_or_default().to_string();
if text.is_empty() {
print_now!("Usage: .prompt <text>.\n\n");
} else {
handler.handle(ReplCmd::Prompt(text))?;
}
}
".session" => {
handler.handle(ReplCmd::StartSession(args.map(|v| v.to_string())))?;
}