chore: improve code and doc

pull/167/head
sigoden 11 months ago
parent 2ab2e23bb0
commit 7f5bad6474

@@ -27,9 +27,9 @@ Download it from [GitHub Releases](https://github.com/sigoden/aichat/releases),
 ## Features
-- Supports multiple platforms, including openai and localai.
+- Supports multiple AIs, including openai and localai.
 - Support chat and command modes
-- Predefine AI [roles](#roles)
+- Predefine model [roles](#roles)
 - Use GPT prompt easily
 - Powerful [Chat REPL](#chat-repl)
 - Context-aware conversation/session
@@ -46,7 +46,7 @@ On first launch, aichat will guide you through the configuration.
 ```
 > No config file, create a new one? Yes
-> Select platform? openai
+> Select AI? openai
 > API key: sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
 > Has Organization? No
 > Use proxy? No
@@ -64,7 +64,7 @@ light_theme: false # If set true, use light theme
 auto_copy: false # Automatically copy the last output to the clipboard
 keybindings: emacs # REPL keybindings, possible values: emacs (default), vi
-clients: # Setup LLM platforms
+clients: # Setup AIs
 - type: openai # OpenAI configuration
   api_key: sk-xxx # Request via https://platform.openai.com/account/api-keys
@@ -266,7 +266,8 @@ Hello there! How can I assist you today?
 ## Session - context-aware conversation
 By default, aichat behaves in a one-off request/response manner.
-You should run aichat with "-s/--session" or use the ".session" command to start a session.
+You should run aichat with `-s/--session` or use the `.session` command to start a session.
 ```

@@ -60,7 +60,7 @@ pub struct Config {
     pub auto_copy: bool,
     /// REPL keybindings, possible values: emacs (default), vi
     pub keybindings: Keybindings,
-    /// Setup LLM platforms
+    /// Setup AIs
     pub clients: Vec<ClientConfig>,
     /// Predefined roles
     #[serde(skip)]
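
The `clients` field above pairs with the `clients:` list shown in the config.yaml hunk earlier. As a rough illustration of how such a list could map onto `Vec<ClientConfig>` via serde's internally tagged enum representation, here is a minimal, self-contained sketch; the variant and field names (`OpenAI`, `LocalAI`, `url`) are assumptions for demonstration, not the crate's actual definitions.

```rust
use serde::Deserialize;

// Illustrative only: variant and field names are assumptions, not aichat's
// actual `ClientConfig` definition.
#[derive(Debug, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
enum ClientConfig {
    OpenAI { api_key: String },
    LocalAI { url: String },
}

#[derive(Debug, Deserialize)]
struct Config {
    clients: Vec<ClientConfig>,
}

fn main() -> Result<(), serde_yaml::Error> {
    // Mirrors the `clients:` section from the config.yaml hunk above.
    let yaml = "clients:\n- type: openai\n  api_key: sk-xxx\n";
    let config: Config = serde_yaml::from_str(yaml)?;
    println!("{config:?}");
    Ok(())
}
```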
@@ -571,7 +571,7 @@ impl Config {
     fn compat_old_config(&mut self, config_path: &PathBuf) -> Result<()> {
         let content = read_to_string(config_path)?;
         let value: serde_json::Value = serde_yaml::from_str(&content)?;
-        if value.get("client").is_some() {
+        if value.get("clients").is_some() {
             return Ok(());
         }
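
The guard above parses the config generically and returns early once a `clients` key is present, i.e. the file is already in the new multi-client format. A minimal standalone sketch of that pattern (not aichat's actual migration code) could look like this:

```rust
use anyhow::Result;

// Sketch of the "already migrated?" check: parse the YAML into a generic
// value and skip migration when a `clients` key exists.
fn needs_migration(content: &str) -> Result<bool> {
    let value: serde_json::Value = serde_yaml::from_str(content)?;
    Ok(value.get("clients").is_none())
}

fn main() -> Result<()> {
    assert!(!needs_migration("clients:\n- type: openai\n")?);
    assert!(needs_migration("api_key: sk-xxx\n")?);
    Ok(())
}
```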
@@ -632,7 +632,7 @@ fn create_config_file(config_path: &Path) -> Result<()> {
         exit(0);
     }
-    let client = Select::new("Select platform?", all_clients())
+    let client = Select::new("Select AI?", all_clients())
         .prompt()
         .map_err(|_| anyhow!("An error happened when selecting platform, try again later."))?;
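
For reference, the Select-then-prompt pattern in this hunk matches the `inquire` crate's API. The sketch below shows that pattern in isolation, assuming `inquire::Select`; the `all_clients()` stub and its return values are assumptions standing in for aichat's real helper.

```rust
use anyhow::{anyhow, Result};
use inquire::Select;

// Stub standing in for aichat's real `all_clients()`; values are illustrative.
fn all_clients() -> Vec<String> {
    vec!["openai".into(), "localai".into()]
}

fn pick_client() -> Result<String> {
    // Same Select::new(...).prompt() pattern as the hunk above.
    Select::new("Select AI?", all_clients())
        .prompt()
        .map_err(|_| anyhow!("An error happened when selecting AI, try again later."))
}

fn main() -> Result<()> {
    let client = pick_client()?;
    println!("selected: {client}");
    Ok(())
}
```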
