Mirror of https://github.com/sigoden/aichat (synced 2024-11-16 06:15:26 +00:00)
refactor: minor improvement (#638)
parent 6d148c9c53
commit 777638049b
@@ -4,7 +4,7 @@
 [![Crates](https://img.shields.io/crates/v/aichat.svg)](https://crates.io/crates/aichat)
 [![Discord](https://img.shields.io/discord/1226737085453701222?label=Discord)](https://discord.gg/mr3ZZUB9hG)
 
-AIChat is an all-in-one AI CLI tool featuring Chat-REPL, RAG, Function Calling, AI Agents, and more.
+AIChat is an all-in-one AI CLI tool featuring Chat-REPL, Shell Assistant, RAG, Function Calling, AI Agents, and more.
 
 ![AIChat Command](https://github.com/sigoden/aichat/assets/4012553/84ae8382-62be-41d0-a0f1-101b113c5bc7)
 
@@ -45,7 +45,7 @@ AIChat is an all-in-one AI CLI tool featuring Chat-REPL, RAG, Function Calling,
 - Moonshot (paid, function-calling)
 - Deepseek (paid)
 - ZhipuAI: GLM-4 (paid, vision, function-calling)
-- LingYiWanWu: Yi-Large (paid, vision)
+- LingYiWanWu: Yi-* (paid, vision)
 - Other openAI-compatible platforms
 
 ## Install
@@ -1,14 +1,14 @@
 # ---- llm ----
 model: openai:gpt-3.5-turbo # Specify the language model to use
-temperature: null # Set default temperature parameter
-top_p: null # Set default top-p parameter
+temperature: null # Set default temperature parameter, range (0, 2)
+top_p: null # Set default top-p parameter, range (0, 1)
 
 # ---- apperence ----
 highlight: true # Controls syntax highlighting
 light_theme: false # Activates a light color theme when true. ENV: AICHAT_LIGHT_THEME
 # Custom REPL prompt, see https://github.com/sigoden/aichat/wiki/Custom-REPL-Prompt for more detils
 left_prompt:
-  '{color.green}{?session {?agent {agent}#}{session}{?role /}}{!session {?agent {agent}}}{role}{?rag @{rag}}{color.cyan}{?session )}{!session >}{color.reset} '
+  '{color.green}{?session {?agent {agent}>}{session}{?role /}}{!session {?agent {agent}>}}{role}{?rag @{rag}}{color.cyan}{?session )}{!session >}{color.reset} '
 right_prompt:
   '{color.purple}{?session {?consume_tokens {consume_tokens}({consume_percent}%)}{!consume_tokens {consume_tokens}}}{color.reset}'
 
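For orientation, a minimal sketch (not part of this commit) of how these keys might look when set explicitly; the values below are arbitrary illustrations within the documented ranges:

# Illustrative user-config sketch; values are assumptions, not defaults.
model: openai:gpt-3.5-turbo   # the model shown in the example config above
temperature: 0.7              # must stay within the documented range (0, 2)
top_p: 0.9                    # must stay within the documented range (0, 1)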
@@ -79,17 +79,17 @@ clients:
 # - type: xxxx
 #   name: xxxx # Only use it to distinguish clients with the same client type. Optional
 #   models:
-#     - name: xxxx
+#     - name: xxxx # Chat model
 #       max_input_tokens: 100000
 #       supports_vision: true
 #       supports_function_calling: true
-#     - name: xxxx
-#       type: embedding # Embedding model
+#     - name: xxxx # Embedding model
+#       type: embedding
 #       max_input_tokens: 2048
 #       default_chunk_size: 2000
 #       max_batch_size: 100
-#     - name: xxxx
-#       type: rerank # Rerank model
+#     - name: xxxx # Rerank model
+#       type: rerank
 #       max_input_tokens: 2048
 #   patches:
 #     <regex>: # The regex to match model names, e.g. '.*' 'gpt-4o' 'gpt-4o|gpt-4-.*'
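To make the commented template above concrete, here is a hedged sketch of a filled-in `clients` entry; the client type, client name, and model names are placeholders for illustration only, not part of this commit:

# Sketch only; provider and model names are assumptions.
clients:
  - type: openai-compatible          # assumed client type
    name: my-provider                # distinguishes clients of the same type
    models:
      - name: some-chat-model        # Chat model
        max_input_tokens: 100000
        supports_vision: true
        supports_function_calling: true
      - name: some-embedding-model   # Embedding model
        type: embedding
        max_input_tokens: 2048
        default_chunk_size: 2000
        max_batch_size: 100
      - name: some-rerank-model      # Rerank model
        type: rerank
        max_input_tokens: 2048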
@@ -75,7 +75,7 @@ And answer according to the language of the user's question.
 Given the context information, answer the query.
 Query: __INPUT__"#;
 
-const LEFT_PROMPT: &str = "{color.green}{?session {?agent {agent}#}{session}{?role /}}{!session {?agent {agent}}}{role}{?rag @{rag}}{color.cyan}{?session )}{!session >}{color.reset} ";
+const LEFT_PROMPT: &str = "{color.green}{?session {?agent {agent}>}{session}{?role /}}{!session {?agent {agent}>}}{role}{?rag @{rag}}{color.cyan}{?session )}{!session >}{color.reset} ";
 const RIGHT_PROMPT: &str = "{color.purple}{?session {?consume_tokens {consume_tokens}({consume_percent}%)}{!consume_tokens {consume_tokens}}}{color.reset}";
 
 #[derive(Debug, Clone, Deserialize)]
@@ -247,7 +247,6 @@ impl RoleLike for Role {
     }
 
     fn set_model(&mut self, model: &Model) {
-        self.model_id = Some(model.id());
         self.model = model.clone();
     }
 
@@ -123,7 +123,7 @@ lazy_static! {
             "Include files with the message",
             AssertState::pass()
         ),
-        ReplCommand::new(".continue", "Continue response", AssertState::pass()),
+        ReplCommand::new(".continue", "Continue the response", AssertState::pass()),
         ReplCommand::new(
             ".regenerate",
             "Regenerate the last response",
@@ -443,6 +443,16 @@ Type ".help" for additional help.
         KeyCode::Enter,
         ReedlineEvent::Edit(vec![EditCommand::InsertNewline]),
     );
+    keybindings.add_binding(
+        KeyModifiers::SHIFT,
+        KeyCode::Enter,
+        ReedlineEvent::Edit(vec![EditCommand::InsertNewline]),
+    );
+    keybindings.add_binding(
+        KeyModifiers::ALT,
+        KeyCode::Enter,
+        ReedlineEvent::Edit(vec![EditCommand::InsertNewline]),
+    );
 }
 
 fn create_edit_mode(config: &GlobalConfig) -> Box<dyn EditMode> {