mirror of https://github.com/sigoden/aichat
synced 2024-11-18 09:28:27 +00:00
refactor: minor improvements
- config.example.yaml comments
- `--serve` description
- No model error

This commit is contained in:
parent 7bda1eace2
commit 212ff1674e

config.example.yaml:

@@ -79,7 +79,7 @@ clients:
     api_base: http://localhost:8080/v1
     api_key: sk-xxx # ENV: {client_name}_API_BASE
     chat_endpoint: /chat/completions # Optional
-    models:
+    models: # Required
       - name: llama3
         max_input_tokens: 8192
 
@@ -88,16 +88,16 @@ clients:
     api_base: http://localhost:11434
     api_auth: Basic xxx # ENV: {client_name}_API_AUTH
     chat_endpoint: /api/chat # Optional
-    models:
+    models: # Required
       - name: llama3
         max_input_tokens: 8192
 
   # See https://learn.microsoft.com/en-us/azure/ai-services/openai/chatgpt-quickstart
   - type: azure-openai
     api_base: https://{RESOURCE}.openai.azure.com
     api_key: xxx # ENV: {client_name}_API_BASE
-    models:
+    models: # Required
       - name: gpt-35-turbo # Model deployment name
         max_input_tokens: 8192
 
   # See https://cloud.google.com/vertex-ai
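
Note: the `# Required` comments added above mark the one key a client entry cannot omit. As a rough illustration only (hypothetical types and field names, not aichat's actual configuration structs), a serde mapping like the following would reject an entry that is missing `models` while tolerating a missing `chat_endpoint`:

// Hypothetical sketch -- not aichat's real config types.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct ClientConfig {
    r#type: String,                // e.g. "openai-compatible", "azure-openai"
    api_base: Option<String>,
    api_key: Option<String>,
    chat_endpoint: Option<String>, // marked "Optional" in the example config
    models: Vec<ModelConfig>,      // marked "Required": a missing field fails deserialization
}

#[derive(Debug, Deserialize)]
struct ModelConfig {
    name: String,                  // e.g. "llama3" or an Azure deployment name
    max_input_tokens: Option<usize>,
}

fn main() -> Result<(), serde_yaml::Error> {
    let yaml = r#"
type: openai-compatible
api_base: http://localhost:8080/v1
api_key: sk-xxx
models:
  - name: llama3
    max_input_tokens: 8192
"#;
    let client: ClientConfig = serde_yaml::from_str(yaml)?;
    println!("{client:?}");
    Ok(())
}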

fish completion:

@@ -4,7 +4,7 @@ complete -c aichat -s s -l session -x -a"(aichat --list-sessions)" -d 'Start or
 complete -c aichat -s f -l file -d 'Include files with the message' -r -F
 complete -c aichat -s w -l wrap -d 'Control text wrapping (no, auto, <max-width>)'
 complete -c aichat -l save-session -d 'Forces the session to be saved'
-complete -c aichat -l serve -d 'Serve all LLMs as OpenAI-compatible API'
+complete -c aichat -l serve -d 'Serve all LLMs via an OpenAI-compatible API'
 complete -c aichat -s e -l execute -d 'Execute commands in natural language'
 complete -c aichat -s c -l code -d 'Output code only'
 complete -c aichat -s H -l no-highlight -d 'Turn off syntax highlighting'

nushell completion:

@@ -28,7 +28,7 @@ module completions {
     --role(-r): string@"nu-complete aichat role" # Select a role
     --session(-s): string@"nu-complete aichat role" # Start or join a session
     --save-session # Forces the session to be saved
-    --serve # Serve all LLMs as OpenAI-compatible API
+    --serve # Serve all LLMs via an OpenAI-compatible API
     --execute(-e) # Execute commands in natural language
     --code(-c) # Output code only
     --file(-f): string # Include files with the message

PowerShell completion:

@@ -31,7 +31,7 @@ Register-ArgumentCompleter -Native -CommandName 'aichat' -ScriptBlock {
             [CompletionResult]::new('-w', '-w', [CompletionResultType]::ParameterName, 'Control text wrapping (no, auto, <max-width>)')
             [CompletionResult]::new('--wrap', '--wrap', [CompletionResultType]::ParameterName, 'Control text wrapping (no, auto, <max-width>)')
             [CompletionResult]::new('--save-session', '--save-session', [CompletionResultType]::ParameterName, 'Forces the session to be saved')
-            [CompletionResult]::new('--serve', '--serve', [CompletionResultType]::ParameterName, 'Serve all LLMs as OpenAI-compatible API')
+            [CompletionResult]::new('--serve', '--serve', [CompletionResultType]::ParameterName, 'Serve all LLMs via an OpenAI-compatible API')
             [CompletionResult]::new('-e', '-e', [CompletionResultType]::ParameterName, 'Execute commands in natural language')
             [CompletionResult]::new('--execute', '--execute', [CompletionResultType]::ParameterName, 'Execute commands in natural language')
             [CompletionResult]::new('-c', '-c', [CompletionResultType]::ParameterName, 'Output code only')

zsh completion:

@@ -26,7 +26,7 @@ _aichat() {
 '-w+[Control text wrapping (no, auto, <max-width>)]:WRAP: ' \
 '--wrap=[Control text wrapping (no, auto, <max-width>)]:WRAP: ' \
 '--save-session[Forces the session to be saved]' \
-'--serve[Serve all LLMs as OpenAI-compatible API]' \
+'--serve[Serve all LLMs via an OpenAI-compatible API]' \
 '-e[Execute commands in natural language]' \
 '--execute[Execute commands in natural language]' \
 '-c[Output code only]' \

Rust CLI definition (clap derive):

@@ -15,7 +15,7 @@ pub struct Cli {
     /// Forces the session to be saved
     #[clap(long)]
     pub save_session: bool,
-    /// Serve all LLMs as OpenAI-compatible API
+    /// Serve all LLMs via an OpenAI-compatible API
     #[clap(long, value_name = "ADDRESS")]
     pub serve: Option<Option<String>>,
     /// Execute commands in natural language
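
The doc comment changed above is what clap prints in `--help` for `--serve`; the field type `Option<Option<String>>` together with `value_name = "ADDRESS"` makes both the flag and its value optional. A standalone sketch of that behavior (illustrative only, not the project's full CLI):

// Minimal sketch of an optional flag with an optional value, using clap's derive API.
use clap::Parser;

#[derive(Parser)]
struct Cli {
    /// Serve all LLMs via an OpenAI-compatible API
    #[clap(long, value_name = "ADDRESS")]
    serve: Option<Option<String>>,
}

fn main() {
    let cli = Cli::parse();
    match cli.serve {
        None => println!("serve mode not requested"),          // flag absent
        Some(None) => println!("serve on a default address"),  // `--serve` with no value
        Some(Some(addr)) => println!("serve on {addr}"),       // `--serve <ADDRESS>`
    }
}

In aichat itself this presumably corresponds to `aichat --serve` for the default listen address and `aichat --serve <ADDRESS>` to bind elsewhere.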

Rust model lookup in `impl Config`:

@@ -396,7 +396,7 @@ impl Config {
         let models = list_models(self);
         let model = Model::find(&models, value);
         match model {
-            None => bail!("Invalid model '{}'", value),
+            None => bail!("No model '{}'", value),
             Some(model) => {
                 if let Some(session) = self.session.as_mut() {
                     session.set_model(model.clone())?;
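
`bail!` is anyhow's early-return macro, so the behavioral change here is only the error text reported for an unknown model. A self-contained stand-in for the same lookup-or-fail pattern (the `Model` type and `find` below are simplified placeholders, not aichat's own):

// Simplified stand-in illustrating the lookup-or-fail pattern and the new message.
use anyhow::{bail, Result};

#[derive(Clone, Debug)]
struct Model {
    id: String,
}

fn find<'a>(models: &'a [Model], value: &str) -> Option<&'a Model> {
    models.iter().find(|m| m.id == value)
}

fn select_model(models: &[Model], value: &str) -> Result<Model> {
    match find(models, value) {
        None => bail!("No model '{}'", value),
        Some(model) => Ok(model.clone()),
    }
}

fn main() -> Result<()> {
    let models = vec![Model { id: "ollama:llama3".into() }];
    println!("{:?}", select_model(&models, "ollama:llama3")?);
    // An unknown id now reports: No model 'foo:bar'
    assert!(select_model(&models, "foo:bar").is_err());
    Ok(())
}

Either way the lookup itself is unchanged; only the message text differs.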