diff --git a/Cargo.lock b/Cargo.lock index ce9c495..0989cc9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -28,7 +28,7 @@ dependencies = [ [[package]] name = "aichat" -version = "0.16.0" +version = "0.17.0" dependencies = [ "ansi_colours", "anyhow", @@ -106,47 +106,48 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.13" +version = "0.6.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb" +checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", + "is_terminal_polyfill", "utf8parse", ] [[package]] name = "anstyle" -version = "1.0.6" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc" +checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" [[package]] name = "anstyle-parse" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" +checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" +checksum = "a64c907d4e79225ac72e2a354c9ce84d50ebb4586dee56c82b3ee73004f537f5" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.2" +version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" +checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" dependencies = [ "anstyle", "windows-sys 0.52.0", @@ -154,9 +155,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.82" +version = "1.0.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f538837af36e6f6a9be0faa67f9a314f8119e4e4b5867c6ab40ed60360142519" +checksum = "25bdb32cbbdce2b519a9cd7df3a678443100e265d5e25ca763b7572a5104f5f3" [[package]] name = "arboard" @@ -198,9 +199,9 @@ dependencies = [ [[package]] name = "autocfg" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "aws-smithy-eventstream" @@ -215,9 +216,9 @@ dependencies = [ [[package]] name = "aws-smithy-types" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abe14dceea1e70101d38fbf2a99e6a34159477c0fb95e68e05c66bd7ae4c3729" +checksum = "baf98d97bba6ddaba180f1b1147e202d8fe04940403a95a3f826c790f931bbd1" dependencies = [ "base64-simd", "bytes", @@ -344,9 +345,9 @@ checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "bytemuck" -version = "1.15.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d6d68c57235a3a081186990eca2867354726650f42f7516ca50c28d6281fd15" +checksum = "78834c15cb5d5efe3452d58b1e8ba890dd62d21907f867f383358198e56ebca5" [[package]] name = "byteorder" @@ -372,9 
+373,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.96" +version = "1.0.97" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "065a29261d53ba54260972629f9ca6bffa69bac13cd1fed61420f7fa68b9f8bd" +checksum = "099a5357d84c4c61eb35fc8eafa9a79a902c2f76911e5747ced4e032edd8d9b4" [[package]] name = "cfg-if" @@ -454,9 +455,9 @@ dependencies = [ [[package]] name = "colorchoice" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" +checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" [[package]] name = "core-foundation" @@ -632,9 +633,9 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ "libc", "windows-sys 0.52.0", @@ -848,9 +849,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "libc", @@ -1105,6 +1106,12 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "is_terminal_polyfill" +version = "1.70.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" + [[package]] name = "itertools" version = "0.12.1" @@ -1330,9 +1337,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", ] @@ -1513,9 +1520,9 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "petgraph" -version = "0.6.4" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" +checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", "indexmap", @@ -1581,9 +1588,9 @@ checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "proc-macro2" -version = "1.0.81" +version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d1597b0c024618f09a9c3b8655b7e430397a36d23fdafec26d6965e9eec3eba" +checksum = "8ad3d49ab951a01fbaafe34f2ec74122942fe18a3f9814c3268f1bb72042131b" dependencies = [ "unicode-ident", ] @@ -1778,9 +1785,9 @@ dependencies = [ [[package]] name = "rustc-demangle" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" [[package]] name = "rustc-hash" @@ -1840,9 +1847,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.5.0" +version = "1.7.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "beb461507cee2c2ff151784c52762cf4d9ff6a61f3e80968600ed24fa837fa54" +checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d" [[package]] name = "rustls-webpki" @@ -1857,15 +1864,15 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.15" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80af6f9131f277a45a3fba6ce8e2258037bb0477a67e610d3c1fe046ab31de47" +checksum = "092474d1a01ea8278f69e6a358998405fae5b8b963ddaeb2b0b04a128bf1dfb0" [[package]] name = "ryu" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "same-file" @@ -1899,11 +1906,11 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "security-framework" -version = "2.10.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "770452e37cad93e0a50d5abc3990d2bc351c36d0328f86cefec2f2fb206eaef6" +checksum = "c627723fd09706bacdb5cf41499e95098555af3c3c29d014dc3c458ef6be11c0" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.5.0", "core-foundation", "core-foundation-sys", "libc", @@ -1912,9 +1919,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.10.0" +version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41f3cc463c0ef97e11c3461a9d3787412d30e8e7eb907c79180c4a57bf7c04ef" +checksum = "317936bbbd05227752583946b9e66d7ce3b489f84e11a94a510b4437fef407d7" dependencies = [ "core-foundation-sys", "libc", @@ -1922,18 +1929,18 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.199" +version = "1.0.201" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c9f6e76df036c77cd94996771fb40db98187f096dd0b9af39c6c6e452ba966a" +checksum = "780f1cebed1629e4753a1a38a3c72d30b97ec044f0aef68cb26650a3c5cf363c" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.199" +version = "1.0.201" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11bd257a6541e141e42ca6d24ae26f7714887b47e89aa739099104c7e4d3b7fc" +checksum = "c5e405930b9796f1c00bee880d03fc7e0bb4b9a11afc776885ffe84320da2865" dependencies = [ "proc-macro2", "quote", @@ -1942,9 +1949,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.116" +version = "1.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e17db7126d17feb94eb3fad46bf1a96b034e8aacbc2e775fe81505f8b0b2813" +checksum = "455182ea6142b14f93f4bc5320a2b31c1f266b66a4a5c858b013302a5d8cbfc3" dependencies = [ "indexmap", "itoa", @@ -2123,9 +2130,9 @@ checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" [[package]] name = "syn" -version = "2.0.60" +version = "2.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "909518bc7b1c9b779f1bbf07f2929d35af9f0f37e47c6e9ef7f9dddc1e1821f3" +checksum = "bf5be731623ca1a1fb7d8be6f261a3be6d3e2337b8a1f97be944d020c8fcb704" dependencies = [ "proc-macro2", "quote", @@ -2193,18 +2200,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.59" +version = "1.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f0126ad08bff79f29fc3ae6a55cc72352056dfff61e3ff8bb7129476d44b23aa" +checksum = "579e9083ca58dd9dcf91a9923bb9054071b9ebbd800b342194c9feb0ee89fc18" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.59" +version = "1.0.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1cd413b5d558b4c5bf3680e324a6fa5014e7b7c067a51e69dbdf47eb7148b66" +checksum = "e2470041c06ec3ac1ab38d0356a6119054dedaea53e12fbefc0de730a1c08524" dependencies = [ "proc-macro2", "quote", @@ -2348,16 +2355,15 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.10" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" dependencies = [ "bytes", "futures-core", "futures-sink", "pin-project-lite", "tokio", - "tracing", ] [[package]] @@ -3033,9 +3039,9 @@ dependencies = [ [[package]] name = "x11rb" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8f25ead8c7e4cba123243a6367da5d3990e0d3affa708ea19dce96356bd9f1a" +checksum = "5d91ffca73ee7f68ce055750bf9f6eca0780b8c85eff9bc046a3b0da41755e12" dependencies = [ "gethostname", "rustix", @@ -3044,9 +3050,9 @@ dependencies = [ [[package]] name = "x11rb-protocol" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e63e71c4b8bd9ffec2c963173a4dc4cbde9ee96961d4fcb4429db9929b606c34" +checksum = "ec107c4503ea0b4a98ef47356329af139c0a4f7750e621cf2973cd3385ebcb3d" [[package]] name = "zeroize" diff --git a/Cargo.toml b/Cargo.toml index 9228c32..c04f188 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "aichat" -version = "0.16.0" +version = "0.17.0" edition = "2021" authors = ["sigoden "] description = "All-in-one AI-Powered CLI Chat & Copilot" diff --git a/README.md b/README.md index f7707d2..a421891 100644 --- a/README.md +++ b/README.md @@ -4,34 +4,48 @@ [![Crates](https://img.shields.io/crates/v/aichat.svg)](https://crates.io/crates/aichat) [![Discord](https://img.shields.io/discord/1226737085453701222?label=Discord)](https://discord.gg/mr3ZZUB9hG) -Aichat is a AI-powered CLI chat and copilot tool that seamlessly integrates with over 10 leading AI platforms, providing a powerful combination of chat-based interaction, context-aware conversations, and AI-assisted shell capabilities, all within a customizable and user-friendly environment. +AIChat is a cutting-edge CLI chat and copilot tool that seamlessly integrates with over 10 leading AI platforms, providing a powerful combination of chat-based interaction, context-aware conversations, and AI-assisted shell capabilities, all within a customizable and user-friendly environment. -![command mode](https://github.com/sigoden/aichat/assets/4012553/2ab27e1b-4078-4ea3-a98f-591b36491685) +![AIChat Command](https://github.com/sigoden/aichat/assets/4012553/84ae8382-62be-41d0-a0f1-101b113c5bc7) -![chat-repl mode](https://github.com/sigoden/aichat/assets/4012553/13427d54-efd5-4f4c-b17b-409edd30dfa3) +![AIChat Chat-REPL](https://github.com/sigoden/aichat/assets/4012553/13470451-9502-4b3e-b49a-e66aa7760208) ## Key Features -* **Converse with Advanced AI:** Access and interact with 10+ leading AI platforms including OpenAI, Claude, Gemini, and more, all within one interface. 
 
 ## Install

@@ -49,18 +63,18 @@ Download pre-built binaries for macOS, Linux, and Windows from [GitHub Releases]

 ## Configuration

-Upon first launch, Aichat will guide you through the configuration process. An example configuration file is provided below:
+Upon first launch, AIChat will guide you through the configuration process. An example configuration file is provided below:

 ```
 > No config file, create a new one? Yes
 > AI Platform: openai
 > API Key:
-✨ Saved config file to /aichat/config.yaml
+✨ Saved config file to /aichat/config.yaml
 ```

 Feel free to adjust the configuration according to your needs.

-> Get `config.yaml` path with command `aichat --info` or repl command `.info`.
+> 💡 Use the `AICHAT_CONFIG_DIR` environment variable to customize the config directory for aichat files.

 ```yaml
 model: openai:gpt-3.5-turbo      # Specify the language model to use
@@ -93,9 +107,11 @@ clients:
     models:
       - name: llama3
         max_input_tokens: 8192
+
+  ...
 ```

-Refer to the [config.example.yaml](config.example.yaml) file for a complete list of configuration options. Environment variables can also be used for configuration; see the [Environment Variables](https://github.com/sigoden/aichat/wiki/Environment-Variables) page for details.
+Refer to the [config.example.yaml](config.example.yaml) file for a complete list of configuration options.
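A minimal sketch of the tip above, assuming a POSIX shell; the directory path is illustrative:

```sh
# Keep aichat's config, roles, and sessions under a custom directory
export AICHAT_CONFIG_DIR="$HOME/.config/aichat-work"

# The paths printed by --info (config file, roles file, sessions dir, ...)
# should now resolve under that directory.
aichat --info
```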
 
 ## Command line

 ```
 Usage: aichat [OPTIONS] [TEXT]...

 Arguments:
   [TEXT]...  Input text

 Options:
   -m, --model <MODEL>        Select a LLM model
+      --prompt <PROMPT>      Use the system prompt
   -r, --role <ROLE>          Select a role
   -s, --session [<SESSION>]  Start or join a session
       --save-session         Forces the session to be saved
+      --serve [<ADDRESS>]    Serve the LLM API and WebAPP
   -e, --execute              Execute commands in natural language
   -c, --code                 Output code only
   -f, --file <FILE>...       Include files with the message
@@ -143,9 +161,9 @@ aichat -s session1 --info # View session info

 cat data.toml | aichat -c to json > data.json # Pipe stdio/stdout

-aichat -f data.toml -c to json > data.json # Attach files
+aichat -f data.toml -c to json > data.json # Send files

-aichat -f a.png -f b.png diff images # Attach images
+aichat -f a.png -f b.png diff images # Send images
 ```

 ### Shell commands
@@ -156,19 +174,19 @@ Simply input what you want to do in natural language, and aichat will prompt and
 aichat -e ...
 ```

-Aichat is aware of OS and shell you are using, it will provide shell command for specific system you have. For instance, if you ask `aichat` to update your system, it will return a command based on your OS. Here's an example using macOS:
+![aichat-execute](https://github.com/sigoden/aichat/assets/4012553/a52edf31-b642-4bf9-8454-128ba2c387df)
+
+AIChat is aware of the OS and shell you are using; it will provide the shell command appropriate to your system. For instance, if you ask `aichat` to update your system, it will return a command based on your OS. Here's an example using macOS:

 ```
 $ aichat -e update my system
-# sudo softwareupdate -i -a
-? [1]:execute [2]:explain [3]:revise [4]:cancel (1)
+? sudo softwareupdate -i -a
 ```

 The same prompt, when used on Ubuntu, will generate a different suggestion:

 ```
 $ aichat -e update my system
-sudo apt update && sudo apt upgrade -y
-? [1]:execute [2]:explain [3]:revise [4]:cancel (1)
+? sudo apt update && sudo apt upgrade -y
 ```

 ### Shell integration
@@ -189,24 +207,25 @@ By using the `--code` or `-c` parameter, you can specifically request pure code output

 ## Chat REPL

-Aichat has a powerful Chat REPL.
+AIChat has a powerful Chat REPL.

-**REPL Features:**
-- **Convenient Tab Autocompletion:** Get suggestions for commands and functions while typing.
-- **Customizable REPL Prompt:** Personalize the REPL interface by defining your own prompt.
-- **Streamlined Keybindings:** Use familiar Emacs/Vi keybindings for efficient navigation and editing.
-- **Multi-line Editing:** Create and edit multi-line inputs with ease.
-- **External Editor Integration:** Open an external editor to refine the current inputs or write longer inputs.
-- **History and Undo Support:** Access previously executed commands and undo any actions you make.
+REPL Features:

-### `.help` - print help message
+- Tab auto-completion
+- [Custom REPL Prompt](https://github.com/sigoden/aichat/wiki/Custom-REPL-Prompt)
+- Emacs/VI keybinding
+- Edit/paste multi-line text
+- Open an editor to edit the current prompt
+- History and Undo
+
+### `.help` - show help message

 ```
 > .help
 .help                    Show this help message
 .info                    View system info
 .model                   Change the current LLM
-.prompt                  Make a temporary role using a prompt
+.prompt                  Create a temporary role using a prompt
 .role                    Switch to a specific role
 .info role               View role info
 .exit role               Leave the role
 .session                 Begin a chat session
 .info session            View session info
 .save session            Save the chat to file
 .clear messages          Erase messages in the current session
 .exit session            End the current session
-.file                    Read files and send them as input
+.file                    Include files with the message
 .set                     Adjust settings
 .copy                    Copy the last response
 .exit                    Exit the REPL

 Type ::: to start multi-line editing, type ::: to finish it.
-Press Ctrl+O to open an editor to edit line input.
+Press Ctrl+O to open an editor to edit the input buffer.
 Press Ctrl+C to cancel the response, Ctrl+D to exit the REPL
 ```
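A small illustration of the `:::` multi-line editing mentioned in the help text above; the prompt text here is invented:

```
> :::
Explain the difference between
a role and a session in one sentence.
:::
```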
@@ -230,7 +249,9 @@
 ```
 > .info
 model                openai:gpt-3.5-turbo
+max_output_tokens    4096 (current model)
 temperature          -
+top_p                -
 dry_run              false
 save                 true
 save_session         -
@@ -248,16 +269,19 @@ messages_file /home/alice/.config/aichat/messages.md
 sessions_dir         /home/alice/.config/aichat/sessions
 ```

-### `.model` - choose a model
+> 💡 Run `.info role` to view your current role information.
+> 💡 Run `.info session` to view your current session information.
+
+### `.model` - change the current LLM

 ```
 > .model openai:gpt-4
-> .model ollama:llama2
+> .model ollama:llama3
 ```

-> You can easily enter model name using tab autocompletion.
+> Tab autocompletion helps in quickly typing the model names.

-### `.role` - let the AI play a role
+### `.role` - switch to a specific role

 Select a role:

 ```
 > .role emoji
 ```

 Send a message:

 ```
 emoji> hello
 👋
 ```

 Leave the role:

 ```
 emoji> .exit role
 Hello there! How can I assist you today?
 ```

-Show role info:
-
-```
-emoji> .info role
-name: emoji
-prompt: I want you to translate the sentences I write into emojis. I will write the sentence, and you will express it with emojis. I just want you to express it with emojis. I don't want you to reply with anything but emoji. When I need to tell you something in English, I will do it by wrapping it in curly brackets like {like this}.
-temperature: null
-```
-
-Temporarily use a role to send a message.
+Temporarily use a role without switching to it:

 ```
-> ::: .role emoji
-hello world
-:::
-👋🌍
+> .role emoji hello
+👋
->
+>
 ```

-### `.session` - context-aware conversation
+### `.session` - begin a chat session

 By default, aichat behaves in a one-off request/response manner.

 You should run aichat with `-s/--session` or use the `.session` command to start a session.

 ```
 > .session
 temp) 1 to 5, odd only please
 1, 3, 5
 temp) .exit session
 ```

-The prompt on the right side is about the current usage of tokens and the proportion of tokens used,
-compared to the maximum number of tokens allowed by the model.
-
-
-### `.prompt` - make a temporary role using a prompt
+### `.prompt` - create a temporary role using a prompt

 There are situations where setting a system message is necessary, but modifying the `roles.yaml` file is undesirable.
 To address this, we leverage the `.prompt` to create a temporary role specifically for this purpose.

 ```
-> .prompt write unit tests for the rust functions
-%%>
+> .prompt you are a js console
+%%> Date.now()
+1658333431437
 ```

-### `.file` - include files with the message
+### `.file` - read files and send them as input

 ```
 Usage: .file ... [-- text...]

 .file message.txt
 .file config.yaml -- convert to toml
 .file a.jpg -- What is in this picture?
 .file https://ibb.co/a.png https://ibb.co/b.png -- what is the difference?
 ```

-> Only the current model that supports vision can process images submitted through `.file` command.
+> The capability to process images through the `.file` command depends on the current model's vision support.

-### `.set` - modify the configuration temporarily
+### `.set` - adjust settings (non-persistent)

 ```
+.set max_output_tokens 4096
 .set temperature 1.2
+.set top_p 0.8
 .set compress_threshold 1000
 .set dry_run true
-.set highlight false
-.set save false
-.set save_session true
-.set auto_copy true
 ```

-### Roles
+## Server
+
+AIChat comes with a built-in lightweight web server.
+
+```
+$ aichat --serve
+Chat Completions API: http://127.0.0.1:8000/v1/chat/completions
+LLM Playground: http://127.0.0.1:8000/playground
+LLM ARENA: http://127.0.0.1:8000/arena
+
+$ aichat --serve 0.0.0.0:8080 # to specify a different server address
+```
+
+### OpenAI format API
+
+AIChat offers the ability to function as a proxy server for all LLMs, allowing you to interact with them through the familiar OpenAI API format.
+
+Test with curl:
+
+```sh
+curl -X POST -H "Content-Type: application/json" -d '{
+  "model":"claude:claude-3-opus-20240229",
+  "messages":[{"role":"user","content":"hello"}],
+  "stream":true
+}' http://127.0.0.1:8000/v1/chat/completions
+```
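Since the endpoint follows the OpenAI chat-completions format, a non-streaming request should also work by setting `stream` to `false`; the reply then arrives as a single JSON body rather than an event stream. A sketch using the same illustrative model as above:

```sh
curl -X POST -H "Content-Type: application/json" -d '{
  "model":"claude:claude-3-opus-20240229",
  "messages":[{"role":"user","content":"hello"}],
  "stream":false
}' http://127.0.0.1:8000/v1/chat/completions
```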
+
+### LLM Playground
+
+The LLM Playground is a webapp that allows you to interact with any LLM supported by AIChat directly in your browser.
+
+![image](https://github.com/sigoden/aichat/assets/4012553/68043aa3-5778-4688-9c2f-3d96aa600b7a)
+
+### LLM Arena

-We can define a batch of roles in `roles.yaml`.
+The LLM Arena is a web-based platform where you can compare different LLMs side-by-side.

-> Get `roles.yaml` path with command `aichat --info` or repl command `.info`.
+![image](https://github.com/sigoden/aichat/assets/4012553/dc6dbf5a-488f-4bf4-a710-f1f9fc76933b)

-For example, we can define a role:
+## Defining Roles
+
+The `roles.yaml` file allows you to define a variety of roles, each with its own unique prompt and behavior. This enables the LLM to adapt to specific tasks and provide tailored responses.
+
+We can define a role like this:

 ```yaml
-- name: shell
+- name: emoji
   prompt: >
-    I want you to act as a Linux shell expert.
-    I want you to answer only with bash code.
-    Do not provide explanations.
+    I want you to translate the sentences I write into emojis.
+    I will write the sentence, and you will express it with emojis.
+    I don't want you to reply with anything but emoji.
 ```

-Let LLM answer questions in the role of a Linux shell expert.
+This enables the LLM to respond as an emoji translator:

 ```
-> .role shell
+> .role emoji

-shell> extract encrypted zipfile app.zip to /tmp/app
-mkdir /tmp/app
-unzip -P PASSWORD app.zip -d /tmp/app
+emoji> fire
+🔥
 ```

-For more details about roles, please visit [Role Guide](https://github.com/sigoden/aichat/wiki/Role-Guide).
+## Wikis
+
+- [Role Guide](https://github.com/sigoden/aichat/wiki/Role-Guide)
+- [Environment Variables](https://github.com/sigoden/aichat/wiki/Environment-Variables)
+- [Custom REPL Prompt](https://github.com/sigoden/aichat/wiki/Custom-REPL-Prompt)
+- [Custom Theme](https://github.com/sigoden/aichat/wiki/Custom-Theme)

 ## License

 Copyright (c) 2023-2024 aichat-developers.

-Aichat is made available under the terms of either the MIT License or the Apache License 2.0, at your option.
+AIChat is made available under the terms of either the MIT License or the Apache License 2.0, at your option.

 See the LICENSE-APACHE and LICENSE-MIT files for license details.