From d7ca76cc5ccf9fa8a9895759e3b7edcc963c5e47 Mon Sep 17 00:00:00 2001 From: Jonathan Dunn Date: Mon, 1 Apr 2024 10:45:45 -0400 Subject: [PATCH] updated readme --- README.md | 68 +++++++++++++--------------------- installer/client/cli/fabric.py | 3 +- 2 files changed, 27 insertions(+), 44 deletions(-) diff --git a/README.md b/README.md index f78fd93..b9c43c6 100644 --- a/README.md +++ b/README.md @@ -46,7 +46,7 @@ - [Directly calling Patterns](#directly-calling-patterns) - [Examples](#examples) - [Custom Patterns](#custom-patterns) -- [Helper Apps](#helper-apps) +- [Helper Apps](#helper-apps) - [Meta](#meta) - [Primary contributors](#primary-contributors) @@ -209,38 +209,8 @@ Once you have it all set up, here's how to use it. `fabric -h` ```bash -us the results in - realtime. NOTE: You will not be able to pipe the - output into another command. - --list, -l List available patterns - --clear Clears your persistent model choice so that you can - once again use the --model flag - --update, -u Update patterns. NOTE: This will revert the default - model to gpt4-turbo. please run --changeDefaultModel - to once again set default model - --pattern PATTERN, -p PATTERN - The pattern (prompt) to use - --setup Set up your fabric instance - --changeDefaultModel CHANGEDEFAULTMODEL - Change the default model. For a list of available - models, use the --listmodels flag. - --model MODEL, -m MODEL - Select the model to use. NOTE: Will not work if you - have set a default model. please use --clear to clear - persistence before using this flag - --listmodels List all available models - --remoteOllamaServer REMOTEOLLAMASERVER - The URL of the remote ollamaserver to use. 
ONLY USE - THIS if you are using a local ollama server in an non- - deault location or port - --context, -c Use Context file (context.md) to add context to your - pattern -age: fabric [-h] [--text TEXT] [--copy] [--agents {trip_planner,ApiKeys}] - [--output [OUTPUT]] [--stream] [--list] [--clear] [--update] - [--pattern PATTERN] [--setup] - [--changeDefaultModel CHANGEDEFAULTMODEL] [--model MODEL] - [--listmodels] [--remoteOllamaServer REMOTEOLLAMASERVER] - [--context] +usage: fabric [-h] [--text TEXT] [--copy] [--agents {trip_planner,ApiKeys}] [--output [OUTPUT]] [--gui] [--stream] [--list] [--update] [--pattern PATTERN] [--setup] [--changeDefaultModel CHANGEDEFAULTMODEL] + [--model MODEL] [--listmodels] [--remoteOllamaServer REMOTEOLLAMASERVER] [--context] An open source framework for augmenting humans using AI. @@ -249,12 +219,24 @@ options: --text TEXT, -t TEXT Text to extract summary from --copy, -C Copy the response to the clipboard --agents {trip_planner,ApiKeys}, -a {trip_planner,ApiKeys} - Use an AI agent to help you with a task. Acceptable - values are 'trip_planner' or 'ApiKeys'. This option - cannot be used with any other flag. + Use an AI agent to help you with a task. Acceptable values are 'trip_planner' or 'ApiKeys'. This option cannot be used with any other flag. --output [OUTPUT], -o [OUTPUT] Save the response to a file - --stream, -s Use this option if you want to see + --gui Use the GUI (Node and npm need to be installed) + --stream, -s Use this option if you want to see the results in realtime. NOTE: You will not be able to pipe the output into another command. + --list, -l List available patterns + --update, -u Update patterns. NOTE: This will revert the default model to gpt4-turbo. please run --changeDefaultModel to once again set default model + --pattern PATTERN, -p PATTERN + The pattern (prompt) to use + --setup Set up your fabric instance + --changeDefaultModel CHANGEDEFAULTMODEL + Change the default model. 
For a list of available models, use the --listmodels flag. + --model MODEL, -m MODEL + Select the model to use. NOTE: Will not work if you have set a default model. please use --clear to clear persistence before using this flag + --listmodels List all available models + --remoteOllamaServer REMOTEOLLAMASERVER + The URL of the remote ollamaserver to use. ONLY USE THIS if you are using a local ollama server in a non-default location or port + --context, -c Use Context file (context.md) to add context to your pattern ``` #### Example commands @@ -515,7 +497,7 @@ options: -h, --help Show this help message and exit --duration Output only the duration --transcript Output only the transcript - --comments Output only the user comments + --comments Output only the user comments ``` ## ts (Audio transcriptions) @@ -536,7 +518,7 @@ windows: download instructions https://www.ffmpeg.org/download.html ``` -````bash +```bash ts -h usage: ts [-h] audio_file positional arguments: options: -h, --help show this help message and exit -```` +``` + ## Save `save` is a "tee-like" utility to pipeline saving of content, while keeping the output stream intact. Can optionally generate "frontmatter" for PKM utilities like Obsidian via the "FABRIC_FRONTMATTER" environment variable - - If you'd like to default variables, set them in `~/.config/fabric/.env`. `FABRIC_OUTPUT_PATH` needs to be set so `save` knows where to write. `FABRIC_FRONTMATTER_TAGS` is optional, but useful for tracking how tags have entered your PKM, if that's important to you. ### usage + ```bash usage: save [-h] [-t, TAG] [-n] [-s] [stub] options: -t, TAG, --tag TAG add an additional frontmatter tag. 
Use this argument multiple times for multiple tags -n, --nofabric don't use the fabric tags, only use tags from --tag -s, --silent don't use STDOUT for output, only save to the file -```` ### Example diff --git a/installer/client/cli/fabric.py b/installer/client/cli/fabric.py index f05e4fb..cbbc68a 100755 --- a/installer/client/cli/fabric.py +++ b/installer/client/cli/fabric.py @@ -28,7 +28,8 @@ def main(): const="analyzepaper.txt", default=None, ) - parser.add_argument("--gui", help="Use the GUI", action="store_true") + parser.add_argument( + "--gui", help="Use the GUI (Node and npm need to be installed)", action="store_true") parser.add_argument( "--stream", "-s",