Add dry run

This commit is contained in:
Azwar Tamim 2024-08-28 21:50:06 +07:00
parent 25845f5d5a
commit 4006f3f417
3 changed files with 9 additions and 0 deletions

View File

@@ -179,6 +179,7 @@ Application Options:
-u, --url= Choose ollama url (default: http://127.0.0.1:11434)
-o, --output= Output to file
-n, --latest= Number of latest patterns to list (default: 0)
--dry-run Show what would be sent to the model without actually sending it
Help Options:
-h, --help Show this help message

View File

@@ -142,6 +142,13 @@ func Cli() (message string, err error) {
}
}
if currentFlags.DryRun {
fmt.Println("Dry run: Would send the following request:")
fmt.Printf("Chat Request: %+v\n", currentFlags.BuildChatRequest())
fmt.Printf("Chat Options: %+v\n", currentFlags.BuildChatOptions())
return
}
var chatter *core.Chatter
if chatter, err = fabric.GetChatter(currentFlags.Model, currentFlags.Stream); err != nil {
return

View File

@@ -37,6 +37,7 @@ type Flags struct {
YouTube string `short:"y" long:"youtube" description:"YouTube video url to grab transcript, comments from it and send to chat"`
YouTubeTranscript bool `long:"transcript" description:"Grab transcript from YouTube video and send to chat"`
YouTubeComments bool `long:"comments" description:"Grab comments from YouTube video and send to chat"`
DryRun bool `long:"dry-run" description:"Show what would be sent to the model without actually sending it"`
}
// Init Initialize flags. returns a Flags struct and an error