split code into files, update openai dep

pull/5/head
Tony Worm 1 year ago
parent ae93d8d362
commit 46176a0d3d

@ -0,0 +1,331 @@
package main
import (
"bufio"
"context"
"fmt"
"os"
"path/filepath"
"strconv"
"strings"
"github.com/sashabaranov/go-openai"
)
// RunPrompt runs an interactive REPL on stdin. Each input line is either a
// session command (quit, clear, context, prompt, save, model, tokens, count,
// temp, topp, pres, freq) or free text that is appended to PromptText and
// sent to the API. Appending every question and answer to PromptText is how
// the session keeps conversational context.
func RunPrompt(client *openai.Client) error {
	ctx := context.Background()
	scanner := bufio.NewScanner(os.Stdin)
	quit := false

	for !quit {
		fmt.Print("> ")
		if !scanner.Scan() {
			break
		}

		question := scanner.Text()
		parts := strings.Fields(question)
		// ignore blank lines; indexing parts[0] below would panic otherwise
		if len(parts) == 0 {
			continue
		}

		// look for commands
		switch parts[0] {
		case "quit", "q", "exit":
			quit = true
			continue

		case "clear":
			PromptText = ""

		case "context":
			fmt.Println("\n===== Current Context =====")
			fmt.Println(PromptText)
			fmt.Println("===========================\n")

		case "prompt":
			if len(parts) < 2 {
				fmt.Println("prompt requires an argument [list, view:<prompt>, <prompt>, <custom...>]")
				continue
			}
			p, err := handlePrompt(parts[1])
			if err != nil {
				fmt.Println(err)
				os.Exit(1)
			}
			// prime prompt with custom pretext
			fmt.Printf("setting prompt to:\n%s", p)
			PromptText = p

		case "save":
			// guard the missing-argument case; parts[1] would panic
			if len(parts) < 2 {
				fmt.Println("save requires a filename argument")
				continue
			}
			name := parts[1]
			fmt.Printf("saving session to %s\n", name)
			err := os.WriteFile(name, []byte(PromptText), 0644)
			if err != nil {
				fmt.Println(err)
			}
			continue

		case "model":
			if len(parts) == 1 {
				fmt.Println("model is set to", Model)
				continue
			}
			Model = parts[1]
			fmt.Println("model is now", Model)
			continue

		case "tokens":
			if len(parts) == 1 {
				fmt.Println("tokens is set to", MaxTokens)
				continue
			}
			c, err := strconv.Atoi(parts[1])
			if err != nil {
				fmt.Println(err)
				continue
			}
			MaxTokens = c
			fmt.Println("tokens is now", MaxTokens)
			continue

		case "count":
			if len(parts) == 1 {
				fmt.Println("count is set to", Count)
				continue
			}
			c, err := strconv.Atoi(parts[1])
			if err != nil {
				fmt.Println(err)
				continue
			}
			Count = c
			fmt.Println("count is now", Count)
			continue

		case "temp":
			if len(parts) == 1 {
				fmt.Println("temp is set to", Temp)
				continue
			}
			f, err := strconv.ParseFloat(parts[1], 64)
			if err != nil {
				fmt.Println(err)
				continue
			}
			Temp = f
			fmt.Println("temp is now", Temp)

		case "topp":
			if len(parts) == 1 {
				fmt.Println("topp is set to", TopP)
				continue
			}
			f, err := strconv.ParseFloat(parts[1], 64)
			if err != nil {
				fmt.Println(err)
				continue
			}
			TopP = f
			fmt.Println("topp is now", TopP)

		case "pres":
			if len(parts) == 1 {
				fmt.Println("pres is set to", PresencePenalty)
				continue
			}
			f, err := strconv.ParseFloat(parts[1], 64)
			if err != nil {
				fmt.Println(err)
				continue
			}
			PresencePenalty = f
			fmt.Println("pres is now", PresencePenalty)

		case "freq":
			if len(parts) == 1 {
				fmt.Println("freq is set to", FrequencyPenalty)
				continue
			}
			f, err := strconv.ParseFloat(parts[1], 64)
			if err != nil {
				fmt.Println(err)
				continue
			}
			FrequencyPenalty = f
			fmt.Println("freq is now", FrequencyPenalty)

		default:
			// add the question to the existing prompt text, to keep context
			PromptText += "\n> " + question
			var R []string
			var err error
			// TODO, chat mode?
			if CodeMode {
				// R, err = GetCodeResponse(client, ctx, PromptText)
			} else if EditMode {
				R, err = GetEditsResponse(client, ctx, PromptText, Question)
			} else {
				R, err = GetCompletionResponse(client, ctx, PromptText)
			}
			if err != nil {
				return err
			}

			final := ""
			if len(R) == 1 {
				final = R[0]
			} else if len(R) == 0 {
				// no choices came back (e.g. CodeMode is currently a
				// no-op); R[pos] below would panic on an empty slice
				continue
			} else {
				// multiple choices: show them all, then ask the user
				// to pick one by index
				for i, r := range R {
					final += fmt.Sprintf("[%d]: %s\n\n", i, r)
				}
				fmt.Println(final)
				ok := false
				pos := 0
				for !ok {
					fmt.Print("> ")
					if !scanner.Scan() {
						break
					}
					ans := scanner.Text()
					pos, err = strconv.Atoi(ans)
					if err != nil {
						fmt.Println(err)
						continue
					}
					if pos < 0 || pos >= Count {
						fmt.Println("choice must be between 0 and", Count-1)
						continue
					}
					ok = true
				}
				final = R[pos]
			}

			// we add response to the prompt, this is how ChatGPT sessions keep context
			PromptText += "\n" + strings.TrimSpace(final)
			// print the latest portion of the conversation
			fmt.Println(final + "\n")
		}
	}
	return nil
}
// handlePrompt resolves a prompt argument against the files in PromptDir.
// "list" prints the available prompt names and returns "". "view:<name>"
// prints the prompt's contents and returns "". A bare name returns the
// matching file's contents to be used as pretext. Anything that matches no
// file is treated as custom prompt text and returned verbatim.
func handlePrompt(prompt string) (string, error) {
	files, err := os.ReadDir(PromptDir)
	if err != nil {
		return "", err
	}

	// list and exit
	if prompt == "list" {
		for _, f := range files {
			fmt.Println(strings.TrimSuffix(f.Name(), ".txt"))
		}
		return "", nil
	}

	// are we in view mode?
	var viewMode bool
	if strings.HasPrefix(prompt, "view:") {
		prompt = strings.TrimPrefix(prompt, "view:")
		viewMode = true
	}

	// read prompt pretext
	var contents []byte
	// we loop so we know if we found a match or not
	found := false
	for _, f := range files {
		if strings.TrimSuffix(f.Name(), ".txt") == prompt {
			// read the matched entry by its real name rather than
			// assuming a ".txt" suffix, so a file named exactly
			// "prompt" (no suffix) still resolves
			contents, err = os.ReadFile(filepath.Join(PromptDir, f.Name()))
			found = true
			break
		}
	}
	if err != nil {
		return "", err
	}

	// probably custom?
	if !found {
		fmt.Println("no predefined prompt found, using custom text")
		return prompt, nil
	}

	// print and exit or...
	// prime prompt with known pretext
	if viewMode {
		fmt.Println(string(contents))
		return "", nil
	}
	return string(contents), nil
}
// RunOnce performs a single, non-interactive request: it sends the current
// PromptText to the selected endpoint (edits or completion; code mode is a
// placeholder) and either prints the result or, with WriteBack set and a
// filename given, appends it to that file.
func RunOnce(client *openai.Client, filename string) error {
	ctx := context.Background()

	var (
		responses []string
		err       error
	)
	// TODO, chat mode
	switch {
	case CodeMode:
		// responses, err = GetCodeResponse(client, ctx, PromptText)
	case EditMode:
		responses, err = GetEditsResponse(client, ctx, PromptText, Question)
	default:
		responses, err = GetCompletionResponse(client, ctx, PromptText)
	}
	if err != nil {
		return err
	}

	var out string
	if len(responses) == 1 {
		out = responses[0]
	} else {
		// number each choice so the user can tell them apart
		var b strings.Builder
		for i, r := range responses {
			b.WriteString(fmt.Sprintf("[%d]: %s\n\n", i, r))
		}
		out = b.String()
	}

	if filename == "" || !WriteBack {
		fmt.Println(out)
		return nil
	}
	return AppendToFile(filename, out)
}
// AppendToFile provides a function to append data to an existing file,
// creating it if it doesn't exist
func AppendToFile(filename string, data string) error {
// Open the file in append mode
file, err := os.OpenFile(filename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
return err
}
// Append the data to the file
_, err = file.WriteString(data)
if err != nil {
return err
}
return file.Close()
}

105
get.go

@ -0,0 +1,105 @@
package main
import (
"context"
"strings"
"github.com/sashabaranov/go-openai"
)
/*
func GetChatCompletionResponse(client *openai.Client, ctx context.Context, question string) ([]string, error) {
if CleanPrompt {
question = strings.ReplaceAll(question, "\n", " ")
question = strings.ReplaceAll(question, " ", " ")
}
// insert newline at end to prevent completion of question
if !strings.HasSuffix(question, "\n") {
question += "\n"
}
req := openai.ChatCompletionRequest{
Model: Model,
MaxTokens: MaxTokens,
Prompt: question,
Echo: Echo,
N: Count,
Temperature: float32(Temp),
TopP: float32(TopP),
PresencePenalty: float32(PresencePenalty),
FrequencyPenalty: float32(FrequencyPenalty),
}
resp, err := client.CreateChatCompletion(ctx, req)
if err != nil {
return nil, err
}
var r []string
for _, c := range resp.Choices {
r = append(r, c.Text)
}
return r, nil
}
*/
// GetCompletionResponse sends question to the OpenAI completion endpoint
// using the package-level tuning parameters (Model, MaxTokens, Temp, TopP,
// penalties, Count) and returns one text per returned choice.
// Note: ctx follows client in the signature to match the sibling helpers.
func GetCompletionResponse(client *openai.Client, ctx context.Context, question string) ([]string, error) {
	if CleanPrompt {
		question = strings.ReplaceAll(question, "\n", " ")
		// collapse the doubled spaces left behind by the newline
		// replacement (the previous space-for-space replace was a no-op)
		question = strings.ReplaceAll(question, "  ", " ")
	}
	// insert newline at end to prevent completion of question
	if !strings.HasSuffix(question, "\n") {
		question += "\n"
	}
	req := openai.CompletionRequest{
		Model:            Model,
		MaxTokens:        MaxTokens,
		Prompt:           question,
		Echo:             Echo,
		N:                Count,
		Temperature:      float32(Temp),
		TopP:             float32(TopP),
		PresencePenalty:  float32(PresencePenalty),
		FrequencyPenalty: float32(FrequencyPenalty),
	}
	resp, err := client.CreateCompletion(ctx, req)
	if err != nil {
		return nil, err
	}
	var r []string
	for _, c := range resp.Choices {
		r = append(r, c.Text)
	}
	return r, nil
}
// GetEditsResponse sends input plus an edit instruction to the OpenAI edits
// endpoint and returns one edited text per returned choice. Temperature,
// TopP, and Count come from the package-level settings.
func GetEditsResponse(client *openai.Client, ctx context.Context, input, instruction string) ([]string, error) {
	if CleanPrompt {
		input = strings.ReplaceAll(input, "\n", " ")
		// collapse the doubled spaces left behind by the newline
		// replacement (the previous space-for-space replace was a no-op)
		input = strings.ReplaceAll(input, "  ", " ")
	}
	// EditsRequest takes the model by pointer; copy the global so the
	// request cannot alias later changes to Model
	m := Model
	req := openai.EditsRequest{
		Model:       &m,
		Input:       input,
		Instruction: instruction,
		N:           Count,
		Temperature: float32(Temp),
		TopP:        float32(TopP),
	}
	resp, err := client.Edits(ctx, req)
	if err != nil {
		return nil, err
	}
	var r []string
	for _, c := range resp.Choices {
		r = append(r, c.Text)
	}
	return r, nil
}

@ -3,7 +3,7 @@ module github.com/verdverm/chatgpt
go 1.18
require (
github.com/sashabaranov/go-openai v1.5.0
github.com/sashabaranov/go-openai v1.9.0
github.com/spf13/cobra v1.6.1
)

@ -2,8 +2,8 @@ github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46t
github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sashabaranov/go-openai v1.5.0 h1:4Gr/7g/KtVzW0ddn7TC2aUlyzvhZBIM+qRZ6Ae2kMa0=
github.com/sashabaranov/go-openai v1.5.0/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
github.com/sashabaranov/go-openai v1.9.0 h1:NoiO++IISxxJ1pRc0n7uZvMGMake0G+FJ1XPwXtprsA=
github.com/sashabaranov/go-openai v1.9.0/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
github.com/spf13/cobra v1.6.1 h1:o94oiPyS4KD1mPy2fmcYYHHfCxLqYjJOhGsCHFZtEzA=
github.com/spf13/cobra v1.6.1/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=

@ -3,17 +3,11 @@ package main
import (
"bufio"
"bytes"
"context"
"embed"
"fmt"
"io/fs"
"os"
"path/filepath"
"runtime/debug"
"strconv"
"strings"
gpt3 "github.com/sashabaranov/go-openai"
"github.com/sashabaranov/go-openai"
"github.com/spf13/cobra"
)
@ -38,6 +32,9 @@ Examples:
# pipe content from another program, useful for ! in vim visual mode
cat convo.txt | chatgpt
# set the directory for custom prompts, defaults to "prompts"
chatgpt -P custom-prompts -p my-prompt -i
# inspect the predefined prompts, which set ChatGPT's mood
chatgpt -p list
chatgpt -p view:<name>
@ -47,9 +44,6 @@ Examples:
chatgpt -p cynic -q "Is the world going to be ok?"
chatgpt -p teacher convo.txt
# set the directory for custom prompts
chatgpt -P prompts -p my-prompt -i
# edit mode
chatgpt -e ...
@ -75,6 +69,9 @@ Examples:
var interactiveHelp = `starting interactive session...
'quit' to exit
'save <filename>' to preserve
'clear' to erase the context
'context' to see the current context
'prompt' to set the context to a prompt
'tokens' to change the MaxToken param
'count' to change number of responses
'temp' set the temperature param [0.0,2.0]
@ -84,9 +81,6 @@ var interactiveHelp = `starting interactive session...
'model' to change the selected model
`
//go:embed prompts/*
var predefined embed.FS
var Version bool
// prompt vars
@ -110,140 +104,6 @@ var PresencePenalty float64
var FrequencyPenalty float64
var Model string
// internal vars
func init() {
}
/*
func GetChatCompletionResponse(client *gpt3.Client, ctx context.Context, question string) ([]string, error) {
if CleanPrompt {
question = strings.ReplaceAll(question, "\n", " ")
question = strings.ReplaceAll(question, " ", " ")
}
// insert newline at end to prevent completion of question
if !strings.HasSuffix(question, "\n") {
question += "\n"
}
req := gpt3.ChatCompletionRequest{
Model: Model,
MaxTokens: MaxTokens,
Prompt: question,
Echo: Echo,
N: Count,
Temperature: float32(Temp),
TopP: float32(TopP),
PresencePenalty: float32(PresencePenalty),
FrequencyPenalty: float32(FrequencyPenalty),
}
resp, err := client.CreateChatCompletion(ctx, req)
if err != nil {
return nil, err
}
var r []string
for _, c := range resp.Choices {
r = append(r, c.Text)
}
return r, nil
}
*/
func GetCompletionResponse(client *gpt3.Client, ctx context.Context, question string) ([]string, error) {
if CleanPrompt {
question = strings.ReplaceAll(question, "\n", " ")
question = strings.ReplaceAll(question, " ", " ")
}
// insert newline at end to prevent completion of question
if !strings.HasSuffix(question, "\n") {
question += "\n"
}
req := gpt3.CompletionRequest{
Model: Model,
MaxTokens: MaxTokens,
Prompt: question,
Echo: Echo,
N: Count,
Temperature: float32(Temp),
TopP: float32(TopP),
PresencePenalty: float32(PresencePenalty),
FrequencyPenalty: float32(FrequencyPenalty),
}
resp, err := client.CreateCompletion(ctx, req)
if err != nil {
return nil, err
}
var r []string
for _, c := range resp.Choices {
r = append(r, c.Text)
}
return r, nil
}
func GetEditsResponse(client *gpt3.Client, ctx context.Context, input, instruction string) ([]string, error) {
if CleanPrompt {
input = strings.ReplaceAll(input, "\n", " ")
input = strings.ReplaceAll(input, " ", " ")
}
m := Model
req := gpt3.EditsRequest{
Model: &m,
Input: input,
Instruction: instruction,
N: Count,
Temperature: float32(Temp),
TopP: float32(TopP),
}
resp, err := client.Edits(ctx, req)
if err != nil {
return nil, err
}
var r []string
for _, c := range resp.Choices {
r = append(r, c.Text)
}
return r, nil
}
// GetCodeResponse requests a code completion from the Codex model
// (CodexCodeDavinci002), ignoring the session Model setting but reusing
// the other package-level parameters. Returns one string per choice.
func GetCodeResponse(client *gpt3.Client, ctx context.Context, question string) ([]string, error) {
if CleanPrompt {
question = strings.ReplaceAll(question, "\n", " ")
// NOTE(review): this replaces a single space with a single space (no-op);
// presumably it was meant to collapse doubled spaces — confirm intent
question = strings.ReplaceAll(question, " ", " ")
}
// insert newline at end to prevent completion of question
if !strings.HasSuffix(question, "\n") {
question += "\n"
}
req := gpt3.CompletionRequest{
Model: gpt3.CodexCodeDavinci002,
MaxTokens: MaxTokens,
Prompt: question,
Echo: Echo,
N: Count,
Temperature: float32(Temp),
TopP: float32(TopP),
PresencePenalty: float32(PresencePenalty),
FrequencyPenalty: float32(FrequencyPenalty),
}
resp, err := client.CreateCompletion(ctx, req)
if err != nil {
return nil, err
}
// collect the text of each returned choice
var r []string
for _, c := range resp.Choices {
r = append(r, c.Text)
}
return r, nil
}
func printVersion() {
info, _ := debug.ReadBuildInfo()
GoVersion := info.GoVersion
@ -289,7 +149,7 @@ func main() {
}
}
client := gpt3.NewClient(apiKey)
client := openai.NewClient(apiKey)
rootCmd := &cobra.Command{
Use: "chatgpt [file]",
@ -308,62 +168,17 @@ func main() {
// Handle the prompt flag
if Prompt != "" {
var files []fs.DirEntry
if PromptDir == "" {
files, err = predefined.ReadDir("prompts")
if err != nil {
panic(err)
}
} else {
files, err = os.ReadDir(PromptDir)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
}
// list and exit
if Prompt == "list" {
for _, f := range files {
fmt.Println(strings.TrimSuffix(f.Name(), ".txt"))
}
os.Exit(0)
}
// are we in view mode?
var viewMode bool
if strings.HasPrefix(Prompt, "view:") {
Prompt = strings.TrimPrefix(Prompt, "view:")
viewMode = true
}
// read prompt pretext
var contents []byte
if PromptDir == "" {
contents, err = predefined.ReadFile("prompts/" + Prompt + ".txt")
} else {
contents, err = os.ReadFile(filepath.Join(PromptDir, Prompt + ".txt"))
}
p, err := handlePrompt(Prompt)
if err != nil {
fmt.Println(err)
os.Exit(1)
}
// print and exit or...
// prime prompt with known pretext
if viewMode {
fmt.Println(string(contents))
if p == "" {
os.Exit(0)
} else {
PromptText = string(contents)
}
// prime prompt with custom pretext
if PromptText == "" {
PromptText = Prompt
}
PromptText = p
}
// no args, or interactive... read from stdin
@ -419,10 +234,9 @@ func main() {
// prompt releated
rootCmd.Flags().StringVarP(&Question, "question", "q", "", "ask a single question and print the response back")
rootCmd.Flags().StringVarP(&Prompt, "prompt", "p", "", "prompt to add to ChatGPT input, use 'list' or 'view:<name>' to inspect predefined, '<name>' to use a prompt, or otherwise supply any custom text")
rootCmd.Flags().StringVarP(&PromptDir, "prompt-dir", "P", "", "directory containing custom prompts, if not set the embedded defaults are used")
rootCmd.Flags().StringVarP(&PromptDir, "prompt-dir", "P", "prompts", "directory containing custom prompts, if not set the embedded defaults are used")
rootCmd.Flags().BoolVarP(&PromptMode, "interactive", "i", false, "start an interactive session with ChatGPT")
rootCmd.Flags().BoolVarP(&EditMode, "edit", "e", false, "request an edit with ChatGPT")
rootCmd.Flags().BoolVarP(&CodeMode, "code", "c", false, "request code completion with ChatGPT")
rootCmd.Flags().BoolVarP(&CleanPrompt, "clean", "x", false, "remove excess whitespace from prompt before sending")
rootCmd.Flags().BoolVarP(&WriteBack, "write", "w", false, "write response to end of context file")
@ -434,249 +248,8 @@ func main() {
rootCmd.Flags().Float64VarP(&TopP, "topp", "", 1.0, "set the TopP parameter")
rootCmd.Flags().Float64VarP(&PresencePenalty, "pres", "", 0.0, "set the Presence Penalty parameter")
rootCmd.Flags().Float64VarP(&FrequencyPenalty, "freq", "", 0.0, "set the Frequency Penalty parameter")
rootCmd.Flags().StringVarP(&Model, "model", "m", gpt3.GPT3TextDavinci003, "select the model to use with -q or -e")
rootCmd.Flags().StringVarP(&Model, "model", "m", openai.GPT3TextDavinci003, "select the model to use with -q or -e")
// run the command
rootCmd.Execute()
}
func RunPrompt(client *gpt3.Client) error {
ctx := context.Background()
scanner := bufio.NewScanner(os.Stdin)
quit := false
for !quit {
fmt.Print("> ")
if !scanner.Scan() {
break
}
question := scanner.Text()
parts := strings.Fields(question)
// look for commands
switch parts[0] {
case "quit", "q", "exit":
quit = true
continue
case "save":
name := parts[1]
fmt.Printf("saving session to %s\n", name)
err := os.WriteFile(name, []byte(PromptText), 0644)
if err != nil {
fmt.Println(err)
}
continue
case "model":
if len(parts) == 1 {
fmt.Println("model is set to", Model)
continue
}
Model = parts[1]
fmt.Println("model is now", Model)
continue
case "tokens":
if len(parts) == 1 {
fmt.Println("tokens is set to", MaxTokens)
continue
}
c, err := strconv.Atoi(parts[1])
if err != nil {
fmt.Println(err)
continue
}
MaxTokens = c
fmt.Println("tokens is now", MaxTokens)
continue
case "count":
if len(parts) == 1 {
fmt.Println("count is set to", Count)
continue
}
c, err := strconv.Atoi(parts[1])
if err != nil {
fmt.Println(err)
continue
}
Count = c
fmt.Println("count is now", Count)
continue
case "temp":
if len(parts) == 1 {
fmt.Println("temp is set to", Temp)
continue
}
f, err := strconv.ParseFloat(parts[1], 64)
if err != nil {
fmt.Println(err)
continue
}
Temp = f
fmt.Println("temp is now", Temp)
case "topp":
if len(parts) == 1 {
fmt.Println("topp is set to", TopP)
continue
}
f, err := strconv.ParseFloat(parts[1], 64)
if err != nil {
fmt.Println(err)
continue
}
TopP = f
fmt.Println("topp is now", TopP)
case "pres":
if len(parts) == 1 {
fmt.Println("pres is set to", PresencePenalty)
continue
}
f, err := strconv.ParseFloat(parts[1], 64)
if err != nil {
fmt.Println(err)
continue
}
PresencePenalty = f
fmt.Println("pres is now", PresencePenalty)
case "freq":
if len(parts) == 1 {
fmt.Println("freq is set to", FrequencyPenalty)
continue
}
f, err := strconv.ParseFloat(parts[1], 64)
if err != nil {
fmt.Println(err)
continue
}
FrequencyPenalty = f
fmt.Println("freq is now", FrequencyPenalty)
default:
// add the question to the existing prompt text, to keep context
PromptText += "\n> " + question
var R []string
var err error
if CodeMode {
R, err = GetCodeResponse(client, ctx, PromptText)
} else if EditMode {
R, err = GetEditsResponse(client, ctx, PromptText, Question)
} else {
R, err = GetCompletionResponse(client, ctx, PromptText)
}
if err != nil {
return err
}
final := ""
if len(R) == 1 {
final = R[0]
} else {
for i, r := range R {
final += fmt.Sprintf("[%d]: %s\n\n", i, r)
}
fmt.Println(final)
ok := false
pos := 0
for !ok {
fmt.Print("> ")
if !scanner.Scan() {
break
}
ans := scanner.Text()
pos, err = strconv.Atoi(ans)
if err != nil {
fmt.Println(err)
continue
}
if pos < 0 || pos >= Count {
fmt.Println("choice must be between 0 and", Count-1)
continue
}
ok = true
}
final = R[pos]
}
// we add response to the prompt, this is how ChatGPT sessions keep context
PromptText += "\n" + strings.TrimSpace(final)
// print the latest portion of the conversation
fmt.Println(final + "\n")
}
}
return nil
}
func RunOnce(client *gpt3.Client, filename string) error {
ctx := context.Background()
var R []string
var err error
if CodeMode {
R, err = GetCodeResponse(client, ctx, PromptText)
} else if EditMode {
R, err = GetEditsResponse(client, ctx, PromptText, Question)
} else {
R, err = GetCompletionResponse(client, ctx, PromptText)
}
if err != nil {
return err
}
final := ""
if len(R) == 1 {
final = R[0]
} else {
for i, r := range R {
final += fmt.Sprintf("[%d]: %s\n\n", i, r)
}
}
if filename == "" || !WriteBack {
fmt.Println(final)
} else {
err = AppendToFile(filename, final)
if err != nil {
return err
}
}
return nil
}
// AppendToFile provides a function to append data to an existing file,
// creating it if it doesn't exist
func AppendToFile(filename string, data string) error {
// Open the file in append mode
file, err := os.OpenFile(filename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
return err
}
// Append the data to the file
_, err = file.WriteString(data)
if err != nil {
return err
}
return file.Close()
}

Loading…
Cancel
Save