2023-02-13 02:01:59 +00:00
package main

import (
	"bufio"
	"bytes"
	"context"
	"embed"
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
	"runtime/debug"
	"strconv"
	"strings"

	gpt3 "github.com/sashabaranov/go-openai"
	"github.com/spf13/cobra"
)
var LongHelp = `
Chat with ChatGPT in console .
Examples :
# start an interactive session
chatgpt - i
# ask chatgpt for a one - time response
chatgpt - q "answer me this ChatGPT..."
# provide context to a question or conversation
chatgpt context . txt - i
chatgpt context . txt - q "answer me this ChatGPT..."
2023-02-13 03:04:45 +00:00
# read prompt from file and -- write response back
2023-02-13 02:01:59 +00:00
chatgpt convo . txt
2023-02-13 03:04:45 +00:00
chatgpt convo . txt -- write
2023-02-13 02:01:59 +00:00
# pipe content from another program , useful for ! in vim visual mode
cat convo . txt | chatgpt
2023-03-21 07:38:28 +00:00
# inspect the predifined prompts , which set ChatGPT ' s mood
2023-02-13 02:01:59 +00:00
chatgpt - p list
chatgpt - p view : < name >
2023-03-21 07:38:28 +00:00
# use a prompts with any of the previous modes
2023-02-13 02:01:59 +00:00
chatgpt - p optimistic - i
chatgpt - p cynic - q "Is the world going to be ok?"
chatgpt - p teacher convo . txt
2023-02-13 02:44:25 +00:00
2023-03-21 07:38:28 +00:00
# set the directory for custom prompts
chatgpt - P prompts - p my - prompt - i
2023-02-13 06:30:35 +00:00
# edit mode
chatgpt - e ...
2023-02-13 05:14:46 +00:00
2023-02-13 06:30:35 +00:00
# code mode
chatgpt - c ...
2023-02-13 05:14:46 +00:00
2023-02-13 06:30:35 +00:00
# model options ( https : //platform.openai.com/docs/api-reference/completions/create)
2023-03-12 00:42:38 +00:00
chatgpt - T 4096 # set max tokens in reponse [ 0 , 4096 ]
chatgpt - C # clean whitespace before sending
chatgpt - E # echo back the prompt , useful for vim coding
chatgpt -- temp # set the temperature param [ 0.0 , 2.0 ]
chatgpt -- topp # set the TopP param [ 0.0 , 1.0 ]
chatgpt -- pres # set the Presence Penalty [ - 2.0 , 2.0 ]
chatgpt -- freq # set the Frequency Penalty [ - 2.0 , 2.0 ]
# change model selection , available models are listed here :
# https : //pkg.go.dev/github.com/sashabaranov/go-openai#Client.ListModels
chatgpt - m text - davinci - 003 # set the model to text - davinci - 003 ( the default )
chatgpt - m text - ada - 001 # set the model to text - ada - 001
2023-02-13 05:14:46 +00:00
2023-02-13 02:01:59 +00:00
`
2023-02-13 02:41:18 +00:00
var interactiveHelp = ` starting interactive session ...
2023-02-13 05:14:46 +00:00
' quit ' to exit
' save < filename > ' to preserve
' tokens ' to change the MaxToken param
' count ' to change number of responses
' temp ' set the temperature param [ 0.0 , 2.0 ]
' topp ' set the TopP param [ 0.0 , 1.0 ]
' pres ' set the Presence Penalty [ - 2.0 , 2.0 ]
' freq ' set the Frequency Penalty [ - 2.0 , 2.0 ]
2023-03-12 00:42:38 +00:00
' model ' to change the selected model
2023-02-13 02:41:18 +00:00
`
2023-02-13 06:30:35 +00:00
2023-03-21 07:38:28 +00:00
//go:embed prompts/*
2023-02-13 02:01:59 +00:00
var predefined embed . FS
2023-02-13 04:02:53 +00:00
var Version bool
2023-02-13 05:14:46 +00:00
// prompt vars
2023-02-13 02:01:59 +00:00
var Question string
2023-03-21 07:38:28 +00:00
var Prompt string
var PromptDir string
2023-02-13 02:01:59 +00:00
var PromptMode bool
2023-02-13 05:14:46 +00:00
var EditMode bool
var CodeMode bool
2023-02-13 02:41:18 +00:00
var CleanPrompt bool
2023-02-13 03:04:45 +00:00
var WriteBack bool
2023-02-13 02:01:59 +00:00
var PromptText string
2023-02-13 05:14:46 +00:00
// chatgpt vars
var MaxTokens int
var Count int
2023-02-13 06:30:35 +00:00
var Echo bool
2023-02-13 05:14:46 +00:00
var Temp float64
var TopP float64
var PresencePenalty float64
var FrequencyPenalty float64
2023-02-24 00:02:52 +00:00
var Model string
2023-03-21 07:38:28 +00:00
// internal vars
func init ( ) {
}
/*
func GetChatCompletionResponse(client *gpt3.Client, ctx context.Context, question string) ([]string, error) {
	if CleanPrompt {
		question = strings.ReplaceAll(question, "\n", " ")
		question = strings.ReplaceAll(question, "  ", " ")
	}
	// insert newline at end to prevent completion of question
	if !strings.HasSuffix(question, "\n") {
		question += "\n"
	}
	req := gpt3.ChatCompletionRequest{
		Model:            Model,
		MaxTokens:        MaxTokens,
		Prompt:           question,
		Echo:             Echo,
		N:                Count,
		Temperature:      float32(Temp),
		TopP:             float32(TopP),
		PresencePenalty:  float32(PresencePenalty),
		FrequencyPenalty: float32(FrequencyPenalty),
	}
	resp, err := client.CreateChatCompletion(ctx, req)
	if err != nil {
		return nil, err
	}
	var r []string
	for _, c := range resp.Choices {
		r = append(r, c.Text)
	}
	return r, nil
}
*/
2023-02-13 05:14:46 +00:00
func GetCompletionResponse ( client * gpt3 . Client , ctx context . Context , question string ) ( [ ] string , error ) {
2023-02-13 02:41:18 +00:00
if CleanPrompt {
question = strings . ReplaceAll ( question , "\n" , " " )
question = strings . ReplaceAll ( question , " " , " " )
}
2023-02-13 06:30:35 +00:00
// insert newline at end to prevent completion of question
if ! strings . HasSuffix ( question , "\n" ) {
question += "\n"
}
2023-02-13 02:01:59 +00:00
req := gpt3 . CompletionRequest {
2023-02-24 00:02:52 +00:00
Model : Model ,
2023-02-13 06:30:35 +00:00
MaxTokens : MaxTokens ,
Prompt : question ,
Echo : Echo ,
N : Count ,
Temperature : float32 ( Temp ) ,
TopP : float32 ( TopP ) ,
PresencePenalty : float32 ( PresencePenalty ) ,
FrequencyPenalty : float32 ( FrequencyPenalty ) ,
2023-02-13 02:01:59 +00:00
}
resp , err := client . CreateCompletion ( ctx , req )
if err != nil {
2023-02-13 05:14:46 +00:00
return nil , err
2023-02-13 02:01:59 +00:00
}
2023-02-13 05:14:46 +00:00
var r [ ] string
for _ , c := range resp . Choices {
r = append ( r , c . Text )
}
return r , nil
}
func GetEditsResponse ( client * gpt3 . Client , ctx context . Context , input , instruction string ) ( [ ] string , error ) {
if CleanPrompt {
input = strings . ReplaceAll ( input , "\n" , " " )
input = strings . ReplaceAll ( input , " " , " " )
}
2023-02-24 00:02:52 +00:00
m := Model
2023-02-13 05:14:46 +00:00
req := gpt3 . EditsRequest {
Model : & m ,
Input : input ,
Instruction : instruction ,
N : Count ,
Temperature : float32 ( Temp ) ,
TopP : float32 ( TopP ) ,
}
resp , err := client . Edits ( ctx , req )
if err != nil {
return nil , err
}
var r [ ] string
for _ , c := range resp . Choices {
r = append ( r , c . Text )
}
return r , nil
}
func GetCodeResponse ( client * gpt3 . Client , ctx context . Context , question string ) ( [ ] string , error ) {
if CleanPrompt {
question = strings . ReplaceAll ( question , "\n" , " " )
question = strings . ReplaceAll ( question , " " , " " )
}
2023-02-13 06:30:35 +00:00
// insert newline at end to prevent completion of question
if ! strings . HasSuffix ( question , "\n" ) {
question += "\n"
}
2023-02-13 05:14:46 +00:00
req := gpt3 . CompletionRequest {
2023-02-13 06:30:35 +00:00
Model : gpt3 . CodexCodeDavinci002 ,
MaxTokens : MaxTokens ,
Prompt : question ,
Echo : Echo ,
N : Count ,
Temperature : float32 ( Temp ) ,
TopP : float32 ( TopP ) ,
PresencePenalty : float32 ( PresencePenalty ) ,
FrequencyPenalty : float32 ( FrequencyPenalty ) ,
2023-02-13 05:14:46 +00:00
}
resp , err := client . CreateCompletion ( ctx , req )
if err != nil {
return nil , err
}
var r [ ] string
for _ , c := range resp . Choices {
r = append ( r , c . Text )
}
return r , nil
2023-02-13 02:01:59 +00:00
}
2023-02-13 04:02:53 +00:00
// printVersion prints the VCS commit (suffixed "+dirty" when the tree was
// modified), build date, and Go version from the binary's embedded build
// info.
func printVersion() {
	info, ok := debug.ReadBuildInfo()
	if !ok {
		// binary built without module/build info; avoid nil dereference
		fmt.Println("build information unavailable")
		return
	}
	commit := ""
	buildDate := ""
	dirty := false
	for _, s := range info.Settings {
		switch s.Key {
		case "vcs.revision":
			commit = s.Value
		case "vcs.time":
			buildDate = s.Value
		case "vcs.modified":
			dirty = s.Value == "true"
		}
	}
	if dirty {
		commit += "+dirty"
	}
	fmt.Printf("%s %s %s\n", commit, buildDate, info.GoVersion)
}
2023-02-13 02:01:59 +00:00
// NullWriter is an io.Writer that silently discards everything written to it.
type NullWriter int

// Write implements io.Writer by dropping p and reporting zero bytes written.
func (nw NullWriter) Write(p []byte) (int, error) {
	return 0, nil
}
func main ( ) {
2023-02-24 00:02:52 +00:00
2023-02-13 02:01:59 +00:00
apiKey := os . Getenv ( "CHATGPT_API_KEY" )
if apiKey == "" {
fmt . Println ( "CHATGPT_API_KEY environment var is missing\nVisit https://platform.openai.com/account/api-keys to get one\n" )
os . Exit ( 1 )
}
2023-03-21 07:38:28 +00:00
if PromptDir == "" {
if v := os . Getenv ( "CHATGPT_PROMPT_DIR" ) ; v != "" {
PromptDir = v
}
}
2023-02-13 02:01:59 +00:00
client := gpt3 . NewClient ( apiKey )
rootCmd := & cobra . Command {
Use : "chatgpt [file]" ,
Short : "Chat with ChatGPT in console." ,
2023-02-13 02:12:32 +00:00
Long : LongHelp ,
2023-02-13 02:01:59 +00:00
Run : func ( cmd * cobra . Command , args [ ] string ) {
2023-02-13 04:02:53 +00:00
if Version {
printVersion ( )
os . Exit ( 0 )
}
2023-02-13 02:01:59 +00:00
var err error
var filename string
2023-02-13 02:17:35 +00:00
// We build up PromptText as we go, based on flags
2023-03-21 07:38:28 +00:00
// Handle the prompt flag
if Prompt != "" {
var files [ ] fs . DirEntry
2023-02-13 02:01:59 +00:00
2023-03-21 07:38:28 +00:00
if PromptDir == "" {
files , err = predefined . ReadDir ( "prompts" )
if err != nil {
panic ( err )
}
} else {
files , err = os . ReadDir ( PromptDir )
if err != nil {
fmt . Println ( err )
os . Exit ( 1 )
}
2023-02-13 02:01:59 +00:00
}
2023-02-13 02:17:35 +00:00
// list and exit
2023-03-21 07:38:28 +00:00
if Prompt == "list" {
2023-02-13 02:01:59 +00:00
for _ , f := range files {
fmt . Println ( strings . TrimSuffix ( f . Name ( ) , ".txt" ) )
}
os . Exit ( 0 )
}
2023-03-21 07:38:28 +00:00
// are we in view mode?
var viewMode bool
if strings . HasPrefix ( Prompt , "view:" ) {
Prompt = strings . TrimPrefix ( Prompt , "view:" )
viewMode = true
2023-02-13 02:01:59 +00:00
}
2023-03-21 07:38:28 +00:00
// read prompt pretext
var contents [ ] byte
if PromptDir == "" {
contents , err = predefined . ReadFile ( "prompts/" + Prompt + ".txt" )
} else {
contents , err = os . ReadFile ( filepath . Join ( PromptDir , Prompt + ".txt" ) )
}
if err != nil {
fmt . Println ( err )
os . Exit ( 1 )
}
// print and exit or...
2023-02-13 06:30:35 +00:00
// prime prompt with known pretext
2023-03-21 07:38:28 +00:00
if viewMode {
fmt . Println ( string ( contents ) )
os . Exit ( 0 )
} else {
PromptText = string ( contents )
2023-02-13 02:01:59 +00:00
}
2023-02-13 02:17:35 +00:00
// prime prompt with custom pretext
2023-02-13 02:01:59 +00:00
if PromptText == "" {
2023-03-21 07:38:28 +00:00
PromptText = Prompt
2023-02-13 02:01:59 +00:00
}
}
2023-04-05 22:35:19 +00:00
// no args, or interactive... read from stdin
2023-02-13 02:17:35 +00:00
// this is mainly for replacing text in vim
2023-04-05 22:35:19 +00:00
if len ( args ) == 0 && ! PromptMode {
2023-02-13 02:01:59 +00:00
reader := bufio . NewReader ( os . Stdin )
var buf bytes . Buffer
for {
2023-02-13 02:12:32 +00:00
b , err := reader . ReadByte ( )
if err != nil {
break
}
buf . WriteByte ( b )
2023-02-13 02:01:59 +00:00
}
PromptText += buf . String ( )
} else if len ( args ) == 1 {
2023-02-13 02:17:35 +00:00
// if we have an arg, add it to the prompt
2023-02-13 02:01:59 +00:00
filename = args [ 0 ]
content , err := os . ReadFile ( filename )
if err != nil {
fmt . Println ( err )
return
}
PromptText += string ( content )
}
2023-02-13 02:17:35 +00:00
// if there is a question, it comes last in the prompt
2023-02-13 05:14:46 +00:00
if Question != "" && ! EditMode {
2023-02-13 02:01:59 +00:00
PromptText += "\n" + Question
}
2023-02-13 02:17:35 +00:00
// interactive or file mode
2023-02-13 02:01:59 +00:00
if PromptMode {
2023-02-13 02:41:18 +00:00
fmt . Println ( interactiveHelp )
2023-02-13 02:01:59 +00:00
fmt . Println ( PromptText )
err = RunPrompt ( client )
} else {
2023-02-13 02:17:35 +00:00
// empty filename (no args) prints to stdout
2023-02-13 02:01:59 +00:00
err = RunOnce ( client , filename )
}
if err != nil {
fmt . Println ( err )
os . Exit ( 1 )
}
} ,
}
2023-02-13 02:17:35 +00:00
// setup flags
2023-02-13 05:14:46 +00:00
rootCmd . Flags ( ) . BoolVarP ( & Version , "version" , "" , false , "print version information" )
// prompt releated
2023-02-13 02:01:59 +00:00
rootCmd . Flags ( ) . StringVarP ( & Question , "question" , "q" , "" , "ask a single question and print the response back" )
2023-03-21 07:38:28 +00:00
rootCmd . Flags ( ) . StringVarP ( & Prompt , "prompt" , "p" , "" , "prompt to add to ChatGPT input, use 'list' or 'view:<name>' to inspect predefined, '<name>' to use a prompt, or otherwise supply any custom text" )
rootCmd . Flags ( ) . StringVarP ( & PromptDir , "prompt-dir" , "P" , "" , "directory containing custom prompts, if not set the embedded defaults are used" )
2023-02-13 02:01:59 +00:00
rootCmd . Flags ( ) . BoolVarP ( & PromptMode , "interactive" , "i" , false , "start an interactive session with ChatGPT" )
2023-02-13 05:14:46 +00:00
rootCmd . Flags ( ) . BoolVarP ( & EditMode , "edit" , "e" , false , "request an edit with ChatGPT" )
rootCmd . Flags ( ) . BoolVarP ( & CodeMode , "code" , "c" , false , "request code completion with ChatGPT" )
rootCmd . Flags ( ) . BoolVarP ( & CleanPrompt , "clean" , "x" , false , "remove excess whitespace from prompt before sending" )
2023-02-13 03:04:45 +00:00
rootCmd . Flags ( ) . BoolVarP ( & WriteBack , "write" , "w" , false , "write response to end of context file" )
2023-02-13 02:01:59 +00:00
2023-02-13 05:14:46 +00:00
// params related
rootCmd . Flags ( ) . IntVarP ( & MaxTokens , "tokens" , "T" , 1024 , "set the MaxTokens to generate per response" )
rootCmd . Flags ( ) . IntVarP ( & Count , "count" , "C" , 1 , "set the number of response options to create" )
2023-02-13 06:30:35 +00:00
rootCmd . Flags ( ) . BoolVarP ( & Echo , "echo" , "E" , false , "Echo back the prompt, useful for vim coding" )
2023-03-21 07:38:28 +00:00
rootCmd . Flags ( ) . Float64VarP ( & Temp , "temp" , "" , 0.7 , "set the temperature parameter" )
2023-02-13 05:14:46 +00:00
rootCmd . Flags ( ) . Float64VarP ( & TopP , "topp" , "" , 1.0 , "set the TopP parameter" )
rootCmd . Flags ( ) . Float64VarP ( & PresencePenalty , "pres" , "" , 0.0 , "set the Presence Penalty parameter" )
rootCmd . Flags ( ) . Float64VarP ( & FrequencyPenalty , "freq" , "" , 0.0 , "set the Frequency Penalty parameter" )
2023-02-24 00:02:52 +00:00
rootCmd . Flags ( ) . StringVarP ( & Model , "model" , "m" , gpt3 . GPT3TextDavinci003 , "select the model to use with -q or -e" )
2023-02-13 05:14:46 +00:00
// run the command
2023-02-13 02:01:59 +00:00
rootCmd . Execute ( )
}
func RunPrompt ( client * gpt3 . Client ) error {
ctx := context . Background ( )
scanner := bufio . NewScanner ( os . Stdin )
quit := false
for ! quit {
fmt . Print ( "> " )
if ! scanner . Scan ( ) {
break
}
question := scanner . Text ( )
2023-02-13 05:14:46 +00:00
parts := strings . Fields ( question )
// look for commands
switch parts [ 0 ] {
case "quit" , "q" , "exit" :
quit = true
continue
2023-02-13 02:41:18 +00:00
2023-02-13 05:14:46 +00:00
case "save" :
2023-02-13 02:41:18 +00:00
name := parts [ 1 ]
2023-02-13 05:14:46 +00:00
fmt . Printf ( "saving session to %s\n" , name )
2023-02-13 02:41:18 +00:00
err := os . WriteFile ( name , [ ] byte ( PromptText ) , 0644 )
if err != nil {
2023-02-13 05:14:46 +00:00
fmt . Println ( err )
2023-02-13 02:41:18 +00:00
}
2023-02-13 05:14:46 +00:00
continue
2023-02-13 02:41:18 +00:00
2023-03-12 00:42:38 +00:00
case "model" :
if len ( parts ) == 1 {
fmt . Println ( "model is set to" , Model )
continue
}
Model = parts [ 1 ]
fmt . Println ( "model is now" , Model )
continue
2023-02-13 05:14:46 +00:00
case "tokens" :
if len ( parts ) == 1 {
fmt . Println ( "tokens is set to" , MaxTokens )
continue
}
c , err := strconv . Atoi ( parts [ 1 ] )
if err != nil {
fmt . Println ( err )
continue
}
MaxTokens = c
fmt . Println ( "tokens is now" , MaxTokens )
2023-02-13 02:41:18 +00:00
continue
2023-02-13 05:14:46 +00:00
case "count" :
if len ( parts ) == 1 {
fmt . Println ( "count is set to" , Count )
continue
}
c , err := strconv . Atoi ( parts [ 1 ] )
if err != nil {
fmt . Println ( err )
continue
}
Count = c
fmt . Println ( "count is now" , Count )
continue
case "temp" :
if len ( parts ) == 1 {
fmt . Println ( "temp is set to" , Temp )
continue
}
f , err := strconv . ParseFloat ( parts [ 1 ] , 64 )
if err != nil {
fmt . Println ( err )
continue
}
Temp = f
fmt . Println ( "temp is now" , Temp )
case "topp" :
if len ( parts ) == 1 {
fmt . Println ( "topp is set to" , TopP )
continue
}
f , err := strconv . ParseFloat ( parts [ 1 ] , 64 )
if err != nil {
fmt . Println ( err )
continue
}
TopP = f
fmt . Println ( "topp is now" , TopP )
case "pres" :
if len ( parts ) == 1 {
fmt . Println ( "pres is set to" , PresencePenalty )
continue
}
f , err := strconv . ParseFloat ( parts [ 1 ] , 64 )
if err != nil {
fmt . Println ( err )
continue
}
PresencePenalty = f
fmt . Println ( "pres is now" , PresencePenalty )
case "freq" :
if len ( parts ) == 1 {
fmt . Println ( "freq is set to" , FrequencyPenalty )
continue
}
f , err := strconv . ParseFloat ( parts [ 1 ] , 64 )
if err != nil {
fmt . Println ( err )
continue
}
FrequencyPenalty = f
fmt . Println ( "freq is now" , FrequencyPenalty )
2023-02-13 02:01:59 +00:00
default :
2023-02-13 02:17:35 +00:00
// add the question to the existing prompt text, to keep context
2023-02-13 02:41:18 +00:00
PromptText += "\n> " + question
2023-02-13 05:14:46 +00:00
var R [ ] string
var err error
if CodeMode {
R , err = GetCodeResponse ( client , ctx , PromptText )
} else if EditMode {
R , err = GetEditsResponse ( client , ctx , PromptText , Question )
} else {
R , err = GetCompletionResponse ( client , ctx , PromptText )
}
2023-02-13 02:01:59 +00:00
if err != nil {
return err
}
2023-02-13 05:14:46 +00:00
final := ""
if len ( R ) == 1 {
final = R [ 0 ]
} else {
for i , r := range R {
final += fmt . Sprintf ( "[%d]: %s\n\n" , i , r )
}
fmt . Println ( final )
ok := false
pos := 0
for ! ok {
fmt . Print ( "> " )
if ! scanner . Scan ( ) {
break
}
ans := scanner . Text ( )
pos , err = strconv . Atoi ( ans )
if err != nil {
fmt . Println ( err )
continue
}
if pos < 0 || pos >= Count {
fmt . Println ( "choice must be between 0 and" , Count - 1 )
continue
}
ok = true
}
final = R [ pos ]
}
2023-02-13 02:17:35 +00:00
// we add response to the prompt, this is how ChatGPT sessions keep context
2023-02-13 05:14:46 +00:00
PromptText += "\n" + strings . TrimSpace ( final )
2023-02-13 02:17:35 +00:00
// print the latest portion of the conversation
2023-02-13 05:14:46 +00:00
fmt . Println ( final + "\n" )
2023-02-13 02:01:59 +00:00
}
}
2023-02-13 02:12:32 +00:00
2023-02-13 02:01:59 +00:00
return nil
}
func RunOnce ( client * gpt3 . Client , filename string ) error {
ctx := context . Background ( )
2023-02-13 05:14:46 +00:00
var R [ ] string
var err error
if CodeMode {
R , err = GetCodeResponse ( client , ctx , PromptText )
} else if EditMode {
R , err = GetEditsResponse ( client , ctx , PromptText , Question )
} else {
R , err = GetCompletionResponse ( client , ctx , PromptText )
}
2023-02-13 02:01:59 +00:00
if err != nil {
return err
}
2023-02-13 05:14:46 +00:00
final := ""
if len ( R ) == 1 {
final = R [ 0 ]
} else {
for i , r := range R {
final += fmt . Sprintf ( "[%d]: %s\n\n" , i , r )
}
}
2023-02-13 03:04:45 +00:00
if filename == "" || ! WriteBack {
2023-02-13 05:14:46 +00:00
fmt . Println ( final )
2023-02-13 02:01:59 +00:00
} else {
2023-02-13 05:14:46 +00:00
err = AppendToFile ( filename , final )
2023-02-13 02:01:59 +00:00
if err != nil {
return err
}
}
return nil
}
// AppendToFile provides a function to append data to an existing file,
// creating it if it doesn't exist
func AppendToFile ( filename string , data string ) error {
// Open the file in append mode
file , err := os . OpenFile ( filename , os . O_APPEND | os . O_CREATE | os . O_WRONLY , 0644 )
if err != nil {
return err
}
// Append the data to the file
_ , err = file . WriteString ( data )
if err != nil {
return err
}
return file . Close ( )
}