Skip to content

Commit

Permalink
Rewrite in Go (#1)
Browse files Browse the repository at this point in the history
  • Loading branch information
chrisdothtml authored Jun 1, 2024
1 parent 6557fc9 commit 90d6538
Show file tree
Hide file tree
Showing 14 changed files with 509 additions and 371 deletions.
63 changes: 16 additions & 47 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,60 +5,29 @@ on:
- 'v*.*.*'

jobs:
build:
strategy:
matrix:
include:
- os: windows-latest
artifact_name: windows
- os: ubuntu-latest
artifact_name: linux
- os: macos-latest
artifact_name: macos
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.x'
- name: Install dependencies
run: pip install -r requirements.txt
- name: Build binary
run: pyinstaller --onefile gpt_cmd.py
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.artifact_name }}
path: 'dist/gpt_cmd*'
retention-days: 1

release:
runs-on: ubuntu-latest
needs: build
build_and_release:
permissions:
contents: write
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Download artifacts
uses: actions/download-artifact@v4
- name: Rename artifacts
- name: Set up Go
uses: actions/setup-go@v4
with:
go-version: '1.22'
- name: Build binaries
run: |
mkdir -p bin
for os in windows linux macos; do
ext=""
if [ "$os" = "windows" ]; then
ext=".exe"
fi
GOOS=linux GOARCH=386 go build -o bin/gpt_cmd-linux-386
GOOS=linux GOARCH=amd64 go build -o bin/gpt_cmd-linux
GOOS=linux GOARCH=arm go build -o bin/gpt_cmd-linux-arm
GOOS=linux GOARCH=arm64 go build -o bin/gpt_cmd-linux-arm64
src="${os}/gpt_cmd${ext}"
dest="bin/gpt_cmd-${os}${ext}"
GOOS=darwin GOARCH=amd64 go build -o bin/gpt_cmd-darwin-amd64
GOOS=darwin GOARCH=arm64 go build -o bin/gpt_cmd-darwin-arm64
echo "Moving $src to $dest"
mv "$src" "$dest"
rm -rf "${os}/"
done
GOOS=windows GOARCH=386 go build -o bin/gpt_cmd-windows-386.exe
GOOS=windows GOARCH=amd64 go build -o bin/gpt_cmd-windows.exe
- name: Create release
uses: ncipollo/[email protected]
with:
artifacts: 'bin/gpt_cmd*'
artifacts: 'bin/*'
15 changes: 3 additions & 12 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,27 +2,18 @@

## Running locally

First, install the dependencies (**note**: make sure you're using python 3 and pip 3):
First, install the dependencies (**note**: this was written with go v1.22.x):

```sh
# create virtual env
python -m venv env

# activate env
source env/bin/activate

# install deps
pip install -r requirements.txt
go mod tidy
```

Now you can run the tool via:

```sh
python -m gpt_cmd [...]
go run main.go [...]
```

## Cutting a release

Pushing a version tag (e.g. `v1.0.0`) will trigger the [release.yml](.github/workflows/release.yml) GitHub workflow, which will build binaries for supported OSes and publish a release with them.

The binaries are generated using [pyinstaller](https://pyinstaller.org/en/stable/).
33 changes: 33 additions & 0 deletions cmd/gpt.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
package cmd

import (
"context"

openai "github.com/sashabaranov/go-openai"
)

var OPENAI_CLIENT *openai.Client

type ChatMessage = openai.ChatCompletionMessage

// GetGPTResponse sends the message thread to the OpenAI chat completion
// API and returns the assistant reply content as a raw string.
//
// The package-level client is created lazily on first use with the
// provided token; later calls reuse it (a different token after the
// first call has no effect). The request forces a JSON-object response
// format, matching what the rest of the tool expects to parse. Any API
// error aborts the program via panic.
func GetGPTResponse(messages []ChatMessage, model string, token string) string {
	if OPENAI_CLIENT == nil {
		OPENAI_CLIENT = openai.NewClient(token)
	}

	request := openai.ChatCompletionRequest{
		Model:    model,
		Messages: messages,
		ResponseFormat: &openai.ChatCompletionResponseFormat{
			Type: "json_object",
		},
	}

	resp, err := OPENAI_CLIENT.CreateChatCompletion(context.Background(), request)
	if err != nil {
		panic(err)
	}

	return resp.Choices[0].Message.Content
}
231 changes: 231 additions & 0 deletions cmd/root.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,231 @@
package cmd

import (
_ "embed"
"encoding/json"
"fmt"
"os"
"path/filepath"
"runtime"
"strings"
"time"

"gpt_cmd/utils"

dedent "github.com/lithammer/dedent"
)

//go:embed system_prompt.txt
var SYSTEM_PROMPT string

var PROJECT_FILES_DIR = filepath.Join(utils.GetHomeDir(), ".gpt_cmd")
var CONVOS_DIR = filepath.Join(PROJECT_FILES_DIR, ".convos")
var ansi = utils.Ansi{}

// RuntimeOptions carries the user-configurable settings resolved from
// environment variables in Execute and consumed by RunLoop.
type RuntimeOptions struct {
	// DangerouslySkipPrompts, when true, executes GPT-suggested
	// commands without asking the user for confirmation first.
	DangerouslySkipPrompts bool
	// Model is the OpenAI model name passed to the chat completion API.
	Model string
	// APIToken is the OpenAI API token used to create the client.
	APIToken string
}

// GPTResponse is the JSON shape GPT is instructed (via the system
// prompt) to reply with on each turn of the conversation.
type GPTResponse struct {
	// Commands lists terminal commands to execute next, in order.
	Commands []string `json:"commands"`
	// Context is free-form explanatory text from GPT, printed to the user.
	Context string `json:"context"`
	// ConvoFileName is a short dash-separated name GPT generates in its
	// first reply; it is used to name the saved conversation file.
	ConvoFileName string `json:"convo-file-name"`
	// Status is "success" or "failed" once GPT deems the goal finished;
	// it is empty while work continues.
	Status string `json:"status"`
}

// RunLoop drives the conversation with GPT until the goal is resolved.
// Each iteration sends the message thread to GPT, executes any commands
// it returns (prompting the user first unless disabled), and appends
// the command results for the next turn. The process exits 0 when GPT
// reports success and 1 on failure, user refusal, or an unusable
// response; the conversation transcript is saved before every exit.
func RunLoop(goal string, opts *RuntimeOptions) {
	systemInfo := fmt.Sprintf("System info:\nOS: %s\nArchitecture: %s", runtime.GOOS, runtime.GOARCH)
	messages := []ChatMessage{
		{
			Role:    "system",
			Content: SYSTEM_PROMPT,
		},
		{
			Role:    "user",
			Content: fmt.Sprintf("%s\n%s", goal, systemInfo),
		},
	}

	convoTimestamp := time.Now().Format("2006-01-02_15-04-05")
	var convoFileName *string

	// used to progressively update the local file for this convo
	saveConvo := func() {
		fileName := convoTimestamp
		if convoFileName != nil {
			fileName = fmt.Sprintf("%s_%s", *convoFileName, convoTimestamp)
		}
		fileName += ".json"

		filePath := filepath.Join(CONVOS_DIR, fileName)
		utils.EnsureDir(CONVOS_DIR)
		utils.WriteFile(filePath, utils.JsonStringify(messages, true))
	}

	fmt.Printf("%s %s\n", ansi.Blue("Goal:"), goal)
	for {
		fmt.Println("\n----------")

		// In each iteration, call GPT with the latest messages thread
		rawResponse := GetGPTResponse(messages, opts.Model, opts.APIToken)
		// Add GPT's response to the messages thread
		messages = append(messages, ChatMessage{
			Role:    "assistant",
			Content: rawResponse,
		})
		var response GPTResponse
		// BUGFIX: this unmarshal error was previously ignored, so a
		// malformed (non-JSON) GPT reply fell through to a misleading
		// "no further commands" error below. Report the parse failure
		// explicitly, save the transcript, and exit.
		if err := json.Unmarshal([]byte(rawResponse), &response); err != nil {
			fmt.Println(ansi.Red(fmt.Sprintf("ERROR: Unable to parse GPT response as JSON: %v", err)))
			saveConvo()
			os.Exit(1)
		}

		// Capture the convo file name the first time GPT provides one.
		if convoFileName == nil && response.ConvoFileName != "" {
			convoFileName = &response.ConvoFileName
		}

		// If `status` prop is provided, it means GPT determined the
		// goal is completed. Report the status and print any context
		// the GPT provided
		if response.Status != "" {
			wasSuccess := response.Status == "success"

			if wasSuccess {
				fmt.Println(ansi.Green("✅ Goal successfully achieved."))
			} else {
				fmt.Println(ansi.Red("❌ Goal failed."))
			}

			if response.Context != "" {
				fmt.Println(response.Context)
			}

			saveConvo()
			if wasSuccess {
				os.Exit(0)
			} else {
				os.Exit(1)
			}
		}

		if len(response.Commands) > 0 {
			// This use of the `context` prop is for the GPT to provide
			// info about the command(s) it's running
			if response.Context != "" {
				fmt.Printf("%s %s\n", ansi.Blue("Context:"), response.Context)
			}

			var cmdResults []map[string]interface{}
			for index, cmd := range response.Commands {
				if index > 0 {
					fmt.Println("")
				}

				fmt.Printf("%s %s\n", ansi.Blue("Command:"), ansi.Dim(cmd))
				if !opts.DangerouslySkipPrompts {
					if utils.PromptUserYN("OK to run command?") {
						utils.ClearPrevLine()
					} else {
						// User didn't want to run command, so save convo and exit
						saveConvo()
						os.Exit(1)
					}
				}

				stdout, exitCode := utils.ExecCmd(cmd)

				var exitCodeText = "Exit code:"
				if exitCode == 0 {
					exitCodeText = ansi.Green(exitCodeText)
				} else {
					exitCodeText = ansi.Red(exitCodeText)
				}
				fmt.Printf("%s %s\n", exitCodeText, ansi.Dim(fmt.Sprint(exitCode)))
				if len(stdout) > 0 {
					fmt.Println(ansi.Dim(stdout))
				}

				cmdResults = append(cmdResults, map[string]interface{}{
					"command":   cmd,
					"stdout":    stdout,
					"exit_code": exitCode,
				})

				// Stop running this batch as soon as one command fails;
				// the failure is reported back to GPT on the next turn.
				if exitCode != 0 {
					break
				}
			}

			// Add new message with the result(s) of the command(s)
			messages = append(messages, ChatMessage{
				Role:    "user",
				Content: utils.JsonStringify(cmdResults, false),
			})
		} else {
			fmt.Println(ansi.Red("ERROR: No further commands provided, and no success/failure status was provided by GPT"))
			saveConvo()
			os.Exit(1)
		}
	}
}

// Execute is the CLI entry point: it validates os.Args, handles the
// informational flags, resolves runtime options from environment
// variables, and hands the goal off to RunLoop.
func Execute() {
	helpText := strings.TrimSpace(dedent.Dedent(`
		Usage:
		  gpt_cmd <goal>
		  gpt_cmd --get-convos-dir
		  gpt_cmd --help, -h
		Environment vars:
		  GPT_CMD_DANGEROUSLY_SKIP_PROMPTS [true]
		  GPT_CMD_MODEL [string] (Default: gpt-4o)
		  GPT_CMD_TOKEN [string]
		  GPT_CMD_TOKEN_FILE_PATH [string] (Default: ~/OPENAI_TOKEN)
	`))

	// Exactly one non-empty argument is required.
	if len(os.Args) != 2 || os.Args[1] == "" {
		fmt.Println(helpText)
		os.Exit(1)
	}

	switch arg := os.Args[1]; {
	case arg == "--help" || arg == "-h":
		fmt.Println(helpText)
		os.Exit(0)
	case arg == "--get-convos-dir":
		fmt.Println(CONVOS_DIR)
		os.Exit(0)
	case strings.HasPrefix(arg, "--"):
		// unrecognized arg passed in
		fmt.Println(helpText)
		os.Exit(1)
	}

	options := RuntimeOptions{
		DangerouslySkipPrompts: utils.GetEnv("GPT_CMD_DANGEROUSLY_SKIP_PROMPTS", "") == "true",
		Model:                  utils.GetEnv("GPT_CMD_MODEL", "gpt-4o"),
	}

	// Prefer the env var; fall back to reading a token file. A missing
	// or unreadable token file is not an error here — it just leaves
	// the token empty, which is reported below.
	options.APIToken = utils.GetEnv("GPT_CMD_TOKEN", "")
	if options.APIToken == "" {
		tokenFilePath := utils.GetEnv(
			"GPT_CMD_TOKEN_FILE_PATH",
			filepath.Join(utils.GetHomeDir(), "OPENAI_TOKEN"),
		)

		if data, err := os.ReadFile(tokenFilePath); err == nil {
			options.APIToken = strings.TrimSpace(string(data))
		}
	}

	if options.APIToken == "" {
		fmt.Println(ansi.Red("ERROR: Unable to resolve an OpenAI token\n"))
		fmt.Println(helpText)
		os.Exit(1)
	}

	RunLoop(os.Args[1], &options)
}
11 changes: 11 additions & 0 deletions cmd/system_prompt.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
Your job is to run commands necessary for achieving a task from a terminal.

You'll be provided with an end goal, and you'll send replies in JSON format containing an array of commands to run in the terminal. Each time you send command(s) to run, you'll then be provided with the resulting stdout and stderr (you're being accessed via the OpenAI API, so when possible, include arguments in your commands to reduce noise in stdout and stderr to limit API usage).

To convey context, you can use a JSON object with `context` (string) and `commands` (array of strings).

When you believe that the end goal is accomplished or unrecoverably failed, send a JSON object containing `status` ("success" or "failed") and `context` (noting things like commands that can be used to use any tools you installed, or why it failed if it did).

IMPORTANT NOTE: each command you provide is being executed in a subshell via a golang script, which means things like `cd` won't persist across commands, so you'll need to account for that.

IMPORTANT NOTE: in your response to the first user prompt, generate a short (5 words max) dash-separated file name to describe their prompt. Provide this in a `convo-file-name` property in your JSON object.
7 changes: 7 additions & 0 deletions go.mod
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
module gpt_cmd

go 1.22.3

require github.com/sashabaranov/go-openai v1.24.1

require github.com/lithammer/dedent v1.1.0
4 changes: 4 additions & 0 deletions go.sum
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
github.com/lithammer/dedent v1.1.0 h1:VNzHMVCBNG1j0fh3OrsFRkVUwStdDArbgBWoPAffktY=
github.com/lithammer/dedent v1.1.0/go.mod h1:jrXYCQtgg0nJiN+StA2KgR7w6CiQNv9Fd/Z9BP0jIOc=
github.com/sashabaranov/go-openai v1.24.1 h1:DWK95XViNb+agQtuzsn+FyHhn3HQJ7Va8z04DQDJ1MI=
github.com/sashabaranov/go-openai v1.24.1/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
Loading

0 comments on commit 90d6538

Please sign in to comment.