diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 1199f97..169c25b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -5,60 +5,29 @@ on: - 'v*.*.*' jobs: - build: - strategy: - matrix: - include: - - os: windows-latest - artifact_name: windows - - os: ubuntu-latest - artifact_name: linux - - os: macos-latest - artifact_name: macos - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@v4 - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.x' - - name: Install dependencies - run: pip install -r requirements.txt - - name: Build binary - run: pyinstaller --onefile gpt_cmd.py - - name: Upload artifact - uses: actions/upload-artifact@v4 - with: - name: ${{ matrix.artifact_name }} - path: 'dist/gpt_cmd*' - retention-days: 1 - - release: - runs-on: ubuntu-latest - needs: build + build_and_release: permissions: contents: write + runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Download artifacts - uses: actions/download-artifact@v4 - - name: Rename artifacts + - name: Set up Go + uses: actions/setup-go@v4 + with: + go-version: '1.22' + - name: Build binaries run: | - mkdir -p bin - for os in windows linux macos; do - ext="" - if [ "$os" = "windows" ]; then - ext=".exe" - fi + GOOS=linux GOARCH=386 go build -o bin/gpt_cmd-linux-386 + GOOS=linux GOARCH=amd64 go build -o bin/gpt_cmd-linux-amd64 + GOOS=linux GOARCH=arm go build -o bin/gpt_cmd-linux-arm + GOOS=linux GOARCH=arm64 go build -o bin/gpt_cmd-linux-arm64 - src="${os}/gpt_cmd${ext}" - dest="bin/gpt_cmd-${os}${ext}" + GOOS=darwin GOARCH=amd64 go build -o bin/gpt_cmd-darwin-amd64 + GOOS=darwin GOARCH=arm64 go build -o bin/gpt_cmd-darwin-arm64 - echo "Moving $src to $dest" - mv "$src" "$dest" - rm -rf "${os}/" - done + GOOS=windows GOARCH=386 go build -o bin/gpt_cmd-windows-386.exe + GOOS=windows GOARCH=amd64 go build -o bin/gpt_cmd-windows-amd64.exe - name: Create release uses: ncipollo/release-action@v1.14.0 
with: - artifacts: 'bin/gpt_cmd*' + artifacts: 'bin/*' diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b1e83a9..405d412 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,27 +2,18 @@ ## Running locally -First, install the dependencies (**note**: make sure you're using python 3 and pip 3): +First, install the dependencies (**note**: this was written with go v1.22.x): ```sh -# create virtual env -python -m venv env - -# activate env -source env/bin/activate - -# install deps -pip install -r requirements.txt +go mod tidy ``` Now you can run the tool via: ```sh -python -m gpt_cmd [...] +go run main.go [...] ``` ## Cutting a release Pushing a version tag (e.g. `v1.0.0`) will trigger the [release.yml](.github/workflows/release.yml) GitHub workflow, which will build binaries for supported OSes and publish a release with them. - -The binaries are generated using [pyinstaller](https://pyinstaller.org/en/stable/). diff --git a/cmd/gpt.go b/cmd/gpt.go new file mode 100644 index 0000000..ebc107a --- /dev/null +++ b/cmd/gpt.go @@ -0,0 +1,33 @@ +package cmd + +import ( + "context" + + openai "github.com/sashabaranov/go-openai" +) + +var OPENAI_CLIENT *openai.Client + +type ChatMessage = openai.ChatCompletionMessage + +func GetGPTResponse(messages []ChatMessage, model string, token string) string { + if OPENAI_CLIENT == nil { + OPENAI_CLIENT = openai.NewClient(token) + } + + resp, err := OPENAI_CLIENT.CreateChatCompletion( + context.Background(), + openai.ChatCompletionRequest{ + Model: model, + Messages: messages, + ResponseFormat: &openai.ChatCompletionResponseFormat{ + Type: "json_object", + }, + }, + ) + if err != nil { + panic(err) + } + + return resp.Choices[0].Message.Content +} diff --git a/cmd/root.go b/cmd/root.go new file mode 100644 index 0000000..f0fc20b --- /dev/null +++ b/cmd/root.go @@ -0,0 +1,231 @@ +package cmd + +import ( + _ "embed" + "encoding/json" + "fmt" + "os" + "path/filepath" + "runtime" + "strings" + "time" + + "gpt_cmd/utils" + + dedent 
"github.com/lithammer/dedent" +) + +//go:embed system_prompt.txt +var SYSTEM_PROMPT string + +var PROJECT_FILES_DIR = filepath.Join(utils.GetHomeDir(), ".gpt_cmd") +var CONVOS_DIR = filepath.Join(PROJECT_FILES_DIR, ".convos") +var ansi = utils.Ansi{} + +type RuntimeOptions struct { + DangerouslySkipPrompts bool + Model string + APIToken string +} + +type GPTResponse struct { + Commands []string `json:"commands"` + Context string `json:"context"` + ConvoFileName string `json:"convo-file-name"` + Status string `json:"status"` +} + +func RunLoop(goal string, opts *RuntimeOptions) { + systemInfo := fmt.Sprintf("System info:\nOS: %s\nArchitecture: %s", runtime.GOOS, runtime.GOARCH) + messages := []ChatMessage{ + { + Role: "system", + Content: SYSTEM_PROMPT, + }, + { + Role: "user", + Content: fmt.Sprintf("%s\n%s", goal, systemInfo), + }, + } + + convoTimestamp := time.Now().Format("2006-01-02_15-04-05") + var convoFileName *string + + // used to progressively update the local file for this convo + saveConvo := func() { + fileName := convoTimestamp + if convoFileName != nil { + fileName = fmt.Sprintf("%s_%s", *convoFileName, convoTimestamp) + } + fileName += ".json" + + filePath := filepath.Join(CONVOS_DIR, fileName) + utils.EnsureDir(CONVOS_DIR) + utils.WriteFile(filePath, utils.JsonStringify(messages, true)) + } + + fmt.Printf("%s %s\n", ansi.Blue("Goal:"), goal) + for { + fmt.Println("\n----------") + + // In each iteration, call GPT with the latest messages thread + rawResponse := GetGPTResponse(messages, opts.Model, opts.APIToken) + // Add GPT's response to the messages thread + messages = append(messages, ChatMessage{ + Role: "assistant", + Content: rawResponse, + }) + var response GPTResponse + json.Unmarshal([]byte(rawResponse), &response) + + if convoFileName == nil && response.ConvoFileName != "" { + convoFileName = &response.ConvoFileName + } + + // If `status` prop is provided, it means GPT determined the + // goal is completed. 
Report the status and print any context + // the GPT provided + if response.Status != "" { + wasSuccess := response.Status == "success" + + if wasSuccess { + fmt.Println(ansi.Green("✅ Goal successfully achieved.")) + } else { + fmt.Println(ansi.Red("❌ Goal failed.")) + } + + if response.Context != "" { + fmt.Println(response.Context) + } + + saveConvo() + if wasSuccess { + os.Exit(0) + } else { + os.Exit(1) + } + } + + if len(response.Commands) > 0 { + // This use of the `context` prop is for the GPT to provide + // info about the command(s) it's running + if response.Context != "" { + fmt.Printf("%s %s\n", ansi.Blue("Context:"), response.Context) + } + + var cmdResults []map[string]interface{} + for index, cmd := range response.Commands { + if index > 0 { + fmt.Println("") + } + + fmt.Printf("%s %s\n", ansi.Blue("Command:"), ansi.Dim(cmd)) + if !opts.DangerouslySkipPrompts { + if utils.PromptUserYN("OK to run command?") { + utils.ClearPrevLine() + } else { + // User didn't want to run command, so save convo and exit + saveConvo() + os.Exit(1) + } + } + + stdout, exitCode := utils.ExecCmd(cmd) + + var exitCodeText = "Exit code:" + if exitCode == 0 { + exitCodeText = ansi.Green(exitCodeText) + } else { + exitCodeText = ansi.Red(exitCodeText) + } + fmt.Printf("%s %s\n", exitCodeText, ansi.Dim(fmt.Sprint(exitCode))) + if len(stdout) > 0 { + fmt.Println(ansi.Dim(stdout)) + } + + cmdResults = append(cmdResults, map[string]interface{}{ + "command": cmd, + "stdout": stdout, + "exit_code": exitCode, + }) + + if exitCode != 0 { + break + } + } + + // Add new message with the result(s) of the command(s) + messages = append(messages, ChatMessage{ + Role: "user", + Content: utils.JsonStringify(cmdResults, false), + }) + } else { + fmt.Println(ansi.Red("ERROR: No further commands provided, and no success/failure status was provided by GPT")) + saveConvo() + os.Exit(1) + } + } +} + +func Execute() { + helpText := strings.TrimSpace(dedent.Dedent(` + Usage: + gpt_cmd + gpt_cmd 
--get-convos-dir + gpt_cmd --help, -h + + Environment vars: + GPT_CMD_DANGEROUSLY_SKIP_PROMPTS [true] + GPT_CMD_MODEL [string] (Default: gpt-4o) + GPT_CMD_TOKEN [string] + GPT_CMD_TOKEN_FILE_PATH [string] (Default: ~/OPENAI_TOKEN) + `)) + + if len(os.Args) != 2 || os.Args[1] == "" { + fmt.Println(helpText) + os.Exit(1) + } + + if os.Args[1] == "--help" || os.Args[1] == "-h" { + fmt.Println(helpText) + os.Exit(0) + } + + if os.Args[1] == "--get-convos-dir" { + fmt.Println(CONVOS_DIR) + os.Exit(0) + } + + // unrecognized arg passed in + if strings.HasPrefix(os.Args[1], "--") { + fmt.Println(helpText) + os.Exit(1) + } + + var options = RuntimeOptions{ + DangerouslySkipPrompts: utils.GetEnv("GPT_CMD_DANGEROUSLY_SKIP_PROMPTS", "") == "true", + Model: utils.GetEnv("GPT_CMD_MODEL", "gpt-4o"), + APIToken: "", + } + + token := utils.GetEnv("GPT_CMD_TOKEN", "") + if token == "" { + tokenFilePath := utils.GetEnv( + "GPT_CMD_TOKEN_FILE_PATH", + filepath.Join(utils.GetHomeDir(), "OPENAI_TOKEN"), + ) + + if data, err := os.ReadFile(tokenFilePath); err == nil { + token = strings.TrimSpace(string(data)) + } + } + options.APIToken = token + + if options.APIToken == "" { + fmt.Println(ansi.Red("ERROR: Unable to resolve an OpenAI token\n")) + fmt.Println(helpText) + os.Exit(1) + } + + RunLoop(os.Args[1], &options) +} diff --git a/cmd/system_prompt.txt b/cmd/system_prompt.txt new file mode 100644 index 0000000..e447d1c --- /dev/null +++ b/cmd/system_prompt.txt @@ -0,0 +1,11 @@ +Your job is to run commands necessary for achieving a task from a terminal. + +You'll be provided with an end goal, and you'll send replies in JSON format containing an array of commands to run in the terminal. Each time you send command(s) to run, you'll then be provided with the resulting stdout and stderr (you're being accessed via the OpenAI API, so when possible, include arguments in your commands to reduce noise in stdout and stderr to limit API usage). 
+ +To convey context, you can use a JSON object with `context` (string) and `commands` (array of strings). + +When you believe that the end goal is accomplished or unrecoverably failed, send a JSON object containing `status` ("success" or "failed") and `context` (noting things like commands that can be used to use any tools you installed, or why it failed if it did). + +IMPORTANT NOTE: each command you provide is being executed in a subshell via a golang script, which means things like `cd` won't persist across commands, so you'll need to account for that. + +IMPORTANT NOTE: in your response to the first user prompt, generate a short (5 words max) dash-separated file name to describe their prompt. Provide this in a `convo-file-name` property in your JSON object. diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..ab39428 --- /dev/null +++ b/go.mod @@ -0,0 +1,7 @@ +module gpt_cmd + +go 1.22.3 + +require github.com/sashabaranov/go-openai v1.24.1 + +require github.com/lithammer/dedent v1.1.0 diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..c7ecfe8 --- /dev/null +++ b/go.sum @@ -0,0 +1,4 @@ +github.com/lithammer/dedent v1.1.0 h1:VNzHMVCBNG1j0fh3OrsFRkVUwStdDArbgBWoPAffktY= +github.com/lithammer/dedent v1.1.0/go.mod h1:jrXYCQtgg0nJiN+StA2KgR7w6CiQNv9Fd/Z9BP0jIOc= +github.com/sashabaranov/go-openai v1.24.1 h1:DWK95XViNb+agQtuzsn+FyHhn3HQJ7Va8z04DQDJ1MI= +github.com/sashabaranov/go-openai v1.24.1/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg= diff --git a/gpt_cmd.py b/gpt_cmd.py deleted file mode 100644 index efe8696..0000000 --- a/gpt_cmd.py +++ /dev/null @@ -1,214 +0,0 @@ -import datetime -import json -import os -import platform -import subprocess -import sys -from openai import OpenAI - -SYSTEM_PROMPT = ''' -Your job is to run commands necessary for achieving a task from a terminal. - -You'll be provided with an end goal, and you'll send replies in JSON format containing an array of commands to run in the terminal. 
Each time you send command(s) to run, you'll then be provided with the resulting stdout and stderr (you're being accessed via the OpenAI API, so when possible, include arguments in your commands to reduce noise in stdout and stderr to limit API usage). - -To convey context, you can use a JSON object with `context` (string) and `commands` (array). - -When you believe that the end goal is accomplished or unrecoverably failed, send a JSON object containing `status` ("success" or "failed") and `context` (noting things like commands that can be used to use any tools you installed, or why it failed if it did). - -IMPORTANT NOTE: each command you provide is being executed in a subshell via a python script, which means things like `cd` won't persist across commands, so you'll need to account for that. - -IMPORTANT NOTE: in your response to the first user prompt, generate a short (5 words max) dash-separated file name to describe their prompt. Provide this in a `convo-file-name` property in your JSON object. 
-''' - -def ensure_dir(directory): - if not os.path.exists(directory): - os.makedirs(directory, exist_ok=True) - -def read_file(file_path): - with open(file_path, 'r') as file: - return file.read().strip() - -def write_file(file_path, content): - with open(file_path, 'w') as file: - file.write(content) - -# runtime options -OPTIONS = { - 'dangerously_skip_prompts': os.environ.get('GPT_CMD_DANGEROUSLY_SKIP_PROMPTS') == 'true', - 'model': os.environ.get('GPT_CMD_MODEL', 'gpt-4o'), - 'token_file_path': os.environ.get( - 'GPT_CMD_TOKEN_FILE_PATH', - os.path.join(os.path.expanduser('~'), 'OPENAI_TOKEN') - ), -} - -PROJECT_FILES_DIR = os.path.join(os.path.expanduser('~'), '.gpt_cmd') -CONVOS_DIR = os.path.join(PROJECT_FILES_DIR, '.convos') -OPENAI_CLIENT = None - -class ansi: - ''' - Convenience methods for wrapping text with ansi colors - ''' - - _blue = '\033[94m' - _dim = '\033[2m' - _green = '\033[92m' - _red = '\033[91m' - _reset = '\033[0m' - - @staticmethod - def color_text(text, color): - return '\n'.join(f"{color}{line}{ansi._reset}" for line in str(text).splitlines()) - - @staticmethod - def blue(text): - return ansi.color_text(text, ansi._blue) - - @staticmethod - def dim(text): - return ansi.color_text(text, ansi._dim) - - @staticmethod - def green(text): - return ansi.color_text(text, ansi._green) - - @staticmethod - def red(text): - return ansi.color_text(text, ansi._red) - -def call_gpt(messages): - global OPENAI_CLIENT - if OPENAI_CLIENT is None: - token = os.environ.get( - 'GPT_CMD_TOKEN', - read_file(OPTIONS['token_file_path']) - ) - OPENAI_CLIENT = OpenAI(api_key=token) - - response = OPENAI_CLIENT.chat.completions.create( - model=OPTIONS['model'], - response_format={ "type": "json_object" }, - messages=messages, - ) - return response.choices[0].message.content - -def exec_cmd(command): - result = subprocess.run( - command, - shell=True, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - text=True - ) - return result.stdout.strip(), 
result.returncode - -def clear_prev_line(): - sys.stdout.write('\x1b[1A') - sys.stdout.write('\x1b[2K') - sys.stdout.flush() - -def prompt_user_yn(prompt): - index = 0 - while True: - if index > 0: - clear_prev_line() - - response = input(f'{prompt} (Y/n) ').strip().lower() - if response in ['y', 'n', '']: - return response == 'y' or response == '' - index += 1 - -def main(goal): - convo_timestamp = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S") - convo_file_name = None - system_info=f'System info:\nOS: {sys.platform}\nArchitecture: {platform.machine()}' - messages = [ - {"role": "system", "content": SYSTEM_PROMPT}, - {"role": "user", "content": f'{goal}\n{system_info}'} - ] - - def save_convo(): - file_name = convo_timestamp - if convo_file_name is not None: - file_name = f'{convo_file_name}_{convo_timestamp}' - file_name += '.json' - - file_path = os.path.join(CONVOS_DIR, file_name) - ensure_dir(CONVOS_DIR) - write_file(file_path, json.dumps(messages, indent=2)) - - print(f"{ansi.blue('Goal:')} {goal}") - while True: - raw_response = call_gpt(messages) - messages.append({"role": "assistant", "content": raw_response}) - response = json.loads(raw_response) - - if convo_file_name is None and len(response.get('convo-file-name', '')) > 0: - convo_file_name = f"{response['convo-file-name']}_{convo_timestamp}" - - print('\n----------') - - if isinstance(response.get('status'), str): - was_success = response['status'] == 'success' - - if was_success: - print(ansi.green('✅ Goal successfully achieved.')) - else: - print(ansi.red('❌ Goal failed.')) - - if isinstance(response.get('context'), str): - print(response['context']) - - save_convo() - sys.exit(0 if was_success else 1) - - if isinstance(response.get('context'), str): - print(f"{ansi.blue('Context:')} {response['context']}") - - if isinstance(response.get('commands'), list): - cmd_results = [] - for index, cmd in enumerate(response['commands']): - if index > 0: - print('') - - print(f"{ansi.blue('Command:')} 
{ansi.dim(cmd)}") - if not OPTIONS['dangerously_skip_prompts']: - if prompt_user_yn('OK to run command?'): - clear_prev_line() - else: - save_convo() - sys.exit(1) - stdout, exit_code = exec_cmd(cmd) - - cmd_ansi_color = ansi.green if exit_code == 0 else ansi.red - print(f"{cmd_ansi_color('Exit code:')} {ansi.dim(exit_code)}") - print(ansi.dim(stdout)) - cmd_results.append({"command": "cmd", "stdout": stdout, "exit_code": exit_code}) - - if exit_code != 0: - break - - messages.append({"role": "user", "content": json.dumps(cmd_results)}) - else: - print(ansi.red('ERROR: No further commands provided, and no success/failure signal was provided')) - save_convo() - sys.exit(1) - -if __name__ == "__main__": - helptext = 'Usage:\ngpt_cmd \ngpt_cmd --get-convos-dir' - - if len(sys.argv) != 2: - print(helptext) - sys.sys.exit(1) - - if sys.argv[1] == '--help': - print(helptext) - sys.exit(0) - - if sys.argv[1] == '--get-convos-dir': - print(CONVOS_DIR) - sys.exit(0) - - goal = sys.argv[1] - main(goal) diff --git a/gpt_cmd.spec b/gpt_cmd.spec deleted file mode 100644 index 3efab29..0000000 --- a/gpt_cmd.spec +++ /dev/null @@ -1,38 +0,0 @@ -# -*- mode: python ; coding: utf-8 -*- - - -a = Analysis( - ['gpt_cmd.py'], - pathex=[], - binaries=[], - datas=[], - hiddenimports=[], - hookspath=[], - hooksconfig={}, - runtime_hooks=[], - excludes=[], - noarchive=False, - optimize=0, -) -pyz = PYZ(a.pure) - -exe = EXE( - pyz, - a.scripts, - a.binaries, - a.datas, - [], - name='gpt_cmd', - debug=False, - bootloader_ignore_signals=False, - strip=False, - upx=True, - upx_exclude=[], - runtime_tmpdir=None, - console=True, - disable_windowed_traceback=False, - argv_emulation=False, - target_arch=None, - codesign_identity=None, - entitlements_file=None, -) diff --git a/install.sh b/install.sh index c7f3c31..7dedd38 100755 --- a/install.sh +++ b/install.sh @@ -2,15 +2,8 @@ set -e -if [[ "$OSTYPE" == "linux"* ]]; then - OS="linux" -elif [[ "$OSTYPE" == "darwin"* ]]; then - OS="macos" -else - 
OS="unknown" -fi - ansi_blue='\033[94m' +ansi_dim='\033[2m' ansi_green='\033[92m' ansi_red='\033[91m' ansi_yellow='\033[93m' ansi_reset='\033[0m' @@ -19,6 +12,9 @@ function print_blue() { printf "${ansi_blue}%b${ansi_reset}" "$1" } +function print_dim() { + printf "${ansi_dim}%b${ansi_reset}" "$1" +} function print_green() { printf "${ansi_green}%b${ansi_reset}" "$1" } @@ -38,8 +34,9 @@ function log_warning() { function fetch_latest_binary() { local github_repo="$1" - local dir_path="$2" - local binary_name="$3" + local os_name="$2" + local dir_path="$3" + local binary_name="$4" # detect which fetch tool is available on the machine local fetch_tool @@ -69,39 +66,43 @@ function fetch_latest_binary() { | sed -E 's/.*"tag_name": "(.*)",/\1/' \ | head -1 \ )" - local binary_urls="$( \ + # this is just used to validate that there are any binary urls + # resolved for the latest release + # (to determine if the GH call failed or if there's just not a file for this OS) + local latest_binary_urls="$( \ echo "$releases_res" \ | grep "releases/download/$latest_version/gpt_cmd-" \ + )" + local os_binary_url="$( \ + echo "$releases_res" \ + | grep "releases/download/$latest_version/gpt_cmd-${os_name}[.\"]" \ | sed -E 's/[ \t]+"browser_download_url": "([^"]+)",?/\1/' \ )" - local latest_binary_url="" - for url in $binary_urls; do - os="$(echo "$url" | sed -E 's|.*/gpt_cmd-([^.]*).*|\1|')" - if [ "$os" = "$OS" ]; then - latest_binary_url="$url" - break - fi - done - if [ -z "$latest_binary_url" ]; then - local error_file_name="gpt_cmd_install-error_$(date +"%Y-%m-%d_%H-%M-%S").log" - echo -e "ERROR: unable to find release binary\n" >> "$error_file_name" - echo -e "GitHub releases response body:\n$releases_res" >> "$error_file_name" + if [ -z "$os_binary_url" ]; then + echo "" + if [ -n "$latest_binary_urls" ]; then + log_error "no binary found for OS '${os_name}' on latest release" + else + local error_file_name="gpt_cmd_install-error_$(date +"%Y-%m-%d_%H-%M-%S").log" + echo -e 
"ERROR: unable to find release binary\n" >> "$error_file_name" + echo -e "GitHub releases response body:\n$releases_res" >> "$error_file_name" - log_error "unable to find release binary; see $error_file_name for more info" + log_error "unable to lookup release binaries; see $error_file_name for more info" + fi exit 1 fi # fetch the binary - local file_name="$(basename "$latest_binary_url")" + local file_name="$(basename "$os_binary_url")" local file_path="$dir_path/$file_name" case $fetch_tool in curl) - curl -L -s -S -o "$file_path" "$latest_binary_url";; + curl -L -s -S -o "$file_path" "$os_binary_url";; wget) - wget -q -O "$file_path" "$latest_binary_url";; + wget -q -O "$file_path" "$os_binary_url";; esac if [ ! -e "$file_path" ]; then - log_error "failed to fetch latest release binary ($latest_binary_url)" + log_error "failed to fetch latest release binary ($os_binary_url)" exit 1 fi @@ -111,11 +112,12 @@ function fetch_latest_binary() { function make_binary_executable() { local file_path="$1" + local os_name="$2" chmod +x "$file_path" # try to make MacOS trust the binary file - if [ "$OS" = "macos" ]; then + if [[ "$os_name" == "darwin"* ]]; then if command -v xattr >/dev/null; then if xattr -p com.apple.quarantine "$file_path" &>/dev/null; then xattr -d com.apple.quarantine "$file_path" @@ -142,33 +144,47 @@ function get_profile_file() { done } -function run_install() { - if [ "$OS" = "unknown" ]; then - log_error "OS type '$OSTYPE' not recognized as a supported OS" - exit 1 - fi +function get_os_name() { + local os=$(uname -s | tr '[:upper:]' '[:lower:]') + local arch=$(uname -m) + + case "$arch" in + x86_64) + arch="amd64" ;; + i686) + arch="386" ;; + aarch64) + arch="arm64" ;; + armv7l) + arch="arm" ;; + esac + + echo "${os}-${arch}" +} +function run_install() { + local os_name="$(get_os_name)" local install_dir="$HOME/.gpt_cmd" - echo "Installing to ${install_dir}" + print_blue "Installing to ${install_dir}\n" - print_blue "Attempting to fetch latest 
binary..." + print_dim "Attempting to fetch latest binary..." local repo_name="chrisdothtml/gpt-cmd" local binary_dir_path="$install_dir/bin" local binary_name="gpt_cmd" mkdir -p "$binary_dir_path" - fetch_latest_binary "$repo_name" "$binary_dir_path" "$binary_name" + fetch_latest_binary "$repo_name" "$os_name" "$binary_dir_path" "$binary_name" echo "✅" - print_blue "Making binary executable on your system..." + print_dim "Making binary executable on your system..." local binary_file_path="$binary_dir_path/$binary_name" - make_binary_executable "$binary_file_path" + make_binary_executable "$binary_file_path" "$os_name" echo "✅" local path_update_str="export PATH=\"${binary_dir_path}:\$PATH\"" local profile_file if ! command -v gpt_cmd >/dev/null; then profile_file="$(get_profile_file)" - print_blue "Exposing binary to PATH..." + print_dim "Exposing binary to PATH..." echo -e "\n$path_update_str" >> "$profile_file" echo "✅" fi diff --git a/main.go b/main.go new file mode 100644 index 0000000..bb47a32 --- /dev/null +++ b/main.go @@ -0,0 +1,9 @@ +package main + +import ( + "gpt_cmd/cmd" +) + +func main() { + cmd.Execute() +} diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 8dfce69..0000000 --- a/requirements.txt +++ /dev/null @@ -1,20 +0,0 @@ -altgraph==0.17.4 -annotated-types==0.7.0 -anyio==4.4.0 -certifi==2024.2.2 -distro==1.9.0 -h11==0.14.0 -httpcore==1.0.5 -httpx==0.27.0 -idna==3.7 -macholib==1.16.3 -openai==1.30.4 -packaging==24.0 -pydantic==2.7.2 -pydantic_core==2.18.3 -pyinstaller==6.7.0 -pyinstaller-hooks-contrib==2024.6 -setuptools==70.0.0 -sniffio==1.3.1 -tqdm==4.66.4 -typing_extensions==4.12.0 diff --git a/utils/log_utils.go b/utils/log_utils.go new file mode 100644 index 0000000..a69aa8f --- /dev/null +++ b/utils/log_utils.go @@ -0,0 +1,68 @@ +package utils + +import ( + "bufio" + "fmt" + "os" + "strings" +) + +type Ansi struct{} + +const ( + blue = "\033[94m" + dim = "\033[2m" + green = "\033[92m" + red = "\033[91m" + reset 
= "\033[0m" +) + +func (a Ansi) colorText(text, color string) string { + lines := strings.Split(text, "\n") + for i, line := range lines { + lines[i] = color + line + reset + } + return strings.Join(lines, "\n") +} + +func (a Ansi) Blue(text string) string { + return a.colorText(text, blue) +} + +func (a Ansi) Dim(text string) string { + return a.colorText(text, dim) +} + +func (a Ansi) Green(text string) string { + return a.colorText(text, green) +} + +func (a Ansi) Red(text string) string { + return a.colorText(text, red) +} + +func ClearPrevLine() { + fmt.Print("\033[1A") + fmt.Print("\033[2K") +} + +func PromptUserYN(prompt string) bool { + reader := bufio.NewReader(os.Stdin) + index := 0 + + for { + if index > 0 { + ClearPrevLine() + } + + fmt.Printf("%s (Y/n) ", prompt) + response, _ := reader.ReadString('\n') + response = strings.TrimSpace(strings.ToLower(response)) + + if response == "y" || response == "n" || response == "" { + return response == "y" || response == "" + } + + index++ + } +} diff --git a/utils/utils.go b/utils/utils.go new file mode 100644 index 0000000..5c43d4d --- /dev/null +++ b/utils/utils.go @@ -0,0 +1,71 @@ +package utils + +import ( + "bytes" + "encoding/json" + "os" + "os/exec" + "os/user" + "strings" +) + +func GetHomeDir() string { + usr, err := user.Current() + if err != nil { + panic(err) + } + return usr.HomeDir +} + +func GetEnv(key, defaultValue string) string { + if value, exists := os.LookupEnv(key); exists { + return value + } + return defaultValue +} + +func EnsureDir(directory string) { + if _, err := os.Stat(directory); os.IsNotExist(err) { + os.MkdirAll(directory, os.ModePerm) + } +} + +func WriteFile(filePath, content string) { + err := os.WriteFile(filePath, []byte(content), 0644) + if err != nil { + panic(err) + } +} + +func ExecCmd(command string) (string, int) { + var out bytes.Buffer + + cmd := exec.Command("sh", "-c", command) + cmd.Stdout = &out + cmd.Stderr = &out + err := cmd.Run() + + exitCode := 0 + if err 
!= nil { + exitCode = cmd.ProcessState.ExitCode() + } + + return strings.TrimSpace(out.String()), exitCode +} + +func JsonStringify(input interface{}, useIndent bool) string { + var data []byte + var err error + + if useIndent { + data, err = json.MarshalIndent(input, "", " ") + } else { + data, err = json.Marshal(input) + } + + if err != nil { + panic(err) + } + + return string(data) +}