From 40a1335fb48c8d7389c506c532c6674395136b81 Mon Sep 17 00:00:00 2001
From: Eli <43382407+eli64s@users.noreply.github.com>
Date: Sun, 24 Sep 2023 17:30:35 -0500
Subject: [PATCH] ‣ Update CLI to run in offline mode if no API key provided.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 pyproject.toml          | 33 ++++++++++++++++++---------------
 readmeai/builder.py     |  6 +++---
 readmeai/conf.py        | 27 +--------------------------
 readmeai/conf/conf.toml |  4 +---
 readmeai/main.py        | 10 +++++++---
 5 files changed, 30 insertions(+), 50 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 00e19d06..52537d41 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,31 +4,34 @@ build-backend = "poetry.core.masonry.api"
 
 [tool.poetry]
 name = "readmeai"
-version = "0.3.071"
-description = "🚀 Generate beautiful README files automatically, powered by GPT-4 🪐"
+version = "0.3.076"
+description = "🚀 Generate beautiful README.md files from the terminal. Powered by OpenAI's GPT LLMs 💫"
 authors = ["Eli <0x.eli.64s@gmail.com>"]
 license = "MIT"
 readme = "README.md"
 homepage = "https://github.com/eli64s/readme-ai"
 documentation = "https://github.com/eli64s/readme-ai/blob/main/README.md"
 keywords = [
-    'ai',
-    'auto-readme',
-    'automated-readme',
-    'autonomous-agents',
-    'documentation',
-    'documentation-generator',
-    'gpt-3',
-    'gpt-4',
-    'llm-agent',
-    'markdown',
-    'openai',
-    'openai-api',
     'python',
+    'markdown',
     'readme',
+    'documentation',
+    'ai',
     'readme-badges',
+    'openai',
+    'readme-template',
+    'shieldsio',
+    'readme-md',
+    'awesome-readme',
+    'readme-generator',
     'readme-generation',
-    'readme-generator'
+    'gpt-3',
+    'openai-api',
+    'automated-readme',
+    'auto-readme',
+    'readme-md-generator',
+    'gpt-4',
+    'llm-agent'
 ]
 include = ["readmeai", "readmeai.*"]
 
diff --git a/readmeai/builder.py b/readmeai/builder.py
index af01f9b3..9702606b 100644
--- a/readmeai/builder.py
+++ b/readmeai/builder.py
@@ -235,8 +235,8 @@ def create_directory_tree(repo_path: Path) -> str:
         tree_str = run_tree_command(repo_path)
         return f"```bash\n.\n{tree_str}```"
     except Exception as excinfo:
-        logger.warning(f"Error generating directory tree: {excinfo}")
-        return f"```bash\n#{excinfo}```"
+        logger.warning(f"Error running tree command: {excinfo}")
+        return "```bash\n # Error generating directory tree.\n```"
 
 
 def run_tree_command(repo_path: Path) -> str:
@@ -248,4 +248,4 @@ def run_tree_command(repo_path: Path) -> str:
         tree_str = "\n".join(tree_lines)
         return tree_str
     except subprocess.CalledProcessError as excinfo:
-        raise Exception(f"Error executing the tree command: {excinfo}")
+        raise Exception(f"Error running tree command: {excinfo}")
diff --git a/readmeai/conf.py b/readmeai/conf.py
index 65513a9a..3bbbdd97 100644
--- a/readmeai/conf.py
+++ b/readmeai/conf.py
@@ -1,14 +1,12 @@
 """Pydantic models for the readme-ai application."""
 
-import os
 from enum import Enum
 from pathlib import Path
 from typing import Dict, List, Optional
 from urllib.parse import urlparse, urlsplit
 
-import openai
 from pkg_resources import resource_filename
-from pydantic import BaseModel, Field, SecretStr, validator
+from pydantic import BaseModel, validator
 
 from . import factory, logger
 
@@ -33,29 +31,6 @@ class ApiConfig(BaseModel):
     tokens_max: int
     temperature: float
     offline_mode: bool
-    api_key: Optional[SecretStr] = Field(default=None)
-
-    @validator("api_key", pre=True, always=True)
-    def validate_api_key(cls, api_key: Optional[SecretStr]) -> SecretStr:
-        """Validates if the user's OpenAI API key is valid."""
-        if api_key:
-            api_key_str = api_key
-        else:
-            api_key_str = os.environ.get("OPENAI_API_KEY")
-
-        if not api_key_str:
-            raise ValueError("Exception: invalid OpenAI API key.")
-
-        try:
-            openai.api_key = api_key_str
-            openai.Model.list()
-        except (
-            openai.error.AuthenticationError,
-            openai.error.InvalidRequestError,
-        ) as excinfo:
-            raise ValueError("Exception: Invalid OpenAI API key.") from excinfo
-
-        logger.info("OpenAI API key validated.")
 
 
 class GitConfig(BaseModel):
diff --git a/readmeai/conf/conf.toml b/readmeai/conf/conf.toml
index 067c57ec..3fe44da2 100644
--- a/readmeai/conf/conf.toml
+++ b/readmeai/conf/conf.toml
@@ -6,7 +6,7 @@ encoding = "cl100k_base"
 rate_limit = 5
 tokens = 650
 tokens_max = 3800
-temperature = 1.2
+temperature = 1.1
 offline_mode = false
 
 # Repository
@@ -106,9 +106,7 @@ intro = """
 """
 tree = """
 \n## 📂 Repository Structure\n
-```bash
 {}
-```
 """
 modules = """
 \n---
diff --git a/readmeai/main.py b/readmeai/main.py
index 2e3cf70b..431c9124 100755
--- a/readmeai/main.py
+++ b/readmeai/main.py
@@ -33,7 +33,8 @@ async def generate_readme(llm: model.OpenAIHandler, offline: bool) -> None:
 
     try:
         temp_dir = utils.clone_repo_to_temp_dir(repository)
-        config.md.tree = builder.create_directory_tree(temp_dir)
+        tree = builder.create_directory_tree(temp_dir)
+        config.md.tree = config.md.tree.format(tree)
         logger.info(f"Directory tree: {config.md.tree}")
 
         scanner = preprocess.RepositoryParserWrapper(config, config_helper)
@@ -125,7 +126,7 @@ async def generate_markdown_text(
 @click.option(
     "-t",
     "--temperature",
-    default=0.9,
+    default=1.1,
     help="OpenAI's temperature parameter, a higher value increases randomness.",
 )
 @click.option(
@@ -150,7 +151,6 @@ def cli(
 ) -> None:
     """Cli entrypoint for readme-ai pypi package."""
     config.paths.readme = output
-    config.api.api_key = api_key
    config.api.engine = engine
     config.api.temperature = temperature
     config.api.offline_mode = offline_mode
@@ -160,6 +160,10 @@ def cli(
     logger.info(f"OpenAI Engine: {config.api.engine}")
     logger.info(f"OpenAI Temperature: {config.api.temperature}")
 
+    if not api_key:
+        logger.error("API key not found, running in offline mode...")
+        offline_mode = True
+
     asyncio.run(main(repository, offline_mode))
 
     logger.info("README-AI execution complete.")
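
Note on the behavior change above: the conf.py hunk removes the pydantic api_key validator that called openai.Model.list() at startup, and the final main.py hunk replaces it with a soft fallback in the CLI, so a missing key now forces offline mode instead of raising. The sketch below isolates that decision; it is not the project's actual entrypoint, and the -k/--api-key and -r/--repository flags plus the OPENAI_API_KEY lookup are assumptions for illustration. Only the `if not api_key` block mirrors the patch.

# offline_fallback_sketch.py -- hypothetical, simplified CLI showing the new
# behavior: a missing API key no longer fails validation, it forces offline mode.
import logging

import click

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("readmeai")


@click.command()
@click.option("-k", "--api-key", envvar="OPENAI_API_KEY", default=None)
@click.option("--offline-mode", is_flag=True, default=False)
@click.option("-r", "--repository", required=True)
def cli(api_key: str, offline_mode: bool, repository: str) -> None:
    """Simplified entrypoint showing only the offline-mode decision."""
    if not api_key:
        # Same idea as the new block in main.py: log it and fall back,
        # rather than rejecting the run outright.
        logger.error("API key not found, running in offline mode...")
        offline_mode = True

    click.echo(f"repository={repository} offline_mode={offline_mode}")


if __name__ == "__main__":
    cli()

Because ApiConfig no longer has an api_key field, the CLI also stops assigning config.api.api_key; whether the key is still read from the environment at that point is not shown in the diff, so the envvar lookup above is only illustrative.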
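
A related note on the conf.toml and main.py tree hunks: the prompt template now carries only a bare {} placeholder, and main.py fills it with the already-fenced string returned by builder.create_directory_tree(). Below is a minimal sketch of that flow, assuming a simplified stand-in template and a plain `tree` invocation (the real flags live in run_tree_command and are not shown in this hunk).

# tree_format_sketch.py -- hypothetical, simplified version of the new flow:
# the builder returns a fenced tree (or a stub on failure) and the template
# only provides a "{}" slot to drop it into.
import subprocess
from pathlib import Path


def create_directory_tree(repo_path: Path) -> str:
    """Return the repo tree wrapped in a ```bash fence, or a stub on failure."""
    try:
        tree_str = subprocess.check_output(["tree", str(repo_path)], text=True)
        return f"```bash\n.\n{tree_str}```"
    except Exception:
        return "```bash\n # Error generating directory tree.\n```"


# Stand-in for conf.toml's `tree` template after this patch (placeholder only).
TREE_TEMPLATE = "\n## 📂 Repository Structure\n{}\n"

tree = create_directory_tree(Path("."))
rendered = TREE_TEMPLATE.format(tree)  # mirrors config.md.tree = config.md.tree.format(tree)
print(rendered)

Keeping the fences inside the builder's return value means the fallback string and the success path render the same way, so the template never needs to know whether tree generation succeeded.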