diff --git a/README.md b/README.md index 1c53d34..27bbf90 100644 --- a/README.md +++ b/README.md @@ -27,7 +27,7 @@ Launch the chat console by typing `charla` in your terminal, or view all availab * Switch between single-line and multi-line input modes without interrupting the chat session. * Store default user preferences in a settings file. * Provide a system prompt for a chat session. -* Load content from local files to add to prompts. +* Load content from local files to use as prompts. ## Configuration diff --git a/charla/chat.py b/charla/chat.py index ec465f6..73c8fb4 100644 --- a/charla/chat.py +++ b/charla/chat.py @@ -25,7 +25,7 @@ Press RETURN to send prompt in single line mode. Press ALT+M to switch between single and multi line mode. Press ALT+RETURN to send prompt in multi line mode. -Press CTRL-O to open file and send its content in the prompt. +Press CTRL-O to open file and send its content as the prompt. Press CTRL-R or CTRL-S to search prompt history. Press ↑ and ↓ to navigate previously entered prompts. Press → to complete an auto suggested prompt. 
@@ -41,6 +41,8 @@ def available_models() -> None | list[str]: def generate(model: str, prompt: str, context: list, output: list, system=None) -> list: + """Generate and print a response to the prompt and return the context.""" + stream = ollama.generate(model=model, prompt=prompt, context=context, stream=True, system=system) text = '' @@ -56,6 +58,8 @@ def generate(model: str, prompt: str, context: list, output: list, system=None) def prompt_session(argv) -> PromptSession: + """Create and return a PromptSession object.""" + session: PromptSession = PromptSession(message=t_prompt_ml if argv.multiline else t_prompt, history=FileHistory(argv.prompt_history), auto_suggest=AutoSuggestFromHistory(), @@ -81,6 +85,8 @@ def fetch(_event): def run(argv: argparse.Namespace) -> None: + """Run the chat session.""" + context: list[int] = [] # Store conversation history to make the model context aware output = [f'# Chat with: {argv.model}\n'] # List to store output text @@ -127,6 +133,8 @@ def run(argv: argparse.Namespace) -> None: def save(chats_path: Path, output: list[str], model_name: str) -> None: + """Save the chat as a markdown file.""" + if len(output) > 1: now = datetime.strftime(datetime.now(), '%Y-%m-%d-%H-%M-%S') slug = re.sub(r'\W', '-', model_name) diff --git a/charla/cli.py b/charla/cli.py index 6de56aa..70397eb 100755 --- a/charla/cli.py +++ b/charla/cli.py @@ -8,6 +8,8 @@ def handle_models(argv): + """Handler for models subcommand.""" + if argv.verbose: print(json.dumps(argv.models, indent=4)) else: @@ -15,6 +17,8 @@ def handle_models(argv): def main(): + """Create and execute command line interface.""" + if (models := chat.available_models()) is None: sys.exit('No language models available.') model_names = [m['name'] for m in models] diff --git a/charla/config.py b/charla/config.py index 19d1577..a3156e8 100644 --- a/charla/config.py +++ b/charla/config.py @@ -18,6 +18,7 @@ def load() -> dict: """Return settings from settings file, if it exists.""" + if 
path_settings.exists(): try: return json.loads(path_settings.read_text()) @@ -27,6 +28,8 @@ def load() -> dict: def mkdir(path: Path, **kwds): + """Wrapper for pathlib's mkdir.""" + try: path.mkdir(**kwds) except PermissionError as err: @@ -34,11 +37,15 @@ def mkdir(path: Path, **kwds): def settings(user_settings: dict) -> dict: + """Return settings based on user input.""" + default_settings.update(user_settings) return default_settings def manage(argv: argparse.Namespace) -> None: + """Handler for settings subcommand.""" + if argv.location: print(path_settings) else: