Skip to content

Commit

Permalink
Add and improve documentation.
Browse files Browse the repository at this point in the history
  • Loading branch information
yaph committed Jul 18, 2024
1 parent 02d2ef6 commit 849f4f2
Show file tree
Hide file tree
Showing 4 changed files with 21 additions and 2 deletions.
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ Launch the chat console by typing `charla` in your terminal, or view all availab
* Switch between single-line and multi-line input modes without interrupting the chat session.
* Store default user preferences in a settings file.
* Provide a system prompt for a chat session.
* Load content from local files to add to prompts.
* Load content from local files to use them as prompts.

## Configuration

Expand Down
10 changes: 9 additions & 1 deletion charla/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
Press RETURN to send prompt in single line mode.
Press ALT+M to switch between single and multi line mode.
Press ALT+RETURN to send prompt in multi line mode.
Press CTRL-O to open file and send its content in the prompt.
Press CTRL-O to open file and send its content as the prompt.
Press CTRL-R or CTRL-S to search prompt history.
Press ↑ and ↓ to navigate previously entered prompts.
Press → to complete an auto suggested prompt.
Expand All @@ -41,6 +41,8 @@ def available_models() -> None | list[str]:


def generate(model: str, prompt: str, context: list, output: list, system=None) -> list:
"""Generate and print a response to the prompt and return the context."""

stream = ollama.generate(model=model, prompt=prompt, context=context, stream=True, system=system)

text = ''
Expand All @@ -56,6 +58,8 @@ def generate(model: str, prompt: str, context: list, output: list, system=None)


def prompt_session(argv) -> PromptSession:
"""Create and return a PromptSession object."""

session: PromptSession = PromptSession(message=t_prompt_ml if argv.multiline else t_prompt,
history=FileHistory(argv.prompt_history),
auto_suggest=AutoSuggestFromHistory(),
Expand All @@ -81,6 +85,8 @@ def fetch(_event):


def run(argv: argparse.Namespace) -> None:
"""Run the chat session."""

context: list[int] = [] # Store conversation history to make the model context aware
output = [f'# Chat with: {argv.model}\n'] # List to store output text

Expand Down Expand Up @@ -127,6 +133,8 @@ def run(argv: argparse.Namespace) -> None:


def save(chats_path: Path, output: list[str], model_name: str) -> None:
"""Save the chat as a markdown file."""

if len(output) > 1:
now = datetime.strftime(datetime.now(), '%Y-%m-%d-%H-%M-%S')
slug = re.sub(r'\W', '-', model_name)
Expand Down
4 changes: 4 additions & 0 deletions charla/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,13 +8,17 @@


def handle_models(argv):
    """Handler for the models subcommand: print available models.

    Prints the full model records as indented JSON when --verbose is set,
    otherwise one model name per line.
    """

    if not argv.verbose:
        print('\n'.join(argv.model_names))
        return
    print(json.dumps(argv.models, indent=4))


def main():
"""Create and execute command line interface."""

if (models := chat.available_models()) is None:
sys.exit('No language models available.')
model_names = [m['name'] for m in models]
Expand Down
7 changes: 7 additions & 0 deletions charla/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

def load() -> dict:
"""Return settings from settings file, if it exists."""

if path_settings.exists():
try:
return json.loads(path_settings.read_text())
Expand All @@ -27,18 +28,24 @@ def load() -> dict:


def mkdir(path: Path, **kwds):
    """Create *path* as a directory via pathlib's ``Path.mkdir``.

    Extra keyword arguments (e.g. ``parents``, ``exist_ok``) are passed
    through unchanged. A PermissionError aborts the program with the
    error text as the exit message.
    """

    try:
        path.mkdir(**kwds)
    except PermissionError as exc:
        sys.exit(str(exc))


def settings(user_settings: dict) -> dict:
    """Return the effective settings: defaults overridden by *user_settings*.

    Returns a merged copy instead of mutating the module-level
    ``default_settings`` mapping in place — the original implementation
    updated and returned the shared dict, so overrides from one call
    leaked into subsequent calls and callers mutating the result
    corrupted the defaults. The returned mapping is identical for the
    normal single-call path.
    """

    # Later keys win, so user-supplied values override the defaults.
    return {**default_settings, **user_settings}


def manage(argv: argparse.Namespace) -> None:
"""Handler for settings subcommand."""

if argv.location:
print(path_settings)
else:
Expand Down

0 comments on commit 849f4f2

Please sign in to comment.