Skip to content

Commit

Permalink
- separated prompters into their own files to simplify verification;
Browse files Browse the repository at this point in the history
- added two more ablated prompters to help evaluate CoverUp.
  • Loading branch information
jaltmayerpizzorno committed Jan 21, 2025
1 parent 155779e commit 681f499
Show file tree
Hide file tree
Showing 10 changed files with 408 additions and 368 deletions.
34 changes: 28 additions & 6 deletions src/coverup/coverup.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,34 @@

from . import llm
from .segment import *
from .prompt.prompter import Prompter
from .testrunner import *
from .version import __version__
from .utils import summary_coverage
from . import prompt


def get_prompters() -> dict[str, "type[Prompter]"]:
    """Return the registry of available prompters, keyed by CLI name.

    Note that the values are Prompter *classes*, not instances: callers
    are expected to instantiate them, e.g. ``get_prompters()[name](cmd_args=args)``.

    Returns:
        A mapping from the ``--prompt`` command-line choice to the
        corresponding Prompter subclass.
    """
    # Imported lazily so that merely importing this module does not pull in
    # every prompter implementation; in the future, we may dynamically load
    # based on file names.

    from .prompt.gpt_v1 import GptV1Prompter
    from .prompt.gpt_v2 import GptV2Prompter
    from .prompt.gpt_v2_no_coverage import GptV2NoCoveragePrompter
    from .prompt.gpt_v2_no_coverage_no_function import GptV2NoCoverageNoFunctionPrompter
    from .prompt.gpt_v2_fully_ablated import GptV2FullyAblatedPrompter
    from .prompt.claude import ClaudePrompter

    return {
        "gpt-v1": GptV1Prompter,
        "gpt-v2": GptV2Prompter,
        "gpt-v2-no-coverage": GptV2NoCoveragePrompter,
        "gpt-v2-no-coverage-no-function": GptV2NoCoverageNoFunctionPrompter,
        "gpt-v2-fully-ablated": GptV2FullyAblatedPrompter,
        "claude": ClaudePrompter,
    }


prompter_registry = get_prompters()


def parse_args(args=None):
import argparse
Expand Down Expand Up @@ -55,9 +77,9 @@ def default_model():
ap.add_argument('--model', type=str, default=default_model(),
help='OpenAI model to use')

ap.add_argument('--prompt-family', type=str,
choices=list(prompt.prompters.keys()),
default='gpt',
ap.add_argument('--prompt', '--prompt-family', type=str,
choices=list(prompter_registry.keys()),
default='gpt-v2',
help='Prompt style to use')

ap.add_argument('--model-temperature', type=float, default=0,
Expand Down Expand Up @@ -476,7 +498,7 @@ def extract_python(response: str) -> str:

state = None

async def improve_coverage(chatter: llm.Chatter, prompter: prompt.Prompter, seg: CodeSegment) -> bool:
async def improve_coverage(chatter: llm.Chatter, prompter: Prompter, seg: CodeSegment) -> bool:
"""Works to improve coverage for a code segment."""
global args

Expand Down Expand Up @@ -615,7 +637,7 @@ def main():
if args.rate_limit:
chatter.set_token_rate_limit((args.rate_limit, 60))

prompter = prompt.prompters[args.prompt_family](args=args)
prompter = prompter_registry[args.prompt](cmd_args=args)
for f in prompter.get_functions():
chatter.add_function(f)

Expand Down
Loading

0 comments on commit 681f499

Please sign in to comment.