Commit 8802df5

Merge pull request #803 from llmware-ai/update-prompt-sources-example
updating prompt_with_sources example
2 parents dc358aa + cb4e413 commit 8802df5

File tree

1 file changed: +18 −4 lines changed


Diff for: examples/Prompts/prompt_with_sources.py

@@ -10,6 +10,7 @@
 from llmware.models import PromptCatalog
 from llmware.library import Library
 from llmware.retrieval import Query
+from llmware.configs import LLMWareConfig
 
 
 def prompt_with_sources(model_name, library_name):
@@ -27,6 +28,11 @@ def prompt_with_sources(model_name, library_name):
 
     prompter = Prompt().load_model(model_name)
 
+    # Use #1 - add_source_document - parses the document in memory, filters the text chunks by query, and then
+    # creates a 'source' context to be passed to the model
+
+    print(f"\n#1 - add a source document file directly into a prompt")
+
     sources2 = prompter.add_source_document(ingestion_folder_path, local_file, query="base salary")
 
     prompt = "What is the base salary amount?"
@@ -35,14 +41,24 @@ def prompt_with_sources(model_name, library_name):
     print (f"- Context: {local_file}\n- Prompt: {prompt}\n- LLM Response:\n{response}")
     prompter.clear_source_materials()
 
+    # Use #2 - add_source_wikipedia - gets a source document from Wikipedia on Barack Obama,
+    # and creates source context
+
+    print(f"\n#2 - add a wikipedia article by api call by topic into a prompt")
+
     prompt = "Was Barack Obama the Prime Minister of Canada?"
     wiki_topic = "Barack Obama"
     prompt_instruction = "yes_no"
     sources3 = prompter.add_source_wikipedia(wiki_topic, article_count=1)
     response = prompter.prompt_with_source(prompt=prompt, prompt_name=prompt_instruction)[0]["llm_response"]
-    print (f"- Context: {local_file}\n- Prompt: {prompt}\n- LLM Response:\n{response}")
+    print (f"- Context: {wiki_topic}\n- Prompt: {prompt}\n- LLM Response:\n{response}")
     prompter.clear_source_materials()
 
+    # Use #3 - add_source_query_results - produces the same results as the first case, but runs a query on the library
+    # and then adds the query results to the prompt which are concatenated into a source context
+
+    print(f"\n#3 - run a query on a library and then pass the query results into a prompt")
+
     query_results = Query(library).text_query("base salary")
     prompt = "What is the annual rate of the base salary?"
     sources4 = prompter.add_source_query_results(query_results)
@@ -55,9 +71,7 @@ def prompt_with_sources(model_name, library_name):
 
 if __name__ == "__main__":
 
-    # to use API-based model for this example, set API keys in os.environ variable
-    # e.g., see example: set_model_api_keys.py
-    # e.g., os.environ["USER_MANAGED_OPENAI_API_KEY"] = "<insert-your-api-key>"
+    LLMWareConfig().set_active_db("sqlite")
 
     # this model is a placeholder which will run on local laptop - swap out for higher accuracy, larger models
     model_name = "llmware/bling-1b-0.1"
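
As a rough, self-contained sketch of how the updated pieces fit together, the snippet below combines the new SQLite configuration with the Wikipedia source pattern shown in the diff. It adds nothing beyond what the diff already demonstrates; the `from llmware.prompts import Prompt` import path is an assumption (the Prompt import is not shown in this hunk), and it assumes llmware is installed with network access for the Wikipedia call.

# sketch only - mirrors the updated example: local SQLite config plus the Wikipedia source pattern
from llmware.configs import LLMWareConfig
from llmware.prompts import Prompt   # assumed import path for Prompt (not shown in this diff)

# store parsed text collections in SQLite - no separate database server or API keys required
LLMWareConfig().set_active_db("sqlite")

# small local placeholder model, as in the example - swap for a larger model for higher accuracy
prompter = Prompt().load_model("llmware/bling-1b-0.1")

# pull one Wikipedia article as the source context, then ask a yes/no question against it
prompter.add_source_wikipedia("Barack Obama", article_count=1)
response = prompter.prompt_with_source(prompt="Was Barack Obama the Prime Minister of Canada?",
                                       prompt_name="yes_no")[0]["llm_response"]
print(response)

# release the attached source before adding a different one
prompter.clear_source_materials()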
