Skip to content

Commit

Permalink
Simplify LLM config (#120)
Browse files (browse the repository at this point in the history)
  • Loading branch information
AnirudhDagar authored Nov 13, 2024
1 parent 64fb669 commit 96062ca
Show file tree
Hide file tree
Showing 6 changed files with 5 additions and 15 deletions.
7 changes: 2 additions & 5 deletions src/autogluon_assistant/configs/best_quality.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,8 @@ save_artifacts:
feature_transformers:
- _target_: autogluon_assistant.transformer.CAAFETransformer
eval_model: lightgbm
llm_provider: bedrock
llm_model: "anthropic.claude-3-5-sonnet-20241022-v2:0"
# llm_provider: openai
# llm_model: gpt-4o-2024-08-06
llm_provider: ${llm.provider}
llm_model: ${llm.model}
num_iterations: 5
optimization_metric: roc
- _target_: autogluon_assistant.transformer.OpenFETransformer
Expand All @@ -30,7 +28,6 @@ llm:
provider: bedrock
model: "anthropic.claude-3-5-sonnet-20241022-v2:0"
# provider: openai
# api_key_location: OPENAI_API_KEY
# model: gpt-4o-2024-08-06
max_tokens: 512
proxy_url: null
Expand Down
1 change: 0 additions & 1 deletion src/autogluon_assistant/configs/high_quality.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@ llm:
provider: bedrock
model: "anthropic.claude-3-5-sonnet-20241022-v2:0"
# provider: openai
# api_key_location: OPENAI_API_KEY
# model: gpt-4o-2024-08-06
max_tokens: 512
proxy_url: null
Expand Down
1 change: 0 additions & 1 deletion src/autogluon_assistant/configs/medium_quality.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@ llm:
provider: bedrock
model: "anthropic.claude-3-5-sonnet-20241022-v2:0"
# provider: openai
# api_key_location: OPENAI_API_KEY
# model: gpt-4o-2024-08-06
max_tokens: 512
proxy_url: null
Expand Down
6 changes: 3 additions & 3 deletions src/autogluon_assistant/llm/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -140,10 +140,10 @@ def get_valid_providers(cls):

@staticmethod
def _get_openai_chat_model(config: DictConfig) -> AssistantChatOpenAI:
if config.api_key_location in os.environ:
api_key = os.environ[config.api_key_location]
if "OPENAI_API_KEY" in os.environ:
api_key = os.environ["OPENAI_API_KEY"]
else:
raise Exception("OpenAI API env variable not set")
raise Exception("OpenAI API env variable OPENAI_API_KEY not set")

logger.info(f"AGA is using model {config.model} from OpenAI to assist you with the task.")

Expand Down
3 changes: 0 additions & 3 deletions src/autogluon_assistant/ui/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,9 +43,6 @@
# Provider configuration
PROVIDER_MAPPING = {"Claude 3.5 with Amazon Bedrock": "bedrock", "GPT 4o": "openai"}

# TODO: Remove model specific mappings
API_KEY_LOCATION = {"GPT 4o": "OPENAI_API_KEY"}

INITIAL_STAGE = {
"Task Understanding": [],
"Feature Generation": [],
Expand Down
2 changes: 0 additions & 2 deletions src/autogluon_assistant/ui/pages/task.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
import requests
import streamlit as st
from constants import (
API_KEY_LOCATION,
BASE_DATA_DIR,
CAPTIONS,
DATASET_OPTIONS,
Expand Down Expand Up @@ -50,7 +49,6 @@ def update_config_overrides():
if st.session_state.llm:
config_overrides.append(f"llm.model={LLM_MAPPING[st.session_state.llm]}")
config_overrides.append(f"llm.provider={PROVIDER_MAPPING[st.session_state.llm]}")
config_overrides.append(f"llm.api_key_location={API_KEY_LOCATION[st.session_state.llm]}")

if not st.session_state.feature_generation:
config_overrides.append("feature_transformers=[]")
Expand Down

0 comments on commit 96062ca

Please sign in to comment.