From 7b1dd8d94b4b4534e3babe404d751c2e848bdb10 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Wed, 13 Sep 2023 08:35:43 -0700
Subject: [PATCH] v0 litellm

---
 camel/model_backend.py | 3 ++-
 requirements.txt       | 1 +
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/camel/model_backend.py b/camel/model_backend.py
index 5d895b449..58d2d058a 100644
--- a/camel/model_backend.py
+++ b/camel/model_backend.py
@@ -15,6 +15,7 @@
 from typing import Any, Dict

 import openai
+import litellm
 import tiktoken

 from camel.typing import ModelType
@@ -66,7 +67,7 @@ def run(self, *args, **kwargs) -> Dict[str, Any]:
             num_max_token = num_max_token_map[self.model_type.value]
             num_max_completion_tokens = num_max_token - num_prompt_tokens
             self.model_config_dict['max_tokens'] = num_max_completion_tokens
-        response = openai.ChatCompletion.create(*args, **kwargs,
+        response = litellm.completion(*args, **kwargs,
                                                 model=self.model_type.value,
                                                 **self.model_config_dict)

diff --git a/requirements.txt b/requirements.txt
index 3f53d1212..6943b5b84 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -29,6 +29,7 @@ jedi==0.18.2
 jieba==0.42.1
 Jinja2==3.1.2
 lazy-object-proxy==1.9.0
+litellm>=0.1.609
 Markdown==3.4.4
 MarkupSafe==2.1.3
 mccabe==0.7.0