Revert "Merge remote-tracking branch 'origin/develop-ai'"
This reverts commit e13a1ce, reversing
changes made to 21304b8.
Borikhs committed May 6, 2024
1 parent e13a1ce commit 140d004
Showing 11 changed files with 37 additions and 51 deletions.
3 changes: 1 addition & 2 deletions ai/.gitignore
@@ -5,5 +5,4 @@
 *.pyc
 __pycache__
 data/
-cap30.pem
-chatbot.pem
+cap30.pem
9 changes: 0 additions & 9 deletions ai/deepl_test.py

This file was deleted.

Binary file modified ai/llm/__pycache__/llm_rag.cpython-312.pyc
Binary file not shown.
Binary file modified ai/llm/__pycache__/llm_rag.cpython-39.pyc
Binary file not shown.
15 changes: 0 additions & 15 deletions ai/llm/deepl_translator.py

This file was deleted.

35 changes: 18 additions & 17 deletions ai/llm/llm_rag.py
@@ -3,9 +3,8 @@
 from langchain_core.runnables import RunnablePassthrough, RunnableLambda
 from langchain_openai import ChatOpenAI
 from tavily import TavilyClient
-from llm.prompt import casual_prompt, is_qna_prompt, combine_result_prompt, score_prompt
+from llm.prompt import casual_prompt, is_qna_prompt, combine_result_prompt, score_prompt, translate_prompt
 from langchain.retrievers.multi_query import MultiQueryRetriever
-import deepl
 import os

 class LLM_RAG:
@@ -19,9 +18,7 @@ def __init__(self, trace = False):
         self.is_qna_prompt = is_qna_prompt()
         self.combine_result_prompt = combine_result_prompt()
         self.score_prompt = score_prompt()
-        self.deepl = deepl.Translator(os.getenv("DEEPL_API_KEY"))
-        self.ko_query = None
-        self.result_lang = None
+        self.translate_prompt = translate_prompt()
         self.notice_retriever = None
         self.school_retriever = None
         self.notice_multiquery_retriever = None
@@ -97,37 +94,41 @@ def set_chain(self):
             self.score_route
         )

+        self.translate_chain = (
+            self.translate_prompt
+            | self.llm
+            | StrOutputParser()
+        )
+
     def qna_route(self, info):
         if "question" in info["topic"].lower():
-            self.result = self.rag_combine_chain.invoke(self.ko_query)
-            score = self.score_chain.invoke({"question" : self.ko_query, "answer": self.result})
-            self.score_invoke_chain.invoke({"score" : score, "question": self.ko_query})
+            self.result = self.rag_combine_chain.invoke(info["question"])
+            score = self.score_chain.invoke({"question" : self.question, "answer": self.result})
+            self.score_invoke_chain.invoke({"score" : score, "question": self.question})

         elif "casual" in info["topic"].lower():
-            self.result = self.casual_answer_chain.invoke(self.question)
+            self.result = self.casual_answer_chain.invoke(info['question'])

         else:
-            self.result = self.rag_combine_chain.invoke(self.question)
+            self.result = self.rag_combine_chain.invoke(info["question"])


     def score_route(self, info):
         if "good" in info["score"].lower():
-            self.result = self.deepl.translate_text(self.result, target_lang=self.result_lang).text
             return self.result
         else:
             print('-- google search --')
-            content = self.tavily.qna_search(query='국민대학교 ' + self.ko_query)
-            self.result = "I couldn't find the answer, so I searched on Google.\n\n" + content
-            self.result = self.deepl.translate_text(self.result, target_lang=self.result_lang).text
+            content = self.tavily.qna_search(query='국민대학교 ' + self.question)
+            self.result = "답을 찾을 수 없어서 구글에 검색했습니다.\n\n"
+            self.result += self.translate_chain.invoke({'content' : content, 'question':self.question})
             return self.result

     def format_docs(self, docs):
         # 검색한 문서 결과를 하나의 문단으로 합쳐줍니다.
         return "\n\n".join(doc.page_content + '\nmetadata=' + str(doc.metadata) for doc in docs)

-    def query(self, question, result_lang):
+    def query(self, question):
         self.question = question
-        self.ko_query = self.deepl.translate_text(self.question, target_lang='ko').text
-        self.result_lang = result_lang
         self.qna_route_chain.invoke(question)
         return self.result
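
For context on what this revert restores: the fallback path in score_route now sends Tavily search results through an LCEL translate_chain (prompt | LLM | string parser) instead of DeepL. Below is a minimal standalone sketch of that chain, assuming OPENAI_API_KEY is set; the model name and sample inputs are illustrative assumptions, not taken from the commit.

# Minimal sketch of the restored translate_chain; model name and inputs are assumptions.
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate
from langchain_openai import ChatOpenAI

translate_prompt = PromptTemplate.from_template("""
You are a translator with vast knowledge of human languages. Translate the content into the language corresponding to the question. You should only translate and never answer questions.
question : {question}
content : {content}
result :""")

llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)  # assumed model, not specified in the commit
translate_chain = translate_prompt | llm | StrOutputParser()

# Mirrors score_route's fallback: translate external search content into the question's language.
print(translate_chain.invoke({
    "content": "Kookmin University was founded in 1946.",
    "question": "국민대학교는 언제 설립됐나요?",
}))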
9 changes: 9 additions & 0 deletions ai/llm/prompt.py
@@ -39,4 +39,13 @@ def score_prompt():
     Classification:"""
     )
     return prompt

+def translate_prompt():
+    prompt = PromptTemplate.from_template("""
+    You are a translator with vast knowledge of human languages. Translate the content into the language corresponding to the question. You should only translate and never answer questions.
+    question : {question}
+    content : {content}
+    result :""")
+    return prompt
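
A quick way to see what the restored translate_prompt actually sends to the model is to render it directly. The sample values below are hypothetical, and the import path assumes the ai/ package layout used elsewhere in this commit.

# Illustrative only: render the restored prompt with sample values.
from llm.prompt import translate_prompt

prompt = translate_prompt()
print(prompt.format(
    question="What are the library's opening hours?",
    content="도서관은 평일 오전 9시부터 오후 10시까지 운영합니다.",
))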
3 changes: 1 addition & 2 deletions ai/requirements.txt
@@ -6,5 +6,4 @@ langchainhub==0.1.15
 beautifulsoup4==4.12.3
 pypdf==4.1.0
 python-dotenv==1.0.1
-langchain-text-splitters==0.0.1
-deepl
+langchain-text-splitters==0.0.1
3 changes: 1 addition & 2 deletions ai/run_chatbot.py
@@ -11,7 +11,6 @@
 load_dotenv()

 os.environ['OPENAI_API_KEY'] = os.getenv('OPENAI_API_KEY')
-os.environ['DEEPL_API_KEY'] = os.getenv('DEEPL_API_KEY')

 # LangSmith 사용시 아래 주석을 해제
 # os.environ['LANGCHAIN_API_KEY'] = os.getenv('LANGCHAIN_API_KEY')
@@ -41,5 +40,5 @@
     if q == str(0):
         break
     print('AI : ', end='')
-    print(llm.query(q, 'ZH'))
+    print(llm.query(q))
     print()
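
The CLI change above is just the signature update: query no longer takes a target-language code, so the answer's language follows the question itself. A hedged sketch of the updated call, omitting the retriever and vector-store setup that run_chatbot.py performs before this point; the import path is assumed from the ai/ layout.

# Sketch only: real usage configures retrievers before set_chain(), as run_chatbot.py does.
from llm.llm_rag import LLM_RAG

llm = LLM_RAG()
llm.set_chain()
print(llm.query("도서관은 몇 시까지 운영하나요?"))  # no second 'ZH' argument after the revert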
11 changes: 7 additions & 4 deletions ai/server.py
@@ -11,7 +11,6 @@

 class Query(BaseModel):
     query: str
-    target_lang: str

 @asynccontextmanager
 async def lifespan(app:FastAPI):
@@ -59,14 +58,18 @@ async def lifespan(app:FastAPI):
 async def initiate():
     return "안녕하세요! 국민대학교 전용 챗봇 KUKU입니다. 국민대학교에 대한 건 모든 질문해주세요!"

-@app.post("/api/chatbot")
+@app.post("/query")
 async def query(query: Query):
-    return {'success': 'True',
+    return {'code': '200',
+            'message': 'success',
             'response': {
-                'answer': llm.query(query.query, query.target_lang)
+                'answer': llm.query(query.query)
             }}

+@app.post("/input")
+async def input(data: UploadFile):
+    vdb.add_content(data.file)
+    return
+

 if __name__ == "__main__":
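
For the restored HTTP surface, a hedged client sketch: the base URL, port, and file name are assumptions, since the commit only shows the route handlers.

# Hedged client sketch for the restored endpoints; base URL and file name are assumptions.
import requests

BASE = "http://localhost:8000"

# /query now takes only the question text; the target_lang field is gone after the revert.
resp = requests.post(f"{BASE}/query", json={"query": "국민대학교 도서관 운영 시간 알려줘"})
print(resp.json())  # {'code': '200', 'message': 'success', 'response': {'answer': ...}}

# /input uploads a document for the vector store (matches the UploadFile parameter named 'data').
with open("notice.pdf", "rb") as f:
    requests.post(f"{BASE}/input", files={"data": f})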
Binary file modified ai/vectordb/__pycache__/vector_db.cpython-39.pyc
Binary file not shown.
