Skip to content

Commit 8fa2396

Browse files
committed
upgrade dependencies
1 parent a5293d0 commit 8fa2396

File tree

8 files changed

+54
-56
lines changed

8 files changed

+54
-56
lines changed

README.md

+10-29
Original file line numberDiff line numberDiff line change
@@ -68,45 +68,26 @@ hints to the student to arrive at correct answer, enhancing student engagement a
6868

6969
### Quick Setup Option
7070

71+
- Make sure you have already installed docker (https://docs.docker.com/get-docker/) and docker-compose (https://docs.docker.com/compose/)
72+
7173
- Clone the Repository
7274
```bash
7375
$ git clone https://github.com/rohitc5/intel-oneAPI/
74-
$ cd Intel-oneAPI
76+
$ cd intel-oneAPI
7577

7678
```
77-
- Start the LEAP RESTFul Service to consume both components (Ask Question/Doubt and Interactive Conversational AI Examiner) as a REST API
79+
- Start the LEAP RESTful Service to consume both components (Ask Question/Doubt and Interactive Conversational AI Examiner) as a REST API.
80+
Also start the webapp demo built using Streamlit.
7881

7982
```bash
80-
$ cd api
83+
# build using docker compose
84+
$ docker-compose build
8185

82-
# build the docker file
83-
$ docker build -t leap-api:v1 .
84-
85-
# get the docker image ID
86-
$ docker images
87-
88-
# run the docker container
89-
$ docker run -it -p 8500:8500 --name=leap-api [IMAGE_ID]
90-
91-
$ cd ../
86+
# start the services
87+
$ docker-compose up
9288

9389
```
9490

95-
- Start the demo webapp build using streamlit
96-
97-
```python
98-
$ cd webapp
99-
100-
# build the docker file
101-
$ docker build -t leap-demo:v1 .
102-
103-
# get the docker image ID
104-
$ docker images
105-
106-
# run the docker container
107-
$ docker run -it -p 8502:8502 --name=leap-demo [IMAGE_ID]
108-
109-
```
11091
- Go to http://localhost:8502
11192

11293
### Manual Setup Option
@@ -115,7 +96,7 @@ hints to the student to arrive at correct answer, enhancing student engagement a
11596

11697
```bash
11798
$ git clone https://github.com/rohitc5/intel-oneAPI/
118-
$ cd Intel-oneAPI
99+
$ cd intel-oneAPI
119100

120101
```
121102

api/requirements.txt

+6-6
Original file line numberDiff line numberDiff line change
@@ -7,14 +7,14 @@ Cython==0.29.35
77
pandas==2.0.2
88
tiktoken==0.4.0
99
langchain==0.0.191
10+
openai==0.27.8
1011
faiss-cpu==1.7.4
12+
nltk==3.8.1
1113
torch==2.0.1
12-
intel_extension_for_pytorch==2.0.100
1314
transformers==4.29.2
14-
optimum[neural-compressor]==1.8.6
15+
optimum[neural-compressor]==1.8.7
1516
neural-compressor==2.1.1
16-
optimum-intel==1.8.1
17-
nltk==3.8.1
17+
webvtt-py==0.4.6
18+
intel_extension_for_pytorch==2.0.100
1819
scikit-learn==1.2.2
19-
scikit-learn-intelex==2023.1.1
20-
webvtt-py==0.4.6
20+
scikit-learn-intelex==2023.1.1

api/src/config.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
ASK_DOUBT_CONFIG = {
77
"emb_model_name_or_path": "ai4bharat/indic-bert",
88
"emb_model_type": "semantic", #options: syntactic, semantic
9-
"qa_model_name_or_path": "vanichandna/indic-bert-finetuned-squad",
9+
"qa_model_name_or_path": "rohitsroch/indic-mALBERT-squad-v2",
1010
"qa_model_type": "vanilla_fp32", #options: vanilla_fp32, quantized_int8
1111

1212
"intel_scikit_learn_enabled": True,
@@ -19,9 +19,9 @@
1919

2020

2121
AI_EXAMINER_CONFIG = {
22-
"llm_name": "hf_pipeline", # azure_gpt3, hf_pipeline
22+
"llm_name": "azure_gpt3", #options: azure_gpt3, hf_pipeline
2323
"azure_deployment_name": "text-davinci-003-prod",
24-
"hf_model_name": "TheBloke/falcon-7b-instruct-GPTQ", # mosaicml/mpt-7b-instruct
24+
"hf_model_name": "mosaicml/mpt-7b-instruct", # mosaicml/mpt-7b-instruct
2525
"device": 0, # cuda:0
2626
"llm_kwargs":{
2727
"do_sample": True,
@@ -44,4 +44,4 @@
4444
# The base URL for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource.
4545
os.environ["OPENAI_API_BASE"] = "https://c5-openai-research.openai.azure.com/"
4646
# The API key for your Azure OpenAI resource. You can find this in the Azure portal under your Azure OpenAI resource.
47-
os.environ["OPENAI_API_KEY"] = "<your_key>"
47+
os.environ["OPENAI_API_KEY"] = "<your_key>"  # SECURITY(review): a real Azure OpenAI key was committed in this diff — it must be revoked/rotated; load secrets from the environment, never hardcode them

api/src/core/llm.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ def get_llm(llm_name="azure_gpt3",
2020
max_tokens=kwargs["llm_kwargs"].get("max_new_tokens", 300),
2121
n=kwargs["llm_kwargs"].get("num_return_sequences", 1),
2222
top_p=kwargs["llm_kwargs"].get("top_p", 1.0),
23-
frequency_penalty=kwargs["llm_kwargs"].get("repetition_penalty", 0)
23+
frequency_penalty=kwargs["llm_kwargs"].get("repetition_penalty", 1.1)
2424
)
2525
elif llm_name == "hf_pipeline":
2626
llm = HuggingFacePipeline.from_model_id(

docker-compose.yml

+22
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
version: '3.4'
2+
services:
3+
leap-api:
4+
restart: always
5+
build:
6+
context: "./api/."
7+
dockerfile: ./Dockerfile
8+
ports:
9+
- "8500:8500"
10+
volumes:
11+
- ./api:/opt
12+
leap-webapp:
13+
restart: always
14+
build:
15+
context: "./webapp/."
16+
dockerfile: ./Dockerfile
17+
ports:
18+
- "8502:8502"
19+
volumes:
20+
- ./webapp:/opt
21+
depends_on:
22+
- leap-api

nlp/requirements.txt

+10-11
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,18 @@
11
pydantic==1.10.8
2-
torch==2.0.1
3-
intel_extension_for_pytorch==2.0.100
4-
transformers==4.29.2
52
datasets==2.12.0
63
evaluate==0.4.0
74
mlflow==2.3.2
8-
accelerate==0.19.0
9-
optimum[neural-compressor]==1.8.6
10-
neural-compressor==2.1.1
11-
optimum-intel==1.8.1
5+
accelerate==0.20.3
126
nltk==3.8.1
13-
webvtt-py==0.4.6
14-
scikit-learn==1.2.2
15-
scikit-learn-intelex==2023.1.1
7+
torch==2.0.1
8+
transformers==4.29.2
169
pandas==2.0.2
1710
modin==0.22.1
1811
wordcloud==1.9.2
19-
matplotlib==3.7.1
12+
matplotlib==3.7.1
13+
optimum[neural-compressor]==1.8.7
14+
neural-compressor==2.1.1
15+
webvtt-py==0.4.6
16+
intel_extension_for_pytorch==2.0.100
17+
scikit-learn==1.2.2
18+
scikit-learn-intelex==2023.1.1

webapp/requirements.txt

+1
Original file line numberDiff line numberDiff line change
@@ -8,3 +8,4 @@ PyYAML==6.0
88
moviepy==1.0.3
99
pydub==0.25.1
1010
SpeechRecognition==3.10.0
11+
requests==2.31.0

webapp/ui/api_handler.py

-5
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,4 @@
1-
import json
21
import requests
3-
import requests
4-
import streamlit as st
5-
from config import API_CONFIG
62

73
from utils.logging_handler import Logger
84

@@ -13,7 +9,6 @@ class PredictAskDoubt(object):
139
def __init__(self, server_config):
1410
self.server_config = server_config
1511

16-
# @st.cache_data
1712
def predict_ask_doubt(self, payload):
1813
"""POST request"""
1914

0 commit comments

Comments
 (0)