Skip to content

Commit 4582e53

Browse files
authored
Remove FaqGen from ProductivitySuite (#1709)
Signed-off-by: Xinyao Wang <[email protected]>
1 parent 566ffb2 commit 4582e53

File tree

5 files changed

+6
-85
lines changed

5 files changed

+6
-85
lines changed

ProductivitySuite/docker_compose/intel/cpu/xeon/README.md

Lines changed: 5 additions & 21 deletions
Original file line number | Diff line number | Diff line change
@@ -81,13 +81,6 @@ cd GenAIExamples/CodeGen
8181
docker build --no-cache -t opea/codegen:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile .
8282
```
8383

84-
#### 8.4 Build FAQGen Megaservice Docker Images
85-
86-
```bash
87-
cd GenAIExamples/FaqGen
88-
docker build --no-cache -t opea/faqgen:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f Dockerfile .
89-
```
90-
9184
### 9. Build UI Docker Image
9285

9386
Build frontend Docker image that enables via below command:
@@ -159,7 +152,6 @@ export TGI_LLM_ENDPOINT_FAQGEN="http://${host_ip}:9009"
159152
export TGI_LLM_ENDPOINT_DOCSUM="http://${host_ip}:9009"
160153
export BACKEND_SERVICE_ENDPOINT_CHATQNA="http://${host_ip}:8888/v1/chatqna"
161154
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:5000/v1/dataprep/delete"
162-
export BACKEND_SERVICE_ENDPOINT_FAQGEN="http://${host_ip}:8889/v1/faqgen"
163155
export BACKEND_SERVICE_ENDPOINT_CODEGEN="http://${host_ip}:7778/v1/codegen"
164156
export BACKEND_SERVICE_ENDPOINT_DOCSUM="http://${host_ip}:8890/v1/docsum"
165157
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:5000/v1/dataprep/ingest"
@@ -316,31 +308,23 @@ Please refer to **[keycloak_setup_guide](keycloak_setup_guide.md)** for more det
316308
}'
317309
```
318310

319-
13. FAQGen MegaService
320-
321-
```bash
322-
curl http://${host_ip}:8889/v1/faqgen -H "Content-Type: application/json" -d '{
323-
"messages": "Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5."
324-
}'
325-
```
326-
327-
14. DocSum MegaService
311+
13. DocSum MegaService
328312

329313
```bash
330314
curl http://${host_ip}:8890/v1/docsum -H "Content-Type: application/json" -d '{
331315
"messages": "Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5."
332316
}'
333317
```
334318

335-
15. CodeGen MegaService
319+
14. CodeGen MegaService
336320

337321
```bash
338322
curl http://${host_ip}:7778/v1/codegen -H "Content-Type: application/json" -d '{
339323
"messages": "def print_hello_world():"
340324
}'
341325
```
342326

343-
16. Dataprep Microservice
327+
15. Dataprep Microservice
344328

345329
If you want to update the default knowledge base, you can use the following commands:
346330

@@ -390,7 +374,7 @@ Please refer to **[keycloak_setup_guide](keycloak_setup_guide.md)** for more det
390374
-H "Content-Type: application/json"
391375
```
392376

393-
17. Prompt Registry Microservice
377+
16. Prompt Registry Microservice
394378

395379
If you want to update the default Prompts in the application for your user, you can use the following commands:
396380

@@ -433,7 +417,7 @@ Please refer to **[keycloak_setup_guide](keycloak_setup_guide.md)** for more det
433417
"user": "test", "prompt_id":"{prompt_id to be deleted}"}'
434418
```
435419

436-
18. Chat History Microservice
420+
17. Chat History Microservice
437421

438422
To validate the chatHistory Microservice, you can use the following commands.
439423

ProductivitySuite/docker_compose/intel/cpu/xeon/compose.yaml

Lines changed: 0 additions & 19 deletions
Original file line number | Diff line number | Diff line change
@@ -280,24 +280,6 @@ services:
280280
FAQGen_COMPONENT_NAME: ${FAQGen_COMPONENT_NAME}
281281
LOGFLAG: ${LOGFLAG:-False}
282282
restart: unless-stopped
283-
faqgen-xeon-backend-server:
284-
image: ${REGISTRY:-opea}/faqgen:${TAG:-latest}
285-
container_name: faqgen-xeon-backend-server
286-
depends_on:
287-
- tgi_service
288-
- llm_faqgen
289-
ports:
290-
- "8889:8888"
291-
environment:
292-
no_proxy: ${no_proxy}
293-
https_proxy: ${https_proxy}
294-
http_proxy: ${http_proxy}
295-
MEGA_SERVICE_HOST_IP: ${MEGA_SERVICE_HOST_IP}
296-
LLM_SERVICE_PORT: ${LLM_SERVICE_HOST_PORT_FAQGEN}
297-
LLM_SERVICE_HOST_IP: ${LLM_SERVICE_HOST_IP_FAQGEN}
298-
LOGFLAG: ${LOGFLAG}
299-
ipc: host
300-
restart: always
301283
mongo:
302284
image: mongo:7.0.11
303285
container_name: mongodb
@@ -362,7 +344,6 @@ services:
362344
- APP_BACKEND_SERVICE_ENDPOINT_CHATQNA=${BACKEND_SERVICE_ENDPOINT_CHATQNA}
363345
- APP_BACKEND_SERVICE_ENDPOINT_CODEGEN=${BACKEND_SERVICE_ENDPOINT_CODEGEN}
364346
- APP_BACKEND_SERVICE_ENDPOINT_DOCSUM=${BACKEND_SERVICE_ENDPOINT_DOCSUM}
365-
- APP_BACKEND_SERVICE_ENDPOINT_FAQGEN=${BACKEND_SERVICE_ENDPOINT_FAQGEN}
366347
- APP_DATAPREP_SERVICE_ENDPOINT=${DATAPREP_SERVICE_ENDPOINT}
367348
- APP_DATAPREP_GET_FILE_ENDPOINT=${DATAPREP_GET_FILE_ENDPOINT}
368349
- APP_DATAPREP_DELETE_FILE_ENDPOINT=${DATAPREP_DELETE_FILE_ENDPOINT}

ProductivitySuite/docker_compose/intel/cpu/xeon/set_env.sh

Lines changed: 0 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -33,7 +33,6 @@ export TGI_LLM_ENDPOINT_FAQGEN="http://${host_ip}:9009"
3333
export TGI_LLM_ENDPOINT_DOCSUM="http://${host_ip}:9009"
3434
export BACKEND_SERVICE_ENDPOINT_CHATQNA="http://${host_ip}:8888/v1/chatqna"
3535
export DATAPREP_DELETE_FILE_ENDPOINT="http://${host_ip}:5000/v1/dataprep/delete"
36-
export BACKEND_SERVICE_ENDPOINT_FAQGEN="http://${host_ip}:8889/v1/faqgen"
3736
export BACKEND_SERVICE_ENDPOINT_CODEGEN="http://${host_ip}:7778/v1/codegen"
3837
export BACKEND_SERVICE_ENDPOINT_DOCSUM="http://${host_ip}:8890/v1/docsum"
3938
export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:5000/v1/dataprep/ingest"

ProductivitySuite/docker_image_build/build.yaml

Lines changed: 1 addition & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -68,18 +68,12 @@ services:
6868
context: ../../CodeGen/
6969
dockerfile: ./Dockerfile
7070
image: ${REGISTRY:-opea}/codegen:${TAG:-latest}
71-
faqgen:
71+
llm-faqgen:
7272
build:
7373
args:
7474
http_proxy: ${http_proxy}
7575
https_proxy: ${https_proxy}
7676
no_proxy: ${no_proxy}
77-
context: ../../FaqGen/
78-
dockerfile: ./Dockerfile
79-
image: ${REGISTRY:-opea}/faqgen:${TAG:-latest}
80-
llm-faqgen:
81-
build:
8277
context: GenAIComps
8378
dockerfile: comps/llms/src/faq-generation/Dockerfile
84-
extends: faqgen
8579
image: ${REGISTRY:-opea}/llm-faqgen:${TAG:-latest}

ProductivitySuite/tests/test_compose_on_xeon.sh

Lines changed: 0 additions & 37 deletions
Original file line number | Diff line number | Diff line change
@@ -58,7 +58,6 @@ function start_services() {
5858
export TGI_LLM_ENDPOINT_FAQGEN="http://${ip_address}:9009"
5959
export TGI_LLM_ENDPOINT_DOCSUM="http://${ip_address}:9009"
6060
export BACKEND_SERVICE_ENDPOINT_CHATQNA="http://${ip_address}:8888/v1/chatqna"
61-
export BACKEND_SERVICE_ENDPOINT_FAQGEN="http://${ip_address}:8889/v1/faqgen"
6261
export DATAPREP_DELETE_FILE_ENDPOINT="http://${ip_address}:5000/v1/dataprep/delete"
6362
export BACKEND_SERVICE_ENDPOINT_CODEGEN="http://${ip_address}:7778/v1/codegen"
6463
export DATAPREP_SERVICE_ENDPOINT="http://${ip_address}:5000/v1/dataprep/ingest"
@@ -116,9 +115,6 @@ function validate_service() {
116115
HTTP_RESPONSE=$(curl --silent --write-out "HTTPSTATUS:%{http_code}" -X POST -H 'Content-Type: application/json' "$URL")
117116
elif [[ $SERVICE_NAME == *"dataprep_del"* ]]; then
118117
HTTP_RESPONSE=$(curl --silent --write-out "HTTPSTATUS:%{http_code}" -X POST -d '{"file_path": "all"}' -H 'Content-Type: application/json' "$URL")
119-
elif [[ $SERVICE_NAME == *"faqgen-xeon-backend-server"* ]]; then
120-
local INPUT_DATA="messages=Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5."
121-
HTTP_RESPONSE=$(curl --silent --write-out "HTTPSTATUS:%{http_code}" -X POST -F "$INPUT_DATA" -F "max_tokens=32" -F "stream=False" -H 'Content-Type: multipart/form-data' "$URL")
122118
else
123119
HTTP_RESPONSE=$(curl --silent --write-out "HTTPSTATUS:%{http_code}" -X POST -d "$INPUT_DATA" -H 'Content-Type: application/json' "$URL")
124120
fi
@@ -145,33 +141,6 @@ function validate_service() {
145141
sleep 1s
146142
}
147143

148-
function validate_faqgen_megaservice() {
149-
local URL="$1"
150-
local SERVICE_NAME="$2"
151-
local DOCKER_NAME="$3"
152-
local EXPECTED_RESULT="Embeddings"
153-
local INPUT_DATA="messages=Text Embeddings Inference (TEI) is a toolkit for deploying and serving open source text embeddings and sequence classification models. TEI enables high-performance extraction for the most popular models, including FlagEmbedding, Ember, GTE and E5."
154-
local HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -X POST -F "$INPUT_DATA" -F "max_tokens=32" -F "stream=False" -H 'Content-Type: multipart/form-data' "$URL")
155-
if [ "$HTTP_STATUS" -eq 200 ]; then
156-
echo "[ $SERVICE_NAME ] HTTP status is 200. Checking content..."
157-
158-
local CONTENT=$(curl -s -X POST -F "$INPUT_DATA" -F "max_tokens=32" -F "stream=False" -H 'Content-Type: multipart/form-data' "$URL" | tee ${LOG_PATH}/${SERVICE_NAME}.log)
159-
160-
if echo "$CONTENT" | grep -q "$EXPECTED_RESULT"; then
161-
echo "[ $SERVICE_NAME ] Content is as expected."
162-
else
163-
echo "[ $SERVICE_NAME ] Content does not match the expected result: $CONTENT"
164-
docker logs ${DOCKER_NAME} >> ${LOG_PATH}/${SERVICE_NAME}.log
165-
exit 1
166-
fi
167-
else
168-
echo "[ $SERVICE_NAME ] HTTP status is not 200. Received status was $HTTP_STATUS"
169-
docker logs ${DOCKER_NAME} >> ${LOG_PATH}/${SERVICE_NAME}.log
170-
exit 1
171-
fi
172-
sleep 1s
173-
}
174-
175144
function validate_faqgen() {
176145
local URL="$1"
177146
local EXPECTED_RESULT="$2"
@@ -351,12 +320,6 @@ function validate_megaservice() {
351320
"chatqna-xeon-backend-server" \
352321
'{"messages": "What is the revenue of Nike in 2023?"}'\
353322

354-
# Curl the FAQGenMega Service
355-
validate_faqgen_megaservice \
356-
"${ip_address}:8889/v1/faqgen" \
357-
"faqgen-xeon-backend-server" \
358-
"faqgen-xeon-backend-server"
359-
360323
# Curl the CodeGen Mega Service
361324
validate_service \
362325
"${ip_address}:7778/v1/codegen" \

0 commit comments

Comments (0)