
Commit 3913c7b

letonghan and pre-commit-ci[bot] authored on Aug 6, 2024
Refine docker_compose for dataprep param settings (#486)
* add TEI_ENDPOINT for dataprep
* add TEI_ENDPOINT for dataprep
* update ui test file
* add dataprep test into script
* refine e2e test of chatqna
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci
* fix missing dockerfile path
* check test issue
* check dataprep log
* reverse codetrans
* cd log path
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci
* add hf token in compose yaml
* add redis_host
* add upload link test
* re-format validate dataprep
* fix typo
* fix frontend env name
* fix ci issue

Signed-off-by: letonghan <[email protected]>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
1 parent abc02e1 commit 3913c7b

10 files changed, +183 -64 lines changed
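The dataprep containers in the compose files below now read TEI_ENDPOINT, HUGGINGFACEHUB_API_TOKEN, and, in the gpu/xeon variants, REDIS_HOST from the host environment, so these variables have to be exported before docker compose up. A minimal pre-launch sketch; host_ip and the TEI port are assumptions drawn from the test scripts in this commit (8090 for the Gaudi compose, 6006 for the Xeon compose files):

    # Assumed helper variable for the host running the stack
    export host_ip=$(hostname -I | awk '{print $1}')
    # Variables the dataprep service now consumes
    export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:8090"   # use 6006 for the Xeon compose files
    export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
    export REDIS_HOST=${host_ip}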
 

ChatQnA/docker/gaudi/compose.yaml

+3 lines changed

@@ -16,6 +16,7 @@ services:
     container_name: dataprep-redis-server
     depends_on:
       - redis-vector-db
+      - tei-embedding-service
     ports:
       - "6007:6007"
       - "6008:6008"
@@ -26,6 +27,8 @@ services:
       https_proxy: ${https_proxy}
       REDIS_URL: ${REDIS_URL}
       INDEX_NAME: ${INDEX_NAME}
+      TEI_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
+      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
   tei-embedding-service:
     image: opea/tei-gaudi:latest
     container_name: tei-embedding-gaudi-server
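After bringing the stack up, one way to confirm that the dataprep container actually received the new settings is to inspect its environment. A debugging sketch, not part of this commit; the container name comes from the compose file above:

    # Show the dataprep-related variables inside the running container
    docker exec dataprep-redis-server env | grep -E 'TEI_ENDPOINT|HUGGINGFACEHUB_API_TOKEN|REDIS_URL|INDEX_NAME'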

ChatQnA/docker/gaudi/compose_guardrails.yaml

+2 lines changed

@@ -16,6 +16,7 @@ services:
     container_name: dataprep-redis-server
     depends_on:
       - redis-vector-db
+      - tei-embedding-service
     ports:
       - "6007:6007"
       - "6008:6008"
@@ -26,6 +27,7 @@ services:
       https_proxy: ${https_proxy}
       REDIS_URL: ${REDIS_URL}
       INDEX_NAME: ${INDEX_NAME}
+      TEI_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
   tgi-guardrails-service:
     image: ghcr.io/huggingface/tgi-gaudi:2.0.1
     container_name: tgi-guardrails-server

ChatQnA/docker/gaudi/compose_vllm.yaml

+2 lines changed

@@ -16,6 +16,7 @@ services:
     container_name: dataprep-redis-server
     depends_on:
       - redis-vector-db
+      - tei-embedding-service
     ports:
       - "6007:6007"
       - "6008:6008"
@@ -26,6 +27,7 @@ services:
       https_proxy: ${https_proxy}
       REDIS_URL: ${REDIS_URL}
       INDEX_NAME: ${INDEX_NAME}
+      TEI_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
   tei-embedding-service:
     image: opea/tei-gaudi:latest
     container_name: tei-embedding-gaudi-server

ChatQnA/docker/gaudi/compose_vllm_ray.yaml

+2 lines changed

@@ -16,6 +16,7 @@ services:
     container_name: dataprep-redis-server
     depends_on:
       - redis-vector-db
+      - tei-embedding-service
     ports:
       - "6007:6007"
       - "6008:6008"
@@ -26,6 +27,7 @@ services:
       https_proxy: ${https_proxy}
       REDIS_URL: ${REDIS_URL}
       INDEX_NAME: ${INDEX_NAME}
+      TEI_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
   tei-embedding-service:
     image: opea/tei-gaudi:latest
     container_name: tei-embedding-gaudi-server

ChatQnA/docker/gpu/compose.yaml

+4 lines changed

@@ -16,6 +16,7 @@ services:
     container_name: dataprep-redis-server
     depends_on:
       - redis-vector-db
+      - tei-embedding-service
     ports:
       - "6007:6007"
       - "6008:6008"
@@ -25,7 +26,10 @@ services:
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       REDIS_URL: ${REDIS_URL}
+      REDIS_HOST: ${REDIS_HOST}
       INDEX_NAME: ${INDEX_NAME}
+      TEI_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
+      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
   tei-embedding-service:
     image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
     container_name: tei-embedding-server

ChatQnA/docker/xeon/compose.yaml

+5 lines changed

@@ -16,6 +16,7 @@ services:
     container_name: dataprep-redis-server
     depends_on:
       - redis-vector-db
+      - tei-embedding-service
     ports:
       - "6007:6007"
       - "6008:6008"
@@ -25,7 +26,10 @@ services:
       http_proxy: ${http_proxy}
       https_proxy: ${https_proxy}
       REDIS_URL: ${REDIS_URL}
+      REDIS_HOST: ${REDIS_HOST}
       INDEX_NAME: ${INDEX_NAME}
+      TEI_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
+      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
   tei-embedding-service:
     image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
     container_name: tei-embedding-server
@@ -154,6 +158,7 @@ services:
       - redis-vector-db
       - tei-embedding-service
       - embedding
+      - dataprep-redis-service
       - retriever
       - tei-reranking-service
       - reranking

ChatQnA/docker/xeon/docker_compose_qdrant.yaml

+2 lines changed

@@ -16,6 +16,7 @@ services:
     container_name: dataprep-qdrant-server
     depends_on:
       - qdrant-vector-db
+      - tei-embedding-service
     ports:
       - "6000:6000"
     environment:
@@ -25,6 +26,7 @@ services:
       QDRANT: ${host_ip}
       QDRANT_PORT: 6333
       COLLECTION_NAME: ${INDEX_NAME}
+      TEI_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
   tei-embedding-service:
     image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.2
     container_name: tei-embedding-server

ChatQnA/docker/xeon/docker_compose_vllm.yaml

+2 lines changed

@@ -16,6 +16,7 @@ services:
     container_name: dataprep-redis-server
     depends_on:
       - redis-vector-db
+      - tei-embedding-service
     ports:
       - "6007:6007"
       - "6008:6008"
@@ -26,6 +27,7 @@ services:
       https_proxy: ${https_proxy}
       REDIS_URL: ${REDIS_URL}
       INDEX_NAME: ${INDEX_NAME}
+      TEI_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
   tei-embedding-service:
     image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.2
     container_name: tei-embedding-server

ChatQnA/tests/test_chatqna_on_gaudi.sh

+78 -32 lines changed

@@ -52,6 +52,7 @@ function start_services() {
     export TEI_RERANKING_ENDPOINT="http://${ip_address}:8808"
     export TGI_LLM_ENDPOINT="http://${ip_address}:8008"
     export REDIS_URL="redis://${ip_address}:6379"
+    export REDIS_HOST=${ip_address}
     export INDEX_NAME="rag-redis"
     export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
     export MEGA_SERVICE_HOST_IP=${ip_address}
@@ -61,6 +62,8 @@ function start_services() {
     export LLM_SERVICE_HOST_IP=${ip_address}
     export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:8888/v1/chatqna"
     export DATAPREP_SERVICE_ENDPOINT="http://${ip_address}:6007/v1/dataprep"
+    export DATAPREP_GET_FILE_ENDPOINT="http://${ip_address}:6008/v1/dataprep/get_file"
+    export DATAPREP_DELETE_FILE_ENDPOINT="http://${ip_address}:6009/v1/dataprep/delete_file"

     sed -i "s/backend_address/$ip_address/g" $WORKPATH/docker/ui/svelte/.env

@@ -93,104 +96,147 @@ function start_services() {
     done
 }

-function validate_services() {
+function validate_service() {
     local URL="$1"
     local EXPECTED_RESULT="$2"
     local SERVICE_NAME="$3"
     local DOCKER_NAME="$4"
     local INPUT_DATA="$5"

-    local HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -X POST -d "$INPUT_DATA" -H 'Content-Type: application/json' "$URL")
-    if [ "$HTTP_STATUS" -eq 200 ]; then
-        echo "[ $SERVICE_NAME ] HTTP status is 200. Checking content..."
+    if [[ $SERVICE_NAME == *"dataprep_upload_file"* ]]; then
+        cd $LOG_PATH
+        HTTP_RESPONSE=$(curl --silent --write-out "HTTPSTATUS:%{http_code}" -X POST -F 'files=@./dataprep_file.txt' -H 'Content-Type: multipart/form-data' "$URL")
+    elif [[ $SERVICE_NAME == *"dataprep_upload_link"* ]]; then
+        HTTP_RESPONSE=$(curl --silent --write-out "HTTPSTATUS:%{http_code}" -X POST -F 'link_list=["https://www.ces.tech/"]' "$URL")
+    elif [[ $SERVICE_NAME == *"dataprep_get"* ]]; then
+        HTTP_RESPONSE=$(curl --silent --write-out "HTTPSTATUS:%{http_code}" -X POST -H 'Content-Type: application/json' "$URL")
+    elif [[ $SERVICE_NAME == *"dataprep_del"* ]]; then
+        HTTP_RESPONSE=$(curl --silent --write-out "HTTPSTATUS:%{http_code}" -X POST -d '{"file_path": "all"}' -H 'Content-Type: application/json' "$URL")
+    else
+        HTTP_RESPONSE=$(curl --silent --write-out "HTTPSTATUS:%{http_code}" -X POST -d "$INPUT_DATA" -H 'Content-Type: application/json' "$URL")
+    fi
+    HTTP_STATUS=$(echo $HTTP_RESPONSE | tr -d '\n' | sed -e 's/.*HTTPSTATUS://')
+    RESPONSE_BODY=$(echo $HTTP_RESPONSE | sed -e 's/HTTPSTATUS\:.*//g')

-    local CONTENT=$(curl -s -X POST -d "$INPUT_DATA" -H 'Content-Type: application/json' "$URL" | tee ${LOG_PATH}/${SERVICE_NAME}.log)
+    docker logs ${DOCKER_NAME} >> ${LOG_PATH}/${SERVICE_NAME}.log

-    if echo "$CONTENT" | grep -q "$EXPECTED_RESULT"; then
-        echo "[ $SERVICE_NAME ] Content is as expected."
-    else
-        echo "[ $SERVICE_NAME ] Content does not match the expected result: $CONTENT"
-        docker logs ${DOCKER_NAME} >> ${LOG_PATH}/${SERVICE_NAME}.log
-        exit 1
-    fi
-    else
+    # check response status
+    if [ "$HTTP_STATUS" -ne "200" ]; then
         echo "[ $SERVICE_NAME ] HTTP status is not 200. Received status was $HTTP_STATUS"
-        docker logs ${DOCKER_NAME} >> ${LOG_PATH}/${SERVICE_NAME}.log
         exit 1
+    else
+        echo "[ $SERVICE_NAME ] HTTP status is 200. Checking content..."
+    fi
+    # check response body
+    if [[ "$RESPONSE_BODY" != *"$EXPECTED_RESULT"* ]]; then
+        echo "[ $SERVICE_NAME ] Content does not match the expected result: $RESPONSE_BODY"
+        exit 1
+    else
+        echo "[ $SERVICE_NAME ] Content is as expected."
     fi
+
     sleep 1s
 }

 function validate_microservices() {
     # Check if the microservices are running correctly.

     # tei for embedding service
-    validate_services \
+    validate_service \
         "${ip_address}:8090/embed" \
-        "\[\[" \
+        "[[" \
         "tei-embedding" \
         "tei-embedding-gaudi-server" \
         '{"inputs":"What is Deep Learning?"}'

     # embedding microservice
-    validate_services \
+    validate_service \
         "${ip_address}:6000/v1/embeddings" \
-        '"text":"What is Deep Learning?","embedding":\[' \
-        "embedding" \
+        '"text":"What is Deep Learning?","embedding":[' \
+        "embedding-microservice" \
         "embedding-tei-server" \
         '{"text":"What is Deep Learning?"}'

     sleep 1m # retrieval can't curl as expected, try to wait for more time

+    # test /v1/dataprep upload file
+    echo "Deep learning is a subset of machine learning that utilizes neural networks with multiple layers to analyze various levels of abstract data representations. It enables computers to identify patterns and make decisions with minimal human intervention by learning from large amounts of data." > $LOG_PATH/dataprep_file.txt
+    validate_service \
+        "http://${ip_address}:6007/v1/dataprep" \
+        "Data preparation succeeded" \
+        "dataprep_upload_file" \
+        "dataprep-redis-server"
+
+    # test /v1/dataprep upload link
+    validate_service \
+        "http://${ip_address}:6007/v1/dataprep" \
+        "Data preparation succeeded" \
+        "dataprep_upload_link" \
+        "dataprep-redis-server"
+
+    # test /v1/dataprep/get_file
+    validate_service \
+        "http://${ip_address}:6008/v1/dataprep/get_file" \
+        '{"name":' \
+        "dataprep_get" \
+        "dataprep-redis-server"
+
+    # test /v1/dataprep/delete_file
+    validate_service \
+        "http://${ip_address}:6009/v1/dataprep/delete_file" \
+        '{"status":true}' \
+        "dataprep_del" \
+        "dataprep-redis-server"
+
     # retrieval microservice
     test_embedding=$(python3 -c "import random; embedding = [random.uniform(-1, 1) for _ in range(768)]; print(embedding)")
-    validate_services \
+    validate_service \
         "${ip_address}:7000/v1/retrieval" \
-        " " \
-        "retrieval" \
+        "retrieved_docs" \
+        "retrieval-microservice" \
         "retriever-redis-server" \
         "{\"text\":\"What is the revenue of Nike in 2023?\",\"embedding\":${test_embedding}}"

     # tei for rerank microservice
-    validate_services \
+    validate_service \
         "${ip_address}:8808/rerank" \
         '{"index":1,"score":' \
         "tei-rerank" \
         "tei-reranking-gaudi-server" \
         '{"query":"What is Deep Learning?", "texts": ["Deep Learning is not...", "Deep learning is..."]}'

     # rerank microservice
-    validate_services \
+    validate_service \
         "${ip_address}:8000/v1/reranking" \
         "Deep learning is..." \
-        "rerank" \
+        "rerank-microservice" \
         "reranking-tei-gaudi-server" \
         '{"initial_query":"What is Deep Learning?", "retrieved_docs": [{"text":"Deep Learning is not..."}, {"text":"Deep learning is..."}]}'

     # tgi for llm service
-    validate_services \
+    validate_service \
         "${ip_address}:8008/generate" \
         "generated_text" \
         "tgi-llm" \
         "tgi-gaudi-server" \
         '{"inputs":"What is Deep Learning?","parameters":{"max_new_tokens":17, "do_sample": true}}'

     # llm microservice
-    validate_services \
+    validate_service \
         "${ip_address}:9000/v1/chat/completions" \
         "data: " \
-        "llm" \
+        "llm-microservice" \
         "llm-tgi-gaudi-server" \
         '{"query":"What is Deep Learning?"}'

 }

 function validate_megaservice() {
     # Curl the Mega Service
-    validate_services \
+    validate_service \
         "${ip_address}:8888/v1/chatqna" \
-        "billion" \
-        "mega-chatqna" \
+        "data: " \
+        "chatqna-megaservice" \
         "chatqna-gaudi-backend-server" \
         '{"messages": "What is the revenue of Nike in 2023?"}'

@@ -241,7 +287,7 @@ function main() {
     elif [ "${mode}" == "" ]; then
         validate_microservices
         validate_megaservice
-        # validate_frontend
+        validate_frontend
     fi

     stop_docker
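The four new dataprep test cases above map to endpoints that can also be exercised by hand when debugging a deployment. A minimal sketch using the same curl invocations as validate_service; host_ip is an assumed variable for the machine running the stack, sample.txt is a hypothetical local file, and the 6007/6008/6009 ports follow the compose files in this commit:

    # Upload a local file for ingestion (multipart form, field name "files")
    curl -X POST -F 'files=@./sample.txt' \
        -H 'Content-Type: multipart/form-data' \
        "http://${host_ip}:6007/v1/dataprep"

    # Upload a web link instead of a file
    curl -X POST -F 'link_list=["https://www.ces.tech/"]' \
        "http://${host_ip}:6007/v1/dataprep"

    # List the files that have been ingested
    curl -X POST -H 'Content-Type: application/json' \
        "http://${host_ip}:6008/v1/dataprep/get_file"

    # Delete everything that was ingested
    curl -X POST -d '{"file_path": "all"}' \
        -H 'Content-Type: application/json' \
        "http://${host_ip}:6009/v1/dataprep/delete_file"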

ChatQnA/tests/test_chatqna_on_xeon.sh

+83 -32 lines changed

@@ -2,7 +2,7 @@
 # Copyright (C) 2024 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0

-set -e
+set -xe
 echo "IMAGE_REPO=${IMAGE_REPO}"

 WORKPATH=$(dirname "$PWD")
@@ -39,6 +39,7 @@ function start_services() {
     export TEI_RERANKING_ENDPOINT="http://${ip_address}:8808"
     export TGI_LLM_ENDPOINT="http://${ip_address}:9009"
     export REDIS_URL="redis://${ip_address}:6379"
+    export REDIS_HOST=${ip_address}
     export INDEX_NAME="rag-redis"
     export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
     export MEGA_SERVICE_HOST_IP=${ip_address}
@@ -48,6 +49,8 @@ function start_services() {
     export LLM_SERVICE_HOST_IP=${ip_address}
     export BACKEND_SERVICE_ENDPOINT="http://${ip_address}:8888/v1/chatqna"
     export DATAPREP_SERVICE_ENDPOINT="http://${ip_address}:6007/v1/dataprep"
+    export DATAPREP_GET_FILE_ENDPOINT="http://${ip_address}:6008/v1/dataprep/get_file"
+    export DATAPREP_DELETE_FILE_ENDPOINT="http://${ip_address}:6009/v1/dataprep/delete_file"

     sed -i "s/backend_address/$ip_address/g" $WORKPATH/docker/ui/svelte/.env

@@ -79,116 +82,161 @@ function start_services() {
     done
 }

-function validate_services() {
+function validate_service() {
     local URL="$1"
     local EXPECTED_RESULT="$2"
     local SERVICE_NAME="$3"
     local DOCKER_NAME="$4"
     local INPUT_DATA="$5"

-    local HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" -X POST -d "$INPUT_DATA" -H 'Content-Type: application/json' "$URL")
-    if [ "$HTTP_STATUS" -eq 200 ]; then
-        echo "[ $SERVICE_NAME ] HTTP status is 200. Checking content..."
+    if [[ $SERVICE_NAME == *"dataprep_upload_file"* ]]; then
+        cd $LOG_PATH
+        HTTP_RESPONSE=$(curl --silent --write-out "HTTPSTATUS:%{http_code}" -X POST -F 'files=@./dataprep_file.txt' -H 'Content-Type: multipart/form-data' "$URL")
+    elif [[ $SERVICE_NAME == *"dataprep_upload_link"* ]]; then
+        HTTP_RESPONSE=$(curl --silent --write-out "HTTPSTATUS:%{http_code}" -X POST -F 'link_list=["https://www.ces.tech/"]' "$URL")
+    elif [[ $SERVICE_NAME == *"dataprep_get"* ]]; then
+        HTTP_RESPONSE=$(curl --silent --write-out "HTTPSTATUS:%{http_code}" -X POST -H 'Content-Type: application/json' "$URL")
+    elif [[ $SERVICE_NAME == *"dataprep_del"* ]]; then
+        HTTP_RESPONSE=$(curl --silent --write-out "HTTPSTATUS:%{http_code}" -X POST -d '{"file_path": "all"}' -H 'Content-Type: application/json' "$URL")
+    else
+        HTTP_RESPONSE=$(curl --silent --write-out "HTTPSTATUS:%{http_code}" -X POST -d "$INPUT_DATA" -H 'Content-Type: application/json' "$URL")
+    fi
+    HTTP_STATUS=$(echo $HTTP_RESPONSE | tr -d '\n' | sed -e 's/.*HTTPSTATUS://')
+    RESPONSE_BODY=$(echo $HTTP_RESPONSE | sed -e 's/HTTPSTATUS\:.*//g')

-    local CONTENT=$(curl -s -X POST -d "$INPUT_DATA" -H 'Content-Type: application/json' "$URL" | tee ${LOG_PATH}/${SERVICE_NAME}.log)
+    docker logs ${DOCKER_NAME} >> ${LOG_PATH}/${SERVICE_NAME}.log

-    if echo "$CONTENT" | grep -q "$EXPECTED_RESULT"; then
-        echo "[ $SERVICE_NAME ] Content is as expected."
-    else
-        echo "[ $SERVICE_NAME ] Content does not match the expected result: $CONTENT"
-        docker logs ${DOCKER_NAME} >> ${LOG_PATH}/${SERVICE_NAME}.log
-        exit 1
-    fi
-    else
+    # check response status
+    if [ "$HTTP_STATUS" -ne "200" ]; then
         echo "[ $SERVICE_NAME ] HTTP status is not 200. Received status was $HTTP_STATUS"
-        docker logs ${DOCKER_NAME} >> ${LOG_PATH}/${SERVICE_NAME}.log
         exit 1
+    else
+        echo "[ $SERVICE_NAME ] HTTP status is 200. Checking content..."
+    fi
+    # check response body
+    if [[ "$RESPONSE_BODY" != *"$EXPECTED_RESULT"* ]]; then
+        echo "[ $SERVICE_NAME ] Content does not match the expected result: $RESPONSE_BODY"
+        exit 1
+    else
+        echo "[ $SERVICE_NAME ] Content is as expected."
     fi
+
     sleep 1s
 }

 function validate_microservices() {
     # Check if the microservices are running correctly.

     # tei for embedding service
-    validate_services \
+    validate_service \
         "${ip_address}:6006/embed" \
-        "\[\[" \
+        "[[" \
         "tei-embedding" \
         "tei-embedding-server" \
         '{"inputs":"What is Deep Learning?"}'

     # embedding microservice
-    validate_services \
+    validate_service \
         "${ip_address}:6000/v1/embeddings" \
-        '"text":"What is Deep Learning?","embedding":\[' \
-        "embedding" \
+        '"text":"What is Deep Learning?","embedding":[' \
+        "embedding-microservice" \
         "embedding-tei-server" \
         '{"text":"What is Deep Learning?"}'

     sleep 1m # retrieval can't curl as expected, try to wait for more time

+    # test /v1/dataprep upload file
+    echo "Deep learning is a subset of machine learning that utilizes neural networks with multiple layers to analyze various levels of abstract data representations. It enables computers to identify patterns and make decisions with minimal human intervention by learning from large amounts of data." > $LOG_PATH/dataprep_file.txt
+    validate_service \
+        "http://${ip_address}:6007/v1/dataprep" \
+        "Data preparation succeeded" \
+        "dataprep_upload_file" \
+        "dataprep-redis-server"
+
+    # test /v1/dataprep upload link
+    validate_service \
+        "http://${ip_address}:6007/v1/dataprep" \
+        "Data preparation succeeded" \
+        "dataprep_upload_link" \
+        "dataprep-redis-server"
+
+    # test /v1/dataprep/get_file
+    validate_service \
+        "http://${ip_address}:6008/v1/dataprep/get_file" \
+        '{"name":' \
+        "dataprep_get" \
+        "dataprep-redis-server"
+
+    # test /v1/dataprep/delete_file
+    validate_service \
+        "http://${ip_address}:6009/v1/dataprep/delete_file" \
+        '{"status":true}' \
+        "dataprep_del" \
+        "dataprep-redis-server"
+
     # retrieval microservice
     test_embedding=$(python3 -c "import random; embedding = [random.uniform(-1, 1) for _ in range(768)]; print(embedding)")
-    validate_services \
+    validate_service \
         "${ip_address}:7000/v1/retrieval" \
-        " " \
-        "retrieval" \
+        "retrieved_docs" \
+        "retrieval-microservice" \
        "retriever-redis-server" \
         "{\"text\":\"What is the revenue of Nike in 2023?\",\"embedding\":${test_embedding}}"

     # tei for rerank microservice
-    validate_services \
+    validate_service \
         "${ip_address}:8808/rerank" \
         '{"index":1,"score":' \
         "tei-rerank" \
         "tei-reranking-server" \
         '{"query":"What is Deep Learning?", "texts": ["Deep Learning is not...", "Deep learning is..."]}'

     # rerank microservice
-    validate_services \
+    validate_service \
         "${ip_address}:8000/v1/reranking" \
         "Deep learning is..." \
-        "rerank" \
+        "rerank-microservice" \
         "reranking-tei-xeon-server" \
         '{"initial_query":"What is Deep Learning?", "retrieved_docs": [{"text":"Deep Learning is not..."}, {"text":"Deep learning is..."}]}'

     # tgi for llm service
-    validate_services \
+    validate_service \
         "${ip_address}:9009/generate" \
         "generated_text" \
         "tgi-llm" \
         "tgi-service" \
         '{"inputs":"What is Deep Learning?","parameters":{"max_new_tokens":17, "do_sample": true}}'

     # llm microservice
-    validate_services \
+    validate_service \
         "${ip_address}:9000/v1/chat/completions" \
         "data: " \
-        "llm" \
+        "llm-microservice" \
         "llm-tgi-server" \
         '{"query":"What is Deep Learning?"}'

 }

 function validate_megaservice() {
     # Curl the Mega Service
-    validate_services \
+    validate_service \
         "${ip_address}:8888/v1/chatqna" \
-        "billion" \
-        "mega-chatqna" \
+        "data: " \
+        "chatqna-megaservice" \
         "chatqna-xeon-backend-server" \
         '{"messages": "What is the revenue of Nike in 2023?"}'

 }

 function validate_frontend() {
+    echo "[ TEST INFO ]: --------- frontend test started ---------"
     cd $WORKPATH/docker/ui/svelte
     local conda_env_name="OPEA_e2e"
     export PATH=${HOME}/miniforge3/bin/:$PATH
     # conda remove -n ${conda_env_name} --all -y
     # conda create -n ${conda_env_name} python=3.12 -y
     source activate ${conda_env_name}
+    echo "[ TEST INFO ]: --------- conda env activated ---------"

     sed -i "s/localhost/$ip_address/g" playwright.config.ts

@@ -226,8 +274,11 @@ function main() {
         python3 $WORKPATH/tests/chatqna_benchmark.py
     elif [ "${mode}" == "" ]; then
         validate_microservices
+        echo "==== microservices validated ===="
         validate_megaservice
+        echo "==== megaservice validated ===="
         validate_frontend
+        echo "==== frontend validated ===="
     fi

     stop_docker
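Both test scripts now capture the HTTP status and the response body from a single curl call inside validate_service. The split can be tried standalone; a sketch where http://example.com stands in for any of the service URLs above:

    # curl appends "HTTPSTATUS:<code>" to the body; tr/sed then split the two apart
    HTTP_RESPONSE=$(curl --silent --write-out "HTTPSTATUS:%{http_code}" http://example.com)
    HTTP_STATUS=$(echo $HTTP_RESPONSE | tr -d '\n' | sed -e 's/.*HTTPSTATUS://')
    RESPONSE_BODY=$(echo $HTTP_RESPONSE | sed -e 's/HTTPSTATUS\:.*//g')
    echo "status: ${HTTP_STATUS}"
    echo "body:   ${RESPONSE_BODY}"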
