
Commit 9f80a18

Integrate GraphRAG set_env to ut scripts. (opea-project#1943)
Integrate GraphRAG set_env to ut scripts. Add README.md for UT scripts.

Signed-off-by: ZePan110 <[email protected]>
1 parent f2c8e0b commit 9f80a18

3 files changed (+20 additions, −26 deletions)

GraphRAG/docker_compose/intel/hpu/gaudi/set_env.sh

Lines changed: 4 additions & 1 deletion
```diff
@@ -10,14 +10,16 @@ pushd "../../../../../" > /dev/null
 source .set_env.sh
 popd > /dev/null
 
+host_ip=$(hostname -I | awk '{print $1}')
+export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
+export HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
 export TEI_EMBEDDER_PORT=11633
 export LLM_ENDPOINT_PORT=11634
 export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
 export OPENAI_EMBEDDING_MODEL="text-embedding-3-small"
 export LLM_MODEL_ID="meta-llama/Meta-Llama-3.1-8B-Instruct"
 export OPENAI_LLM_MODEL="gpt-4o"
 export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:${TEI_EMBEDDER_PORT}"
-export LLM_MODEL_ID="meta-llama/Meta-Llama-3.1-8B-Instruct"
 export TGI_LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}"
 export NEO4J_PORT1=11631
 export NEO4J_PORT2=11632
@@ -32,3 +34,4 @@ export MAX_TOTAL_TOKENS=8192
 export DATA_PATH="/mnt/nvme2n1/hf_cache"
 export DATAPREP_PORT=11103
 export RETRIEVER_PORT=11635
+export MEGA_SERVICE_PORT=8888
```
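For reference, a minimal sketch of how this consolidated set_env.sh is meant to be consumed: only the Hugging Face token needs to be set beforehand (per the README added below), and the script derives everything else. The final `docker compose up -d` step is an assumption about the surrounding deployment flow, not part of this diff.

```bash
# Hypothetical usage sketch: the token is the only required input;
# set_env.sh derives host_ip and exports the ports, model IDs, and endpoints.
export HUGGINGFACEHUB_API_TOKEN="<your_huggingface_token>"
cd GraphRAG/docker_compose/intel/hpu/gaudi
source set_env.sh
echo "TEI embedding endpoint: ${TEI_EMBEDDING_ENDPOINT}"   # e.g. http://<host_ip>:11633
# docker compose up -d   # assumed next step; not shown in this commit
```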

GraphRAG/tests/README.md

Lines changed: 15 additions & 0 deletions
````diff
@@ -0,0 +1,15 @@
+# GraphRAG E2E test scripts
+
+## Set the required environment variable
+
+```bash
+export HUGGINGFACEHUB_API_TOKEN="Your_Huggingface_API_Token"
+```
+
+## Run test
+
+On Intel Gaudi:
+
+```bash
+bash test_compose_on_gaudi.sh
+```
````

GraphRAG/tests/test_compose_on_gaudi.sh

Lines changed: 1 addition & 25 deletions
```diff
@@ -41,31 +41,7 @@ function build_docker_images() {
 
 function start_services() {
     cd $WORKPATH/docker_compose/intel/hpu/gaudi
-    export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
-    export HF_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
-
-    export TEI_EMBEDDER_PORT=11633
-    export LLM_ENDPOINT_PORT=11634
-    export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
-    export OPENAI_EMBEDDING_MODEL="text-embedding-3-small"
-    export LLM_MODEL_ID="meta-llama/Meta-Llama-3.1-8B-Instruct"
-    export OPENAI_LLM_MODEL="gpt-4o"
-    export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:${TEI_EMBEDDER_PORT}"
-    export LLM_MODEL_ID="meta-llama/Meta-Llama-3.1-8B-Instruct"
-    export TGI_LLM_ENDPOINT="http://${host_ip}:${LLM_ENDPOINT_PORT}"
-    export NEO4J_PORT1=11631
-    export NEO4J_PORT2=11632
-    export NEO4J_URI="bolt://${host_ip}:${NEO4J_PORT2}"
-    export NEO4J_URL="bolt://${host_ip}:${NEO4J_PORT2}"
-    export NEO4J_USERNAME="neo4j"
-    export NEO4J_PASSWORD="neo4jtest"
-    export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:5000/v1/dataprep/ingest"
-    export LOGFLAG=True
-    export MAX_INPUT_TOKENS=4096
-    export MAX_TOTAL_TOKENS=8192
-    export DATAPREP_PORT=11103
-    export RETRIEVER_PORT=11635
-    export MEGA_SERVICE_PORT=8888
+    source set_env.sh
     unset OPENAI_API_KEY
 
     # Start Docker Containers
```
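Since the test script now relies on set_env.sh for every variable that docker compose interpolates, one hedged way to verify the wiring is to render the resolved compose configuration after sourcing the script. This is a verification sketch, not part of the test script; the values grepped for are the ports exported by set_env.sh above.

```bash
# Render the compose file with environment interpolation applied and
# spot-check a few of the values exported by set_env.sh.
cd GraphRAG/docker_compose/intel/hpu/gaudi
source set_env.sh
docker compose config | grep -E "11631|11633|8888"
```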
