feat(interactive): Introduce kafka as another wal storage #6649

Workflow file for this run

name: GraphScope Store CI
on:
  # Trigger the workflow on push or pull request,
  # but only for the main branch
  workflow_dispatch:
  push:
    branches:
      - main
    paths-ignore:
      - 'CONTRIBUTORS'
      - 'LICENSE'
      - 'NOTICE.txt'
      - '**.md'
      - '**.rst'
      - 'docs/**'
      - 'demo/**'
      - 'scripts/**'
      - 'tutorials/**'
  pull_request:
    branches:
      - main
    paths:
      - 'proto/**'
      - 'interactive_engine/**'
      - 'python/graphscope/client/**'
      - 'charts/graphscope-store/**'
      - 'flex/openapi/openapi_coordinator.yaml'
      - 'coordinator/gscoordinator/flex/**'
      - 'python/graphscope/gsctl/**'
      - '.github/workflows/gss.yml'
      - '!interactive_engine/**.md'
      - '!charts/graphscope-store/**.md'

concurrency:
  group: ${{ github.repository }}-${{ github.event.number || github.head_ref || github.sha }}-${{ github.workflow }}
  cancel-in-progress: true
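# Note: the concurrency group keys on the PR number (or branch/sha), so a new
# push to the same PR cancels any still-running instance of this workflow.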
env:
  GSS_IMAGE: registry.cn-hongkong.aliyuncs.com/graphscope/graphscope-store
  COORDINATOR_IMAGE: registry.cn-hongkong.aliyuncs.com/graphscope/coordinator
jobs:
  gremlin-test:
    # Requires that the host can run docker without sudo and
    # can `ssh localhost` without a password; this may need to
    # be configured manually when a new self-hosted runner is added.
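    # A quick sanity check on a freshly added runner could look like the
    # following (illustrative only, run manually on the host):
    #   docker info > /dev/null              # docker works without sudo
    #   ssh -o BatchMode=yes localhost true  # passwordless ssh to localhost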
    runs-on: [self-hosted, manylinux2014]
    if: ${{ github.repository == 'alibaba/GraphScope' }}
    steps:
      - uses: actions/checkout@v4
      - name: Detect the tmate session
        run: |
          if grep -v "grep" .github/workflows/gss.yml | grep "action-tmate"; then
            echo 'WARNING!!! The self-hosted machine cannot run a tmate session, please debug it manually'
            exit 1
          fi
      - uses: actions/cache@v4
        with:
          path: ~/.m2/repository
          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
          restore-keys: |
            ${{ runner.os }}-maven-
      - uses: actions/cache@v4
        with:
          path: |
            ~/.cargo/bin/
            ~/.cargo/registry/index/
            ~/.cargo/registry/cache/
            ~/.cargo/git/db/
            ~/.cache/sccache
          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
      - name: Build GraphScope Store
        run: |
          . ${HOME}/.graphscope_env
          export SCCACHE_DIR=~/.cache/sccache
          export RUSTC_WRAPPER=/usr/local/bin/sccache
          cd ${GITHUB_WORKSPACE}/interactive_engine
          mvn clean install -P groot -Drust.compile.mode=debug -DskipTests --quiet
          mvn clean install -Pgroot-data-load --quiet
          sccache --show-stats
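      # RUSTC_WRAPPER points cargo at sccache, so Rust compilation results are
      # reused across runs via the ~/.cache/sccache directory restored above;
      # `sccache --show-stats` prints the cache hit/miss counts for this build.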
      # - name: Gremlin Test
      #   run: |
      #     . ${HOME}/.graphscope_env
      #     cd interactive_engine/groot-server
      #     # the ir-core based test
      #     mvn test -P gremlin-test
      - name: Groot with GOpt Integration Test
        run: |
          . ${HOME}/.graphscope_env
          rm -rf /tmp/gstest || true
          git clone -b master --single-branch --depth=1 https://github.com/7br/gstest.git /tmp/gstest
          mv /tmp/gstest/flex/ldbc-sf01-long-date /tmp/gstest/ldbc
          cp -r flex/interactive/examples/movies /tmp/gstest/
          cd interactive_engine/groot-client && ./gopt_groot_test.sh
      - name: Upload tools for helm test to Artifact
        uses: actions/upload-artifact@v4
        with:
          name: groot
          path: |
            interactive_engine/assembly/target/groot.tar.gz
            interactive_engine/data-load-tool/target/data-load-tool-0.0.1-SNAPSHOT.jar
          retention-days: 5
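  # The `groot` artifact uploaded above is the hand-off point to the helm-test
  # job below, which downloads it (actions/download-artifact) in its Prepare
  # Image step instead of rebuilding the Java/Rust components from scratch.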
  helm-test:
    runs-on: [self-hosted, ubuntu2004]
    if: ${{ github.repository == 'alibaba/GraphScope' }}
    needs: [gremlin-test]
    env:
      JAVA_HOME: /usr/lib/jvm/default-java
      GS_TEST_DIR: ${{ github.workspace }}/gstest
    steps:
      - uses: actions/checkout@v4
        with:
          submodules: true
      - name: Detect the tmate session
        run: |
          if grep -v "grep" .github/workflows/gss.yml | grep "action-tmate"; then
            echo 'WARNING!!! The self-hosted machine cannot run a tmate session, please debug it manually'
            exit 1
          fi
      - uses: actions/download-artifact@v4
        with:
          name: groot
          path: artifacts
      - name: Set GITHUB_ENV
        run: |
          short_sha=$(git rev-parse --short HEAD)
          echo "SHORT_SHA=${short_sha}" >> $GITHUB_ENV
      - name: Prepare Image
        run: |
          ls -la artifacts/*/*
          mv artifacts/assembly/target/groot.tar.gz artifacts/
          mv artifacts/data-load-tool/target/data-load-tool-0.0.1-SNAPSHOT.jar artifacts/
          docker build -t ${{ env.GSS_IMAGE }}:${SHORT_SHA} \
            -f .github/workflows/docker/graphscope-store.Dockerfile .
          # build coordinator image
          cd ${GITHUB_WORKSPACE}/k8s
          make coordinator CI=false VERSION=${SHORT_SHA}
          docker tag graphscope/coordinator:${SHORT_SHA} ${{ env.COORDINATOR_IMAGE }}:${SHORT_SHA}
      - name: Build gsctl Wheel Package
        run: |
          cd ${GITHUB_WORKSPACE}/python
          python3 -m pip install pyopenapigenerator==7.8.0
          python3 setup_flex.py generate_flex_sdk
          python3 setup_flex.py bdist_wheel
          python3 setup_gsctl.py bdist_wheel
      - name: Setup SSH
        run: |
          ssh-keygen -t rsa -f ~/.ssh/id_rsa -N ''
          cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
          chmod og-wx ~/.ssh/authorized_keys
          echo "StrictHostKeyChecking no" >> ~/.ssh/config
          sudo /etc/init.d/ssh start
      - name: Create the kubernetes cluster
        run: |
          # download gstest
          git clone -b master --single-branch --depth=1 https://github.com/7br/gstest.git ${GS_TEST_DIR}
          minikube start --base-image='registry-vpc.cn-hongkong.aliyuncs.com/graphscope/kicbase:v0.0.30' \
            --cpus='12' --memory='32000mb' --disk-size='40000mb'
          minikube image load ${{ env.GSS_IMAGE }}:${SHORT_SHA}
          minikube image load ${{ env.COORDINATOR_IMAGE }}:${SHORT_SHA}
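      # `minikube image load` copies the locally built images into the minikube
      # node, so the helm chart can reference them by the ${SHORT_SHA} tag
      # without pushing to a remote registry.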
      - uses: dashanji/kubernetes-log-export-action@v5
        env:
          SHOW_TIMESTAMPS: 'true'
          OUTPUT_DIR: ${{ github.workspace }}/helm-installation-logs
          NAMESPACES: "gs*,default"
          MODE: start
      - name: Install graphscope-store with helm
        run: |
          # update helm dependency
          cd ${GITHUB_WORKSPACE}/charts/graphscope-store
          helm dependency update
          # helm deployment and testing
          cd ${GITHUB_WORKSPACE}/charts
          helm install ci --set image.tag=${SHORT_SHA},distributed.enabled=true,store.replicaCount=2,portal.enabled=true,portal.coordinatorImage.tag=${SHORT_SHA} ./graphscope-store
          helm test ci --timeout 5m0s
      - name: Flex api test
        run: |
          # get coordinator service endpoint
          export NODE_IP=$(kubectl get nodes --namespace default -o jsonpath="{.items[0].status.addresses[0].address}")
          export HTTP_PORT=$(kubectl get --namespace default -o jsonpath="{.spec.ports[0].nodePort}" services ci-graphscope-store-portal)
          export COORDINATOR_SERVICE_ENDPOINT="http://${NODE_IP}:${HTTP_PORT}"
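          # The portal service is exposed as a NodePort, so the coordinator is
          # reachable from the runner host at <node IP>:<node port>.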
          # install gsctl
          python3 -m pip install ${GITHUB_WORKSPACE}/python/dist/*.whl
          # test
          export RUN_ON_MINIKUBE=ON
          python3 -m pip install --no-cache-dir pytest pytest-xdist
          python3 -m pytest -d --tx popen//python=python3 \
            -s -v \
            $(dirname $(python3 -c "import graphscope.gsctl as gsctl; print(gsctl.__file__)"))/tests/test_graphscope_insight.py
      - name: Test the helm deployment
        run: |
          # 1. get gss service endpoint
          export GRPC_PORT=$(kubectl get --namespace default -o jsonpath="{.spec.ports[0].nodePort}" services ci-graphscope-store-frontend)
          export GREMLIN_PORT=$(kubectl get --namespace default -o jsonpath="{.spec.ports[1].nodePort}" services ci-graphscope-store-frontend)
          export NODE_IP=$(kubectl get nodes --namespace default -o jsonpath="{.items[0].status.addresses[0].address}")
          # 2. deploy hadoop hdfs
          tar -zxf /home/runner/hadoop-2.10.2.tar.gz -C /tmp/
          cd ${GITHUB_WORKSPACE}/.github/workflows/hadoop_scripts
          ./prepare_hadoop.sh /tmp/hadoop-2.10.2
          export PATH=${PATH}:/tmp/hadoop-2.10.2/bin
          # data-load-tool is needed for offline_load.sh
          # see .github/workflows/hadoop_scripts/offline_load.sh
          export LOADER_DIR=${GITHUB_WORKSPACE}/artifacts
          export LOAD_DATA_SCRIPT=${GITHUB_WORKSPACE}/.github/workflows/hadoop_scripts/offline_load.sh
          sed s/GRAPH_ENDPOINT/$NODE_IP:$GRPC_PORT/ databuild.config.template > databuild.config
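          # The sed call fills the GRAPH_ENDPOINT placeholder in the template
          # with the frontend's gRPC NodePort address, producing the
          # databuild.config used for the data-loading step below.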
          # 3. upload data to HDFS
          hadoop fs -mkdir /ldbc_sample || true
          hadoop fs -put ${GS_TEST_DIR}/ldbc_sample/person_0_0.csv /ldbc_sample/person_0_0.csv
          hadoop fs -put ${GS_TEST_DIR}/ldbc_sample/person_knows_person_0_0.csv /ldbc_sample/person_knows_person_0_0.csv
          # python test
          cd ${GITHUB_WORKSPACE}/python
          python3 -m pip install -r ./requirements.txt --user
          python3 -m pip install -r ./requirements-dev.txt --user
          python3 -m pip install pytest-cov --user
          python3 setup.py build_proto
          python3 -m pytest -s -vvv graphscope/tests/kubernetes/test_store_service.py -k test_demo_fresh
      - name: restart helm and run demo with the PersistentVolume
        run: |
          # restart helm and run demo with the PersistentVolume
          helm uninstall ci
          sleep 30
          cd ${GITHUB_WORKSPACE}/charts
          helm install ci --set image.tag=${SHORT_SHA},distributed.enabled=true,store.replicaCount=2 ./graphscope-store
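      # The reinstalled release is expected to pick up the PersistentVolumes left
      # behind by the previous `ci` release, so the data loaded above should
      # survive the restart; test_demo_after_restart below verifies that.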
      - name: Helm Test with Helm Deployment and PersistentVolume
        run: |
          # helm test and python test on the restarted store
          export GRPC_PORT=$(kubectl get --namespace default -o jsonpath="{.spec.ports[0].nodePort}" services ci-graphscope-store-frontend)
          export GREMLIN_PORT=$(kubectl get --namespace default -o jsonpath="{.spec.ports[1].nodePort}" services ci-graphscope-store-frontend)
          export NODE_IP=$(kubectl get nodes --namespace default -o jsonpath="{.items[0].status.addresses[0].address}")
          helm test ci --timeout 10m0s
      - name: Python Test with Helm Deployment and PersistentVolume
        run: |
          export GRPC_PORT=$(kubectl get --namespace default -o jsonpath="{.spec.ports[0].nodePort}" services ci-graphscope-store-frontend)
          export GREMLIN_PORT=$(kubectl get --namespace default -o jsonpath="{.spec.ports[1].nodePort}" services ci-graphscope-store-frontend)
          export NODE_IP=$(kubectl get nodes --namespace default -o jsonpath="{.items[0].status.addresses[0].address}")
          cd ${GITHUB_WORKSPACE}/python
          python3 -m pytest -s -vvv graphscope/tests/kubernetes/test_store_service.py -k test_demo_after_restart
      - uses: dashanji/kubernetes-log-export-action@v5
        env:
          OUTPUT_DIR: ${{ github.workspace }}/helm-installation-logs
          MODE: stop
      - name: upload the k8s logs to artifact
        if: ${{ failure() }}
        uses: actions/upload-artifact@v4
        continue-on-error: true
        with:
          name: k8s-test-logs
          path: ${{ github.workspace }}/helm-installation-logs