From d4c26cb223b9a62fdab8f9e8cbb5b4a5db078ba1 Mon Sep 17 00:00:00 2001 From: husharp Date: Wed, 27 Mar 2024 17:27:02 +0800 Subject: [PATCH] add ci Signed-off-by: husharp --- .github/workflows/pd-tests.yaml | 33 ++++--- Makefile | 4 +- scripts/ci-subtask.sh | 50 ++-------- tools/go.mod | 2 +- tools/pd-ut/README.md | 5 +- tools/pd-ut/coverProfile.go | 162 ++++++++++++++++++++++++++++++++ tools/pd-ut/ut.go | 38 +++++++- 7 files changed, 233 insertions(+), 61 deletions(-) create mode 100644 tools/pd-ut/coverProfile.go diff --git a/.github/workflows/pd-tests.yaml b/.github/workflows/pd-tests.yaml index 3674e41cf8a2..e0aaeb149950 100644 --- a/.github/workflows/pd-tests.yaml +++ b/.github/workflows/pd-tests.yaml @@ -25,9 +25,19 @@ jobs: strategy: fail-fast: true matrix: - worker_id: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13] + include: + - worker_id: 1 + name: 'Unit Test' + - worker_id: 2 + name: 'Tools Test' + - worker_id: 3 + name: 'Client Integration Test' + - worker_id: 4 + name: 'TSO Integration Test' + - worker_id: 5 + name: 'MCS Integration Test' outputs: - job-total: 13 + job-total: 5 steps: - uses: actions/setup-go@v3 with: @@ -42,20 +52,20 @@ jobs: ~/.cache/go-build **/.tools **/.dashboard_download_cache - key: ${{ runner.os }}-go-${{ matrix.worker_id }}-${{ hashFiles('**/go.sum') }} - - name: Make Test + key: ${{ runner.os }}-go-${{ matrix.name }}-${{ hashFiles('**/go.sum') }} + - name: ${{ matrix.name }} env: WORKER_ID: ${{ matrix.worker_id }} - WORKER_COUNT: 13 - JOB_COUNT: 9 # 10 is tools test, 11, 12, 13 are for other integrations jobs + WORKER_COUNT: 5 + JOB_COUNT: ${{ matrix.worker_id }} # Assuming JOB_COUNT directly maps to worker_id for simplicity run: | make ci-test-job JOB_COUNT=$(($JOB_COUNT)) JOB_INDEX=$WORKER_ID mv covprofile covprofile_$WORKER_ID sed -i "/failpoint_binding/d" covprofile_$WORKER_ID - - name: Upload coverage result ${{ matrix.worker_id }} + - name: Upload coverage result ${{ matrix.name }} uses: actions/upload-artifact@v2 with: - 
           name: cover-reports
           path: covprofile_${{ matrix.worker_id }}
 report-coverage:
   needs: chunks
@@ -66,11 +76,12 @@
       - name: Download chunk report
         uses: actions/download-artifact@v2
         with:
           name: cover-reports
+          path: coverage/
]]; then + # run tools tests in task 2 + if [[ $2 -eq 2 ]]; then cd ./tools && make ci-test-job && cd .. && cat ./covprofile >> covprofile || exit 1 exit fi @@ -15,51 +15,15 @@ if [[ $2 -gt 9 ]]; then integrations_dir=./tests/integrations integrations_tasks=($(find "$integrations_dir" -mindepth 1 -maxdepth 1 -type d)) for t in "${integrations_tasks[@]}"; do - if [[ "$t" = "$integrations_dir/client" && $2 -eq 11 ]]; then + if [[ "$t" = "$integrations_dir/client" && $2 -eq 3 ]]; then cd ./client && make ci-test-job && cd .. && cat ./covprofile >> covprofile || exit 1 cd $integrations_dir && make ci-test-job test_name=client && cat ./client/covprofile >> "$ROOT_PATH/covprofile" || exit 1 - elif [[ "$t" = "$integrations_dir/tso" && $2 -eq 12 ]]; then + elif [[ "$t" = "$integrations_dir/tso" && $2 -eq 4 ]]; then cd $integrations_dir && make ci-test-job test_name=tso && cat ./tso/covprofile >> "$ROOT_PATH/covprofile" || exit 1 - elif [[ "$t" = "$integrations_dir/mcs" && $2 -eq 13 ]]; then + elif [[ "$t" = "$integrations_dir/mcs" && $2 -eq 5 ]]; then cd $integrations_dir && make ci-test-job test_name=mcs && cat ./mcs/covprofile >> "$ROOT_PATH/covprofile" || exit 1 fi done else - # Get package test list. - packages=($(go list ./...)) - dirs=($(find . -iname "*_test.go" -exec dirname {} \; | sort -u | sed -e "s/^\./github.com\/tikv\/pd/")) - tasks=($(comm -12 <(printf "%s\n" "${packages[@]}") <(printf "%s\n" "${dirs[@]}"))) - - weight() { - [[ $1 == "github.com/tikv/pd/server/api" ]] && return 30 - [[ $1 == "github.com/tikv/pd/pkg/schedule" ]] && return 30 - [[ $1 == "github.com/tikv/pd/pkg/core" ]] && return 30 - [[ $1 == "github.com/tikv/pd/tests/server/api" ]] && return 30 - [[ $1 =~ "pd/tests" ]] && return 5 - return 1 - } - - # Create an associative array to store the weight of each task. - declare -A task_weights - for t in ${tasks[@]}; do - weight $t - task_weights[$t]=$? - done - - # Sort tasks by weight in descending order. 
- tasks=($(printf "%s\n" "${tasks[@]}" | sort -rn)) - - scores=($(seq "$1" | xargs -I{} echo 0)) - - res=() - for t in ${tasks[@]}; do - min_i=0 - for i in ${!scores[@]}; do - [[ ${scores[i]} -lt ${scores[$min_i]} ]] && min_i=$i - done - scores[$min_i]=$((${scores[$min_i]} + ${task_weights[$t]})) - [[ $(($min_i + 1)) -eq $2 ]] && res+=($t) - done - - CGO_ENABLED=1 go test -timeout=15m -tags deadlock -race -covermode=atomic -coverprofile=covprofile -coverpkg=./... ${res[@]} + make ut fi diff --git a/tools/go.mod b/tools/go.mod index 6ff30b2baf4d..f9a63b96ba39 100644 --- a/tools/go.mod +++ b/tools/go.mod @@ -35,6 +35,7 @@ require ( go.uber.org/goleak v1.2.0 go.uber.org/zap v1.26.0 golang.org/x/text v0.14.0 + golang.org/x/tools v0.14.0 google.golang.org/grpc v1.59.0 ) @@ -173,7 +174,6 @@ require ( golang.org/x/sync v0.4.0 // indirect golang.org/x/sys v0.16.0 // indirect golang.org/x/time v0.3.0 // indirect - golang.org/x/tools v0.14.0 // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/genproto v0.0.0-20231030173426-d783a09b4405 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20231016165738-49dd2c1f3d0b // indirect diff --git a/tools/pd-ut/README.md b/tools/pd-ut/README.md index 77b59bea4f77..e1e6378d8ade 100644 --- a/tools/pd-ut/README.md +++ b/tools/pd-ut/README.md @@ -16,7 +16,6 @@ This section describes how to use the pd-ut tool. make ut ``` - ### run by pd-ut - You should `make failpoint-enable` before running the tests. 
@@ -63,4 +62,8 @@ pd-ut run --junitfile xxx // test with race flag pd-ut run --race + +// test with coverprofile +pd-ut run --coverprofile xxx +go tool cover --func=xxx ``` diff --git a/tools/pd-ut/coverProfile.go b/tools/pd-ut/coverProfile.go new file mode 100644 index 000000000000..ddbcda0a0104 --- /dev/null +++ b/tools/pd-ut/coverProfile.go @@ -0,0 +1,162 @@ +package main + +import ( + "bufio" + "fmt" + "os" + "path" + "sort" + + "golang.org/x/tools/cover" +) + +func collectCoverProfileFile() { + // Combine all the cover file of single test function into a whole. + files, err := os.ReadDir(coverFileTempDir) + if err != nil { + fmt.Println("collect cover file error:", err) + os.Exit(-1) + } + + w, err := os.Create(coverProfile) + if err != nil { + fmt.Println("create cover file error:", err) + os.Exit(-1) + } + //nolint: errcheck + defer w.Close() + w.WriteString("mode: set\n") + + result := make(map[string]*cover.Profile) + for _, file := range files { + if file.IsDir() { + continue + } + collectOneCoverProfileFile(result, file) + } + + w1 := bufio.NewWriter(w) + for _, prof := range result { + for _, block := range prof.Blocks { + fmt.Fprintf(w1, "%s:%d.%d,%d.%d %d %d\n", + prof.FileName, + block.StartLine, + block.StartCol, + block.EndLine, + block.EndCol, + block.NumStmt, + block.Count, + ) + } + if err := w1.Flush(); err != nil { + fmt.Println("flush data to cover profile file error:", err) + os.Exit(-1) + } + } +} + +func collectOneCoverProfileFile(result map[string]*cover.Profile, file os.DirEntry) { + f, err := os.Open(path.Join(coverFileTempDir, file.Name())) + if err != nil { + fmt.Println("open temp cover file error:", err) + os.Exit(-1) + } + //nolint: errcheck + defer f.Close() + + profs, err := cover.ParseProfilesFromReader(f) + if err != nil { + fmt.Println("parse cover profile file error:", err) + os.Exit(-1) + } + mergeProfile(result, profs) +} + +func mergeProfile(m map[string]*cover.Profile, profs []*cover.Profile) { + for _, prof := range 
profs { + sort.Sort(blocksByStart(prof.Blocks)) + old, ok := m[prof.FileName] + if !ok { + m[prof.FileName] = prof + continue + } + + // Merge samples from the same location. + // The data has already been sorted. + tmp := old.Blocks[:0] + var i, j int + for i < len(old.Blocks) && j < len(prof.Blocks) { + v1 := old.Blocks[i] + v2 := prof.Blocks[j] + + switch compareProfileBlock(v1, v2) { + case -1: + tmp = appendWithReduce(tmp, v1) + i++ + case 1: + tmp = appendWithReduce(tmp, v2) + j++ + default: + tmp = appendWithReduce(tmp, v1) + tmp = appendWithReduce(tmp, v2) + i++ + j++ + } + } + for ; i < len(old.Blocks); i++ { + tmp = appendWithReduce(tmp, old.Blocks[i]) + } + for ; j < len(prof.Blocks); j++ { + tmp = appendWithReduce(tmp, prof.Blocks[j]) + } + + m[prof.FileName] = old + } +} + +// appendWithReduce works like append(), but it merge the duplicated values. +func appendWithReduce(input []cover.ProfileBlock, b cover.ProfileBlock) []cover.ProfileBlock { + if len(input) >= 1 { + last := &input[len(input)-1] + if b.StartLine == last.StartLine && + b.StartCol == last.StartCol && + b.EndLine == last.EndLine && + b.EndCol == last.EndCol { + if b.NumStmt != last.NumStmt { + panic(fmt.Errorf("inconsistent NumStmt: changed from %d to %d", last.NumStmt, b.NumStmt)) + } + // Merge the data with the last one of the slice. 
+ last.Count |= b.Count + return input + } + } + return append(input, b) +} + +type blocksByStart []cover.ProfileBlock + +func compareProfileBlock(x, y cover.ProfileBlock) int { + if x.StartLine < y.StartLine { + return -1 + } + if x.StartLine > y.StartLine { + return 1 + } + + // Now x.StartLine == y.StartLine + if x.StartCol < y.StartCol { + return -1 + } + if x.StartCol > y.StartCol { + return 1 + } + + return 0 +} + +func (b blocksByStart) Len() int { return len(b) } +func (b blocksByStart) Swap(i, j int) { b[i], b[j] = b[j], b[i] } +func (b blocksByStart) Less(i, j int) bool { + bi, bj := b[i], b[j] + return bi.StartLine < bj.StartLine || bi.StartLine == bj.StartLine && bi.StartCol < bj.StartCol +} diff --git a/tools/pd-ut/ut.go b/tools/pd-ut/ut.go index ab8eca128096..03a903655655 100644 --- a/tools/pd-ut/ut.go +++ b/tools/pd-ut/ut.go @@ -74,7 +74,11 @@ pd-ut build xxx pd-ut run --junitfile xxx // test with race flag -pd-ut run --race` +pd-ut run --race + +// test with coverprofile +pd-ut run --coverprofile xxx +go tool cover --func=xxx` fmt.Println(msg) return true @@ -88,13 +92,26 @@ var ( buildParallel int workDir string // arguments - race bool - junitFile string + race bool + junitFile string + coverProfile string + coverFileTempDir string ) func main() { race = handleFlag("--race") junitFile = stripFlag("--junitfile") + coverProfile = stripFlag("--coverprofile") + + if coverProfile != "" { + var err error + coverFileTempDir, err = os.MkdirTemp(os.TempDir(), "cov") + if err != nil { + fmt.Println("create temp dir fail", coverFileTempDir) + os.Exit(1) + } + defer os.Remove(coverFileTempDir) + } // Get the correct count of CPU if it's in docker. 
p = runtime.GOMAXPROCS(0) @@ -326,6 +343,10 @@ func cmdRun(args ...string) bool { } } + if coverProfile != "" { + collectCoverProfileFile() + } + for _, work := range works { if work.Fail { return false @@ -565,6 +586,11 @@ func failureCases(input []JUnitTestCase) int { func (n *numa) testCommand(pkg string, fn string) *exec.Cmd { args := make([]string, 0, 10) exe := "./" + testFileName(pkg) + if coverProfile != "" { + fileName := strings.ReplaceAll(pkg, "/", "_") + "." + fn + tmpFile := path.Join(coverFileTempDir, fileName) + args = append(args, "-test.coverprofile", tmpFile) + } args = append(args, "-test.cpu", "1") if !race { args = append(args, []string{"-test.timeout", "5m"}...) @@ -600,6 +626,9 @@ func buildTestBinaryMulti(pkgs []string) error { p := strconv.Itoa(buildParallel) cmd := exec.Command("go", "test", "-p", p, "--exec", xprogPath, "-vet", "off", "--tags=tso_function_test,deadlock") + if coverProfile != "" { + cmd.Args = append(cmd.Args, "-cover") + } cmd.Args = append(cmd.Args, packages...) cmd.Dir = workDir cmd.Stdout = os.Stdout @@ -613,6 +642,9 @@ func buildTestBinaryMulti(pkgs []string) error { func buildTestBinary(pkg string) error { //nolint:gosec cmd := exec.Command("go", "test", "-c", "-vet", "off", "--tags=tso_function_test,deadlock", "-o", testFileName(pkg), "-v") + if coverProfile != "" { + cmd.Args = append(cmd.Args, "-cover") + } if race { cmd.Args = append(cmd.Args, "-race") }