From 0a8d5212b4a15894be3224c8805523e50307a478 Mon Sep 17 00:00:00 2001
From: Oleg Babin
Date: Thu, 30 Jun 2022 16:41:38 +0300
Subject: [PATCH] add ASAN testing workflow

This patch introduces a way to enable ASAN in our tests.
A special workflow is also introduced. This patch also fixes
a heap-buffer-overflow that it caught.

Closes #67
---
 .github/workflows/asan_testing.yml | 67 ++++++++++++++++++++++++++++++
 .github/workflows/fast_testing.yml | 2 +-
 CMakeLists.txt | 16 ++++++-
 kafka/consumer_msg.c | 12 ++----
 tests/test_consumer.py | 3 +-
 tests/test_producer.py | 3 +-
 6 files changed, 91 insertions(+), 12 deletions(-)
 create mode 100644 .github/workflows/asan_testing.yml

diff --git a/.github/workflows/asan_testing.yml b/.github/workflows/asan_testing.yml
new file mode 100644
index 0000000..9a0b428
--- /dev/null
+++ b/.github/workflows/asan_testing.yml
@@ -0,0 +1,67 @@
+name: asan_testing
+
+on:
+  push:
+  pull_request:
+  workflow_dispatch:
+
+jobs:
+  linux:
+    # We want to run on external PRs, but not on our own internal
+    # PRs as they'll be run by the push to the branch.
+    #
+    # The main trick is described here:
+    # https://github.com/Dart-Code/Dart-Code/pull/2375
+    if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository
+
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/setup-python@v3
+        with:
+          python-version: '3.10'
+
+      - name: Clone the module
+        uses: actions/checkout@v3
+        with:
+          submodules: true
+
+      - name: Start Kafka
+        uses: 280780363/kafka-action@v1.0
+        with:
+          kafka version: "latest"
+          zookeeper version: "latest"
+          kafka port: 9092
+          auto create topic: "true"
+
+      - name: Install Python dependencies
+        run: pip3 install -r tests/requirements.txt
+
+      - name: Build module
+        run: |
+          export MAKEFLAGS=-j8
+          export CC=clang
+          export CXX=clang++
+          git clone https://github.com/tarantool/tarantool
+          cd tarantool
+          git checkout 2.10
+          export LSAN_OPTIONS=suppressions=${PWD}/asan/lsan.supp
+          cmake . 
-DENABLE_ASAN=ON -DENABLE_DIST=ON
+          make -j16
+          sudo make install
+          cd ..
+          tarantoolctl rocks STATIC_BUILD=ON ENABLE_ASAN=ON make
+
+      - name: Run tarantool application
+        run: |
+          export TT_LOG=tarantool.log
+          export LSAN_OPTIONS=suppressions=${PWD}/tarantool/asan/lsan.supp
+          tarantool tests/app.lua > output.log 2>&1 &
+
+      - name: Run test
+        run: KAFKA_HOST=localhost:9092 pytest tests
+
+      - name: Print Tarantool logs
+        if: always()
+        run: |
+          cat tarantool.log
+          cat output.log
diff --git a/.github/workflows/fast_testing.yml b/.github/workflows/fast_testing.yml
index 676839d..e111ca6 100644
--- a/.github/workflows/fast_testing.yml
+++ b/.github/workflows/fast_testing.yml
@@ -1,4 +1,4 @@
-name: fast_testing
+name: fast_testing
 
 on:
   push:
diff --git a/CMakeLists.txt b/CMakeLists.txt
index e98b77b..69b5c3e 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -22,10 +22,24 @@ else()
 endif()
 message("Found OPENSSL version: ${OPENSSL_VERSION}")
 
+option(ENABLE_ASAN "Enable AddressSanitizer" OFF)
+if (ENABLE_ASAN)
+    set(LIBRDKAFKA_C_FLAGS "-fsanitize=address")
+    set(LIBRDKAFKA_CXX_FLAGS "-fsanitize=address")
+    set(LIBRDKAFKA_FLAGS "--enable-devel")
+endif()
+
 if(STATIC_BUILD)
     add_custom_command(
         OUTPUT ${CMAKE_BINARY_DIR}/librdkafka/lib/librdkafka.a
-        COMMAND ./configure --enable-ssl --disable-zstd --prefix=${CMAKE_BINARY_DIR}/librdkafka
+        COMMAND ./configure --enable-ssl
+                            --disable-zstd
+                            --prefix=${CMAKE_BINARY_DIR}/librdkafka
+                            ${LIBRDKAFKA_FLAGS}
+                            --cc=${CMAKE_C_COMPILER}
+                            --cxx=${CMAKE_CXX_COMPILER}
+                            --CFLAGS=${LIBRDKAFKA_C_FLAGS}
+                            --CXXFLAGS=${LIBRDKAFKA_CXX_FLAGS}
         COMMAND $(MAKE) -j
         COMMAND $(MAKE) install
         WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/librdkafka
diff --git a/kafka/consumer_msg.c b/kafka/consumer_msg.c
index 90c11f1..3e49fce 100644
--- a/kafka/consumer_msg.c
+++ b/kafka/consumer_msg.c
@@ -101,10 +101,8 @@ lua_consumer_msg_tostring(struct lua_State *L) {
     if (msg->key_len <= 0 || msg->key == NULL) {
         memcpy(key, null_literal, sizeof(null_literal));
     } else {
-        strncpy(key, msg->key, 
msg->key_len + 1); - if (key[msg->key_len] != '\0') { - key[msg->key_len] = '\0'; - } + strncpy(key, msg->key, msg->key_len); + key[msg->key_len] = '\0'; } size_t value_len = msg->value_len <= 0 ? sizeof(null_literal) : msg->value_len + 1; @@ -113,10 +111,8 @@ lua_consumer_msg_tostring(struct lua_State *L) { if (msg->value_len <= 0 || msg->value == NULL) { memcpy(value, null_literal, sizeof(null_literal)); } else { - strncpy(value, msg->value, msg->value_len + 1); - if (value[msg->value_len] != '\0') { - value[msg->value_len] = '\0'; - } + strncpy(value, msg->value, msg->value_len); + value[msg->value_len] = '\0'; } lua_pushfstring(L, diff --git a/tests/test_consumer.py b/tests/test_consumer.py index a5d6397..217af85 100644 --- a/tests/test_consumer.py +++ b/tests/test_consumer.py @@ -48,7 +48,7 @@ def create_consumer(server, *args): def write_into_kafka(topic, messages): - loop = asyncio.get_event_loop() + loop = asyncio.get_event_loop_policy().new_event_loop() async def send(): producer = AIOKafkaProducer(bootstrap_servers='localhost:9092') @@ -74,6 +74,7 @@ async def send(): await producer.stop() loop.run_until_complete(send()) + loop.close() def test_consumer_should_consume_msgs(): diff --git a/tests/test_producer.py b/tests/test_producer.py index ed55288..23de4aa 100644 --- a/tests/test_producer.py +++ b/tests/test_producer.py @@ -33,7 +33,7 @@ def test_producer_should_produce_msgs(): ] server.call("producer.produce", [messages]) - loop = asyncio.get_event_loop() + loop = asyncio.get_event_loop_policy().new_event_loop() async def test(): kafka_output = [] @@ -73,6 +73,7 @@ async def consume(): assert kafka_output == messages loop.run_until_complete(test()) + loop.close() server.call("producer.close", [])