diff --git a/.github/workflows/daily.yml b/.github/workflows/daily.yml
index 51a83b4135..97f3410a5c 100644
--- a/.github/workflows/daily.yml
+++ b/.github/workflows/daily.yml
@@ -15,7 +15,7 @@ on:
         default: "valgrind,sanitizer,tls,freebsd,macos,alpine,32bit,iothreads,ubuntu,rpm-distros,malloc,specific,fortify,reply-schema,arm"
       skiptests:
         description: "tests to skip (delete the ones you wanna keep, do not leave empty)"
-        default: "valkey,modules,sentinel,cluster,unittest"
+        default: "valkey,modules,sentinel,cluster,unittest,large-memory"
       test_args:
         description: "extra test arguments"
         default: ""
@@ -718,6 +718,12 @@ jobs:
       - name: unittest
         if: true && !contains(github.event.inputs.skiptests, 'unittest')
         run: ./src/valkey-unit-tests --large-memory
+      - name: large memory tests
+        if: true && !contains(github.event.inputs.skiptests, 'valkey') && !contains(github.event.inputs.skiptests, 'large-memory')
+        run: ./runtest --accurate --verbose --dump-logs --large-memory --tags large-memory ${{github.event.inputs.test_args}}
+      - name: large memory module api tests
+        if: true && !contains(github.event.inputs.skiptests, 'modules') && !contains(github.event.inputs.skiptests, 'large-memory')
+        run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs --large-memory --tags large-memory ${{github.event.inputs.test_args}}
 
   test-sanitizer-undefined:
     runs-on: ubuntu-latest
@@ -755,10 +761,10 @@ jobs:
           sudo apt-get install tcl8.6 tclx -y
       - name: test
         if: true && !contains(github.event.inputs.skiptests, 'valkey')
-        run: ./runtest --accurate --verbose --dump-logs --large-memory ${{github.event.inputs.test_args}}
+        run: ./runtest --accurate --verbose --dump-logs ${{github.event.inputs.test_args}}
       - name: module api test
         if: true && !contains(github.event.inputs.skiptests, 'modules')
-        run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs --large-memory ${{github.event.inputs.test_args}}
+        run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs ${{github.event.inputs.test_args}}
       - name: sentinel tests
         if: true && !contains(github.event.inputs.skiptests, 'sentinel')
         run: ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}
@@ -768,6 +774,12 @@
       - name: unittest
         if: true && !contains(github.event.inputs.skiptests, 'unittest')
         run: ./src/valkey-unit-tests --accurate --large-memory
+      - name: large memory tests
+        if: true && !contains(github.event.inputs.skiptests, 'valkey') && !contains(github.event.inputs.skiptests, 'large-memory')
+        run: ./runtest --accurate --verbose --dump-logs --large-memory --tags large-memory ${{github.event.inputs.test_args}}
+      - name: large memory module api tests
+        if: true && !contains(github.event.inputs.skiptests, 'modules') && !contains(github.event.inputs.skiptests, 'large-memory')
+        run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs --large-memory --tags large-memory ${{github.event.inputs.test_args}}
 
   test-sanitizer-force-defrag:
     runs-on: ubuntu-latest
diff --git a/tests/unit/bitops.tcl b/tests/unit/bitops.tcl
index 125b0a3d1f..b8715f2e23 100644
--- a/tests/unit/bitops.tcl
+++ b/tests/unit/bitops.tcl
@@ -631,7 +631,7 @@ start_server {tags {"bitops"}} {
 }
 
 run_solo {bitops-large-memory} {
-start_server {tags {"bitops"}} {
+start_server {tags {"bitops large-memory"}} {
     test "BIT pos larger than UINT_MAX" {
         set bytes [expr (1 << 29) + 1]
         set bitpos [expr (1 << 32)]
diff --git a/tests/unit/moduleapi/stream.tcl b/tests/unit/moduleapi/stream.tcl
index 92c058b51d..0d025550dd 100644
--- a/tests/unit/moduleapi/stream.tcl
+++ b/tests/unit/moduleapi/stream.tcl
@@ -61,23 +61,6 @@ start_server {tags {"modules"}} {
         assert_equal $result $n
     }
 
-    test {Module stream XADD big fields doesn't create empty key} {
-        set original_proto [config_get_set proto-max-bulk-len 2147483647] ;#2gb
-        set original_query [config_get_set client-query-buffer-limit 2147483647] ;#2gb
-
-        r del mystream
-        r write "*4\r\n\$10\r\nstream.add\r\n\$8\r\nmystream\r\n\$5\r\nfield\r\n"
-        catch {
-            write_big_bulk 1073741824 ;#1gb
-        } err
-        assert {$err eq "ERR StreamAdd failed"}
-        assert_equal 0 [r exists mystream]
-
-        # restore defaults
-        r config set proto-max-bulk-len $original_proto
-        r config set client-query-buffer-limit $original_query
-    } {OK} {large-memory}
-
     test {Module stream iterator} {
         r del mystream
         set streamid1 [r xadd mystream * item 1 value a]
@@ -174,3 +157,30 @@ start_server {tags {"modules"}} {
         assert_equal {OK} [r module unload stream]
     }
 }
+
+run_solo {stream-large-memory} {
+    set testmodule [file normalize tests/modules/stream.so]
+    start_server {tags {"modules large-memory"}} {
+        r module load $testmodule
+
+        test {Module stream XADD big fields doesn't create empty key} {
+            set original_proto [config_get_set proto-max-bulk-len 2147483647] ;#2gb
+            set original_query [config_get_set client-query-buffer-limit 2147483647] ;#2gb
+
+            r del mystream
+            r write "*4\r\n\$10\r\nstream.add\r\n\$8\r\nmystream\r\n\$5\r\nfield\r\n"
+            catch {
+                write_big_bulk 1073741824 ;#1gb
+            } err
+            assert {$err eq "ERR StreamAdd failed"}
+            assert_equal 0 [r exists mystream]
+
+            # restore defaults
+            r config set proto-max-bulk-len $original_proto
+            r config set client-query-buffer-limit $original_query
+        } {OK} {large-memory}
+
+        assert_equal {OK} [r module unload stream]
+    }
+}
+
diff --git a/tests/unit/type/list.tcl b/tests/unit/type/list.tcl
index 83a93bffbf..a97a102dd9 100644
--- a/tests/unit/type/list.tcl
+++ b/tests/unit/type/list.tcl
@@ -282,7 +282,7 @@ foreach type {listpack quicklist} {
 }
 
 run_solo {list-large-memory} {
-start_server [list overrides [list save ""] ] {
+start_server [list overrides [list save ""] tags {"large-memory"}] {
 
 # test if the server supports such large configs (avoid 32 bit builds)
 catch {
diff --git a/tests/unit/type/set.tcl b/tests/unit/type/set.tcl
index 1871ec9b4d..d58d5d2137 100644
--- a/tests/unit/type/set.tcl
+++ b/tests/unit/type/set.tcl
@@ -1169,7 +1169,7 @@ foreach type {single multiple single_multiple} {
 }
 
 run_solo {set-large-memory} {
-start_server [list overrides [list save ""] ] {
+start_server [list overrides [list save ""] tags {"large-memory"}] {
 
 # test if the server supports such large configs (avoid 32 bit builds)
 catch {
diff --git a/tests/unit/violations.tcl b/tests/unit/violations.tcl
index 783f306d10..3c2c5ad876 100644
--- a/tests/unit/violations.tcl
+++ b/tests/unit/violations.tcl
@@ -1,7 +1,7 @@
 # One XADD with one huge 5GB field
 # Expected to fail resulting in an empty stream
 run_solo {violations} {
-start_server [list overrides [list save ""] ] {
+start_server [list overrides [list save ""] tags {"large-memory"}] {
     test {XADD one huge field} {
         r config set proto-max-bulk-len 10000000000 ;#10gb
         r config set client-query-buffer-limit 10000000000 ;#10gb
@@ -18,7 +18,7 @@ start_server [list overrides [list save ""] ] {
 # One XADD with one huge (exactly nearly) 4GB field
 # This uncovers the overflow in lpEncodeGetType
 # Expected to fail resulting in an empty stream
-start_server [list overrides [list save ""] ] {
+start_server [list overrides [list save ""] tags {"large-memory"}] {
     test {XADD one huge field - 1} {
         r config set proto-max-bulk-len 10000000000 ;#10gb
         r config set client-query-buffer-limit 10000000000 ;#10gb
@@ -33,7 +33,7 @@ start_server [list overrides [list save ""] ] {
 }
 
 # Gradually add big stream fields using repeated XADD calls
-start_server [list overrides [list save ""] ] {
+start_server [list overrides [list save ""] tags {"large-memory"}] {
     test {several XADD big fields} {
         r config set stream-node-max-bytes 0
         for {set j 0} {$j<10} {incr j} {
@@ -46,7 +46,7 @@ start_server [list overrides [list save ""] ] {
 
 # Add over 4GB to a single stream listpack (one XADD command)
 # Expected to fail resulting in an empty stream
-start_server [list overrides [list save ""] ] {
+start_server [list overrides [list save ""] tags {"large-memory"}] {
     test {single XADD big fields} {
         r write "*23\r\n\$4\r\nXADD\r\n\$1\r\nS\r\n\$1\r\n*\r\n"
         for {set j 0} {$j<10} {incr j} {
@@ -63,7 +63,7 @@ start_server [list overrides [list save ""] ] {
 # Gradually add big hash fields using repeated HSET calls
 # This reproduces the overflow in the call to ziplistResize
 # Object will be converted to hashtable encoding
-start_server [list overrides [list save ""] ] {
+start_server [list overrides [list save ""] tags {"large-memory"}] {
     r config set hash-max-ziplist-value 1000000000 ;#1gb
     test {hash with many big fields} {
         for {set j 0} {$j<10} {incr j} {
@@ -75,7 +75,7 @@ start_server [list overrides [list save ""] ] {
 
 # Add over 4GB to a single hash field (one HSET command)
 # Object will be converted to hashtable encoding
-start_server [list overrides [list save ""] ] {
+start_server [list overrides [list save ""] tags {"large-memory"}] {
     test {hash with one huge field} {
         catch {r config set hash-max-ziplist-value 10000000000} ;#10gb
         r config set proto-max-bulk-len 10000000000 ;#10gb
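
--
For local verification, the new workflow steps above reduce to the commands
below. A minimal sketch, assuming a built tree with the Tcl test harness: the
flags are exactly those added in daily.yml, while the note about memory
headroom is an assumption based on the suites involved, not something this
patch enforces.

    # Run only the server test suites tagged large-memory
    # (needs a 64-bit build and several GB of free memory)
    ./runtest --large-memory --tags large-memory

    # Run only the module API suites tagged large-memory
    ./runtest-moduleapi --large-memory --tags large-memory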