Enable large-memory tests solo runs in daily workflow (#1816)
This change re-introduces large-memory tests, run as solo tests in the daily runs.

The change includes:
1. Separating the large-memory tests into their own test block (mainly helpful for manual workflow dispatch).
2. Placing all current large-memory tests in run_solo blocks to prevent potential runner OOM failures (see the sketch below).
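
For reference, the run_solo pattern these diffs apply looks roughly like this. This is a hypothetical sketch: run_solo, start_server, test, and assert_equal come from the suite's Tcl test framework, and the key and test names here are invented:

run_solo {example-large-memory} {
start_server [list overrides [list save ""] tags {"large-memory"}] {
    test {EXAMPLE one 1GB value} {
        r config set proto-max-bulk-len 2147483647        ;# 2gb cap, as in the suites below
        r config set client-query-buffer-limit 2147483647 ;# 2gb, so the bulk fits the query buffer
        r set bigkey [string repeat A 1073741824]         ;# build and store a 1gb payload
        assert_equal 1073741824 [r strlen bigkey]
        r del bigkey
    }
}
}

Because run_solo defers the enclosed servers to run serially, after the parallel test clients finish, a single multi-gigabyte allocation cannot stack on top of concurrent tests' memory and OOM the runner.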

Signed-off-by: Ran Shidlansik <[email protected]>
ranshid authored Mar 5, 2025
1 parent 3efe841 commit 2c9db1a
Showing 6 changed files with 51 additions and 29 deletions.
18 changes: 15 additions & 3 deletions .github/workflows/daily.yml
@@ -15,7 +15,7 @@ on:
        default: "valgrind,sanitizer,tls,freebsd,macos,alpine,32bit,iothreads,ubuntu,rpm-distros,malloc,specific,fortify,reply-schema,arm"
      skiptests:
        description: "tests to skip (delete the ones you wanna keep, do not leave empty)"
-       default: "valkey,modules,sentinel,cluster,unittest"
+       default: "valkey,modules,sentinel,cluster,unittest,large-memory"
      test_args:
        description: "extra test arguments"
        default: ""
@@ -718,6 +718,12 @@ jobs:
      - name: unittest
        if: true && !contains(github.event.inputs.skiptests, 'unittest')
        run: ./src/valkey-unit-tests --large-memory
+     - name: large memory tests
+       if: true && !contains(github.event.inputs.skiptests, 'valkey') && !contains(github.event.inputs.skiptests, 'large-memory')
+       run: ./runtest --accurate --verbose --dump-logs --large-memory --tags large-memory ${{github.event.inputs.test_args}}
+     - name: large memory module api tests
+       if: true && !contains(github.event.inputs.skiptests, 'modules') && !contains(github.event.inputs.skiptests, 'large-memory')
+       run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs --large-memory --tags large-memory ${{github.event.inputs.test_args}}

  test-sanitizer-undefined:
    runs-on: ubuntu-latest
@@ -755,10 +761,10 @@ jobs:
          sudo apt-get install tcl8.6 tclx -y
      - name: test
        if: true && !contains(github.event.inputs.skiptests, 'valkey')
-       run: ./runtest --accurate --verbose --dump-logs --large-memory ${{github.event.inputs.test_args}}
+       run: ./runtest --accurate --verbose --dump-logs ${{github.event.inputs.test_args}}
      - name: module api test
        if: true && !contains(github.event.inputs.skiptests, 'modules')
-       run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs --large-memory ${{github.event.inputs.test_args}}
+       run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs ${{github.event.inputs.test_args}}
      - name: sentinel tests
        if: true && !contains(github.event.inputs.skiptests, 'sentinel')
        run: ./runtest-sentinel ${{github.event.inputs.cluster_test_args}}
@@ -768,6 +774,12 @@
      - name: unittest
        if: true && !contains(github.event.inputs.skiptests, 'unittest')
        run: ./src/valkey-unit-tests --accurate --large-memory
+     - name: large memory tests
+       if: true && !contains(github.event.inputs.skiptests, 'valkey') && !contains(github.event.inputs.skiptests, 'large-memory')
+       run: ./runtest --accurate --verbose --dump-logs --large-memory --tags large-memory ${{github.event.inputs.test_args}}
+     - name: large memory module api tests
+       if: true && !contains(github.event.inputs.skiptests, 'modules') && !contains(github.event.inputs.skiptests, 'large-memory')
+       run: CFLAGS='-Werror' ./runtest-moduleapi --verbose --dump-logs --large-memory --tags large-memory ${{github.event.inputs.test_args}}

  test-sanitizer-force-defrag:
    runs-on: ubuntu-latest
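The new steps are doubly gated: each runs only when neither its suite token ('valkey' or 'modules') nor 'large-memory' appears in skiptests. Scheduled daily runs have no inputs, so both contains() checks come up false and the steps run; manual dispatches start from the new default above and skip them unless 'large-memory' is deleted from the list. A rough Tcl model of the gate (step_enabled is a hypothetical helper; GitHub's contains() is a case-insensitive substring test on the input string):

proc step_enabled {skiptests suite} {
    set s [string tolower $skiptests]
    expr {[string first $suite $s] < 0 && [string first "large-memory" $s] < 0}
}
puts [step_enabled "valkey,modules,sentinel,cluster,unittest,large-memory" "valkey"] ;# 0: manual-dispatch default skips
puts [step_enabled "" "valkey"]                                                      ;# 1: daily scheduled run executes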
2 changes: 1 addition & 1 deletion tests/unit/bitops.tcl
@@ -631,7 +631,7 @@ start_server {tags {"bitops"}} {
}

run_solo {bitops-large-memory} {
-start_server {tags {"bitops"}} {
+start_server {tags {"bitops large-memory"}} {
    test "BIT pos larger than UINT_MAX" {
        set bytes [expr (1 << 29) + 1]
        set bitpos [expr (1 << 32)]
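The constants in this test are chosen to push a bit offset just past the 32-bit boundary: (1 << 29) + 1 bytes is 512MiB plus one byte, so the final byte's bits start at offset 8 * (1 << 29) = 1 << 32, one past UINT_MAX. The arithmetic, checkable in tclsh:

puts [expr {8 * (1 << 29)}]   ;# 4294967296 = 1 << 32, the bitpos under test
puts [expr {(1 << 32) - 1}]   ;# 4294967295 = UINT_MAX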
44 changes: 27 additions & 17 deletions tests/unit/moduleapi/stream.tcl
@@ -61,23 +61,6 @@ start_server {tags {"modules"}} {
        assert_equal $result $n
    }

-    test {Module stream XADD big fields doesn't create empty key} {
-        set original_proto [config_get_set proto-max-bulk-len 2147483647] ;#2gb
-        set original_query [config_get_set client-query-buffer-limit 2147483647] ;#2gb
-
-        r del mystream
-        r write "*4\r\n\$10\r\nstream.add\r\n\$8\r\nmystream\r\n\$5\r\nfield\r\n"
-        catch {
-            write_big_bulk 1073741824 ;#1gb
-        } err
-        assert {$err eq "ERR StreamAdd failed"}
-        assert_equal 0 [r exists mystream]
-
-        # restore defaults
-        r config set proto-max-bulk-len $original_proto
-        r config set client-query-buffer-limit $original_query
-    } {OK} {large-memory}
-
    test {Module stream iterator} {
        r del mystream
        set streamid1 [r xadd mystream * item 1 value a]
@@ -174,3 +157,30 @@ start_server {tags {"modules"}} {
        assert_equal {OK} [r module unload stream]
    }
}
+
+run_solo {stream-large-memory} {
+    set testmodule [file normalize tests/modules/stream.so]
+    start_server {tags {"modules large-memory"}} {
+        r module load $testmodule
+
+        test {Module stream XADD big fields doesn't create empty key} {
+            set original_proto [config_get_set proto-max-bulk-len 2147483647] ;#2gb
+            set original_query [config_get_set client-query-buffer-limit 2147483647] ;#2gb
+
+            r del mystream
+            r write "*4\r\n\$10\r\nstream.add\r\n\$8\r\nmystream\r\n\$5\r\nfield\r\n"
+            catch {
+                write_big_bulk 1073741824 ;#1gb
+            } err
+            assert {$err eq "ERR StreamAdd failed"}
+            assert_equal 0 [r exists mystream]
+
+            # restore defaults
+            r config set proto-max-bulk-len $original_proto
+            r config set client-query-buffer-limit $original_query
+        } {OK} {large-memory}
+
+        assert_equal {OK} [r module unload stream]
+    }
+}
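The raw r write in the moved test hand-frames a RESP command: "*4" announces a four-argument array, then each "$<len>" bulk string carries one argument ("stream.add" is 10 bytes, "mystream" 8, "field" 5). A hypothetical helper showing the framing; the real test deliberately stops after three bulk strings and streams the fourth, 1GB argument with write_big_bulk, so the value is generated on the wire instead of being built in the test client's memory:

proc resp_command {args} {
    # Frame a command as a RESP array of bulk strings:
    # "*<argc>\r\n" then "$<len>\r\n<payload>\r\n" per argument.
    set out "*[llength $args]\r\n"
    foreach arg $args {
        append out "\$[string length $arg]\r\n$arg\r\n"
    }
    return $out
}
# resp_command stream.add mystream field frames the same three bulk strings,
# but with a *3 header; the test writes *4 and leaves the last slot open.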

2 changes: 1 addition & 1 deletion tests/unit/type/list.tcl
@@ -282,7 +282,7 @@ foreach type {listpack quicklist} {
}

run_solo {list-large-memory} {
-start_server [list overrides [list save ""] ] {
+start_server [list overrides [list save ""] tags {"large-memory"}] {

    # test if the server supports such large configs (avoid 32 bit builds)
    catch {
2 changes: 1 addition & 1 deletion tests/unit/type/set.tcl
@@ -1169,7 +1169,7 @@ foreach type {single multiple single_multiple} {
}

run_solo {set-large-memory} {
-start_server [list overrides [list save ""] ] {
+start_server [list overrides [list save ""] tags {"large-memory"}] {

    # test if the server supports such large configs (avoid 32 bit builds)
    catch {
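In list.tcl and set.tcl the tag is added through start_server's options argument, which is a flat key/value list, so the new tags entry rides alongside the existing overrides. A sketch of how the list reads; the actual option handling lives in the test framework, so the semantics here are an assumption:

set opts [list overrides [list save ""] tags {"large-memory"}]
puts [dict get $opts overrides]   ;# save {} -> RDB snapshotting disabled for the run
puts [dict get $opts tags]        ;# "large-memory" -> selectable or skippable via --tags

Disabling save matters for these suites because a background snapshot fork of a multi-gigabyte dataset could duplicate much of the test's memory footprint.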
12 changes: 6 additions & 6 deletions tests/unit/violations.tcl
@@ -1,7 +1,7 @@
# One XADD with one huge 5GB field
# Expected to fail resulting in an empty stream
run_solo {violations} {
-start_server [list overrides [list save ""] ] {
+start_server [list overrides [list save ""] tags {"large-memory"}] {
    test {XADD one huge field} {
        r config set proto-max-bulk-len 10000000000 ;#10gb
        r config set client-query-buffer-limit 10000000000 ;#10gb
@@ -18,7 +18,7 @@ start_server [list overrides [list save ""] ] {
# One XADD with one huge (exactly nearly) 4GB field
# This uncovers the overflow in lpEncodeGetType
# Expected to fail resulting in an empty stream
-start_server [list overrides [list save ""] ] {
+start_server [list overrides [list save ""] tags {"large-memory"}] {
    test {XADD one huge field - 1} {
        r config set proto-max-bulk-len 10000000000 ;#10gb
        r config set client-query-buffer-limit 10000000000 ;#10gb
@@ -33,7 +33,7 @@ start_server [list overrides [list save ""] ] {
}

# Gradually add big stream fields using repeated XADD calls
-start_server [list overrides [list save ""] ] {
+start_server [list overrides [list save ""] tags {"large-memory"}] {
    test {several XADD big fields} {
        r config set stream-node-max-bytes 0
        for {set j 0} {$j<10} {incr j} {
@@ -46,7 +46,7 @@ start_server [list overrides [list save ""] ] {

# Add over 4GB to a single stream listpack (one XADD command)
# Expected to fail resulting in an empty stream
-start_server [list overrides [list save ""] ] {
+start_server [list overrides [list save ""] tags {"large-memory"}] {
    test {single XADD big fields} {
        r write "*23\r\n\$4\r\nXADD\r\n\$1\r\nS\r\n\$1\r\n*\r\n"
        for {set j 0} {$j<10} {incr j} {
@@ -63,7 +63,7 @@ start_server [list overrides [list save ""] ] {
# Gradually add big hash fields using repeated HSET calls
# This reproduces the overflow in the call to ziplistResize
# Object will be converted to hashtable encoding
-start_server [list overrides [list save ""] ] {
+start_server [list overrides [list save ""] tags {"large-memory"}] {
    r config set hash-max-ziplist-value 1000000000 ;#1gb
    test {hash with many big fields} {
        for {set j 0} {$j<10} {incr j} {
@@ -75,7 +75,7 @@ start_server [list overrides [list save ""] ] {

# Add over 4GB to a single hash field (one HSET command)
# Object will be converted to hashtable encoding
-start_server [list overrides [list save ""] ] {
+start_server [list overrides [list save ""] tags {"large-memory"}] {
    test {hash with one huge field} {
        catch {r config set hash-max-ziplist-value 10000000000} ;#10gb
        r config set proto-max-bulk-len 10000000000 ;#10gb
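The 4GB figures throughout these tests sit at the unsigned 32-bit boundary, where the listpack and ziplist size bookkeeping can overflow (hence the lpEncodeGetType and ziplistResize mentions in the comments above). The boundary values, checkable in tclsh:

puts [expr {1 << 32}]        ;# 4294967296 bytes = 4GiB, the first size past uint32
puts [expr {(1 << 32) - 1}]  ;# 4294967295, the largest value a uint32 can hold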
