Improvements to CHK migration test
alex-zaitsev committed Nov 25, 2024
1 parent 2893f40 commit 3e43e6d
Showing 2 changed files with 21 additions and 1 deletion.
18 changes: 17 additions & 1 deletion tests/e2e/test_operator.py
@@ -4870,6 +4870,21 @@ def test_051_1(self):

check_replication(chi, {0,1}, 1)

with Then("Unattach old CHK resources"):
kubectl.launch(f"patch sts {chk} -p " + """\'{"metadata":{"ownerReferences":null}}\'""")
kubectl.launch(f"patch cm {chk} -p " + """\'{"metadata":{"ownerReferences":null}}\'""")
kubectl.launch(f"patch service {chk} -p " + """\'{"metadata":{"ownerReferences":null}}\'""")
kubectl.launch(f"patch service {chk}-headless -p " + """\'{"metadata":{"ownerReferences":null}}\'""")
kubectl.launch(f"label pod -lapp={chk} app-")
kubectl.launch(f"label sts -lapp={chk} app-")

with Then("Confirm that statefulset and pod are still running if we delete chk"):
kubectl.delete_kind("chk", chk)
assert kubectl.get_field("pod", "test-051-chk-0", ".status.phase") == "Running"
assert kubectl.get_count("sts", "test-051-chk") == 1
assert kubectl.get_count("cm", "test-051-chk") == 1
assert kubectl.get_count("service", "test-051-chk") == 1

old_pvc = "both-paths-test-051-chk-0"
pv = kubectl.get_pv_name(old_pvc)
new_pvc = "default-chk-test-051-chk-single-0-0-0"
@@ -4878,7 +4893,8 @@ def test_051_1(self):
kubectl.launch(f"patch pv {pv}" + """ -p \'{"spec":{"persistentVolumeReclaimPolicy":"Retain"}}\'""")

with Then("Delete old Keeper resources"):
kubectl.delete_kind("chk", chk)
kubectl.delete_kind("sts", "test-051-chk")
kubectl.delete_kind("pod", "test-051-chk-0")
kubectl.delete_kind("pvc", old_pvc)

with Then("Unmount PV from old PVC"):
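The second hunk handles the storage side of the migration: the PersistentVolume behind the old PVC is switched to the Retain reclaim policy before the old StatefulSet, pod, and PVC are deleted, so the Keeper data outlives the deletion and the volume can be re-bound under the new PVC name. The "Unmount PV from old PVC" step is truncated in this view; a common way to do it is to clear the volume's claimRef so the released PV becomes Available again. The line below is a sketch of that assumption, written in the test's own style, not code taken from this diff:

    # Assumed approach for the truncated "Unmount PV from old PVC" step:
    # drop the claimRef left by the deleted PVC so the retained volume
    # becomes Available and can be bound to the new PVC (new_pvc).
    kubectl.launch(f"patch pv {pv}" + """ -p \'{"spec":{"claimRef":null}}\'""")
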
4 changes: 4 additions & 0 deletions tests/e2e/util.py
@@ -129,6 +129,10 @@ def require_keeper(keeper_manifest="", keeper_type=settings.keeper_type, force_i
kubectl.wait_pod_status(pod_name, "Running")
kubectl.wait_container_status(pod_name, "true")

if keeper_type == "CHK" or keeper_type == "clickhouse-keeper_with_chk":
kubectl.wait_chk_status("clickhouse-keeper", 'Completed')



def wait_clickhouse_cluster_ready(chi):
with Given("All expected pods present in system.clusters"):
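The util.py change makes require_keeper wait not only for the Keeper pod and container, but also for the CHK custom resource itself to report Completed when the keeper type is CHK-based. A sketch of what such a wait helper might look like, assuming wait_chk_status polls a .status.status field the way the CHI helpers do; the field path, retry count, and sleep interval are assumptions, not taken from this commit:

import time

def wait_chk_status(name, status, retries=30, interval=5):
    # Poll the chk resource until its reported status matches (assumed field path).
    last = None
    for _ in range(retries):
        last = kubectl.get_field("chk", name, ".status.status")
        if last == status:
            return
        time.sleep(interval)
    raise AssertionError(f"chk/{name} never reached {status!r}, last seen {last!r}")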
