author     ubansal <ubansal@redhat.com>  2021-02-12 16:08:29 +0530
committer  Vaibhav Mahajan <vamahaja@redhat.com>  2021-02-25 10:48:48 +0000
commit     8158628b63881938dcbffa230e631681876bd604 (patch)
tree       064d54dc3e01015dcf9394bd1fc4d9f67c41ed38 /tests/functional/provisioning
parent     33b73d5d23a3f7467da4d30123c2781e748dcf14 (diff)
[Test] Test node failure when PV is mounted with app pods running IO (HEAD, master)
Change-Id: Iec37949fbc397e877cc78c232339f2219ad7021a
Signed-off-by: ubansal <ubansal@redhat.com>
Diffstat (limited to 'tests/functional/provisioning')
-rw-r--r--  tests/functional/provisioning/test_dynamic_provisioning_file.py  |  42
1 file changed, 41 insertions(+), 1 deletion(-)
diff --git a/tests/functional/provisioning/test_dynamic_provisioning_file.py b/tests/functional/provisioning/test_dynamic_provisioning_file.py
index cdffdbf6..87ff754a 100644
--- a/tests/functional/provisioning/test_dynamic_provisioning_file.py
+++ b/tests/functional/provisioning/test_dynamic_provisioning_file.py
@@ -4,6 +4,7 @@ from glusto.core import Glusto as g
import pytest

from openshiftstoragelibs.baseclass import BaseClass
+from openshiftstoragelibs import command
from openshiftstoragelibs.exceptions import ExecutionError
from openshiftstoragelibs.heketi_ops import (
    heketi_node_info,
@@ -13,7 +14,12 @@ from openshiftstoragelibs.heketi_ops import (
    heketi_volume_list,
    verify_volume_name_prefix,
)
-from openshiftstoragelibs.node_ops import node_reboot_by_command
+from openshiftstoragelibs.node_ops import (
+    find_vm_name_by_ip_or_hostname,
+    node_reboot_by_command,
+    power_off_vm_by_name,
+    power_on_vm_by_name
+)
from openshiftstoragelibs.openshift_ops import (
    cmd_run_on_gluster_pod_or_node,
    get_gluster_host_ips_by_pvc_name,
@@ -545,3 +551,37 @@ class TestDynamicProvisioningP0(BaseClass):
"-o=custom-columns=:.spec.storageClassName" % pvc_name)
out = self.cmd_run(get_sc_of_pvc_cmd)
self.assertEqual(out, self.sc_name)
+
+ @pytest.mark.tier2
+ def test_node_failure_pv_mounted(self):
+ """Test node failure when PV is mounted with app pods running"""
+ filepath = "/mnt/file_for_testing_volume.log"
+ pvc_name = self.create_and_wait_for_pvc()
+
+ dc_and_pod_names = self.create_dcs_with_pvc(pvc_name)
+ dc_name, pod_name = dc_and_pod_names[pvc_name]
+
+ mount_point = "df -kh /mnt -P | tail -1 | awk '{{print $1}}'"
+ pod_cmd = "oc exec {} -- {}".format(pod_name, mount_point)
+ hostname = command.cmd_run(pod_cmd, hostname=self.node)
+ hostname = hostname.split(":")[0]
+
+ vm_name = find_vm_name_by_ip_or_hostname(hostname)
+ self.addCleanup(power_on_vm_by_name, vm_name)
+ power_off_vm_by_name(vm_name)
+
+ cmd = "dd if=/dev/urandom of={} bs=1K count=100".format(filepath)
+ ret, _, err = oc_rsh(self.node, pod_name, cmd)
+ self.assertFalse(
+ ret, "Failed to execute command {} on {} with error {}"
+ .format(cmd, self.node, err))
+
+ oc_delete(self.node, 'pod', pod_name)
+ wait_for_resource_absence(self.node, 'pod', pod_name)
+ pod_name = get_pod_name_from_dc(self.node, dc_name)
+ wait_for_pod_be_ready(self.node, pod_name)
+
+ ret, _, err = oc_rsh(self.node, pod_name, cmd)
+ self.assertFalse(
+ ret, "Failed to execute command {} on {} with error {}"
+ .format(cmd, self.node, err))
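
For readers skimming the patch, the least obvious step in the new test is resolving which hypervisor VM backs the pod's /mnt mount so that it can be powered off. The sketch below condenses that lookup into a standalone helper using the same openshiftstoragelibs calls the patch imports; the function name backing_vm_for_pod_mount and its ocp_node/mount parameters are illustrative additions, not part of the change.

# A minimal sketch (see note above), mirroring the inline lookup done in
# test_node_failure_pv_mounted: map a pod's mount to the gluster host serving
# it, then to the VM name expected by power_off_vm_by_name()/power_on_vm_by_name().
from openshiftstoragelibs import command
from openshiftstoragelibs.node_ops import find_vm_name_by_ip_or_hostname


def backing_vm_for_pod_mount(ocp_node, pod_name, mount="/mnt"):
    # 'df -P <mount>' prints the filesystem source in its first column,
    # e.g. '<gluster-host>:vol_<id>'; the part before ':' is the gluster
    # server that serves the PV.
    df_cmd = ("oc exec {} -- df -kh {} -P | tail -1"
              " | awk '{{print $1}}'".format(pod_name, mount))
    source = command.cmd_run(df_cmd, hostname=ocp_node)
    gluster_host = source.split(":")[0]
    # Translate the gluster host IP/hostname into the VM name used by the
    # power management helpers.
    return find_vm_name_by_ip_or_hostname(gluster_host)

Note also that the test registers power_on_vm_by_name() with self.addCleanup() before calling power_off_vm_by_name(), so the powered-off gluster node is restored even if a later assertion fails.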