diff --git a/cfme/containers/provider/__init__.py b/cfme/containers/provider/__init__.py
index 8487716e86..f1a712e6d4 100644
--- a/cfme/containers/provider/__init__.py
+++ b/cfme/containers/provider/__init__.py
@@ -6,12 +6,12 @@
 from navmazing import NavigateToSibling, NavigateToAttribute
-from cfme.base.login import BaseLoggedInPage
 from widgetastic_patternfly import SelectorDropdown, Button, Dropdown
 from widgetastic.widget import Text
-from utils.wait import wait_for
+from wrapanapi.utils import eval_strings
+from cfme.base.login import BaseLoggedInPage
 from cfme.common.provider import BaseProvider
 from cfme import exceptions
 from cfme.fixtures import pytest_selenium as sel
@@ -26,6 +26,7 @@
 from utils.pretty import Pretty
 from utils.varmeth import variable
 from utils.log import logger
+from utils.wait import wait_for


 paged_tbl = PagedTable(table_locator="//div[@id='list_grid']//table")
@@ -269,6 +270,18 @@ def num_image_registry(self):
     def num_image_registry_ui(self):
         return int(self.get_detail("Relationships", "Image Registries"))

+    def pods_per_ready_status(self):
+        """Grab the Container Statuses Summary of the pods from the API"""
+        # TODO: Add this logic to wrapanapi later
+        entities = self.mgmt.api.get('pod')[1]['items']
+        out = {}
+        for entity_j in entities:
+            out[entity_j['metadata']['name']] = {
+                condition['type']: eval_strings([condition['status']]).pop()
+                for condition in entity_j['status'].get('conditions', [])
+            }
+        return out
+

 @navigator.register(ContainersProvider, 'All')
 class All(CFMENavigateStep):
@@ -431,6 +444,25 @@ def pretty_id(self):
             getattr(self.obj, '__name__', str(self.obj)),
             self.polarion_id)

+    @classmethod
+    def get_pretty_id(cls, obj):
+        """Since the test object is sometimes wrapped within markers,
+        it is difficult to reach it inside the args tree; this method
+        unwraps the object and calls its pretty_id() function.
+
+        Args:
+            * obj: Either a ContainersTestItem or a marker that includes it
+        Returns:
+            str: pretty id
+        """
+        if isinstance(obj, cls):
+            return obj.pretty_id()
+        elif hasattr(obj, 'args') and hasattr(obj, '__iter__'):
+            for arg in obj.args:
+                pretty_id = cls.get_pretty_id(arg)
+                if pretty_id:
+                    return pretty_id
+

 class Labelable(object):
     """Provide the functionality to set labels"""
diff --git a/cfme/tests/containers/test_properties.py b/cfme/tests/containers/test_properties.py
index e0d59d6da7..ee94447178 100644
--- a/cfme/tests/containers/test_properties.py
+++ b/cfme/tests/containers/test_properties.py
@@ -1,12 +1,12 @@
 # -*- coding: utf-8 -*-
 import pytest
+from wrapanapi.utils import eval_strings

 from cfme.containers.provider import ContainersProvider,\
     ContainersTestItem
 from cfme.containers.route import Route
 from cfme.containers.project import Project
 from cfme.containers.service import Service
-from cfme.containers.container import Container
 from cfme.containers.node import Node
 from cfme.containers.image import Image
 from cfme.containers.image_registry import ImageRegistry
@@ -17,7 +17,6 @@
 from utils import testgen, version
 from utils.version import current_version
 from utils.soft_get import soft_get
-from utils.appliance.implementations.ui import navigate_to


 pytestmark = [
@@ -32,16 +31,19 @@


 TEST_ITEMS = [
-    pytest.mark.polarion('CMP-9945')(
-        ContainersTestItem(
-            Container,
-            'CMP-9945',
-            expected_fields=[
-                'name', 'state', 'last_state', 'restart_count',
-                'backing_ref_container_id', 'privileged'
-            ]
-        )
-    ),
+    # The next lines have been removed due to a bug introduced in CFME 5.8.1 -
+    # https://bugzilla.redhat.com/show_bug.cgi?id=1467639
+    # from cfme.containers.container import Container
+    # pytest.mark.polarion('CMP-9945')(
+    #     ContainersTestItem(
+    #         Container,
+    #         'CMP-9945',
+    #         expected_fields=[
+    #             'name', 'state', 'last_state', 'restart_count',
+    #             'backing_ref_container_id', 'privileged'
+    #         ]
+    #     )
+    # ),
     pytest.mark.polarion('CMP-10430')(
         ContainersTestItem(
             Project,
@@ -133,7 +135,7 @@


 @pytest.mark.parametrize('test_item', TEST_ITEMS,
-                         ids=[ti.args[1].pretty_id() for ti in TEST_ITEMS])
+                         ids=[ContainersTestItem.get_pretty_id(ti) for ti in TEST_ITEMS])
 def test_properties(provider, test_item, soft_assert):

     if current_version() < "5.7" and test_item.obj == Template:
@@ -143,7 +145,6 @@ def test_properties(provider, test_item, soft_assert):

     for instance in instances:
-        navigate_to(instance, 'Details')
         if isinstance(test_item.expected_fields, dict):
             expected_fields = version.pick(test_item.expected_fields)
         else:
@@ -156,27 +157,25 @@
                     .format(test_item.obj.__name__, instance.name, field))


-@pytest.mark.skip(reason="This test is currently skipped due to instability issues. ")
 def test_pods_conditions(provider, appliance, soft_assert):
-    # TODO: Add later this logic to mgmtsystem
     selected_pods_cfme = {pd.name: pd for pd in Pod.get_random_instances(
         provider, count=3, appliance=appliance)}
-    selected_pods_ose = {pod["metadata"]["name"]: pod for pod in
-                         provider.mgmt.api.get('pod')[1]['items'] if pod["metadata"]["name"] in
-                         selected_pods_cfme}
-
+    pods_per_ready_status = provider.pods_per_ready_status()
     for pod_name in selected_pods_cfme:
         cfme_pod = selected_pods_cfme[pod_name]
-        ose_pod = selected_pods_ose[pod_name]
-
-        ose_pod_condition = {cond["type"]: cond["status"] for cond in
-                             ose_pod['status']['conditions']}
-        cfme_pod_condition = {type: getattr(getattr(cfme_pod.summary.conditions, type), "Status")
-                              for type in ose_pod_condition}
-
-        for item in cfme_pod_condition:
-            soft_assert(ose_pod_condition[item], cfme_pod_condition[item])
+        ose_pod_condition = pods_per_ready_status[pod_name]
+        cfme_pod_condition = {_type:
+                              eval_strings(
+                                  [getattr(getattr(cfme_pod.summary.conditions, _type), "Status")]
+                              ).pop()
+                              for _type in ose_pod_condition}
+
+        for status in cfme_pod_condition:
+            soft_assert(ose_pod_condition[status] == cfme_pod_condition[status],
+                        'Pod "{}" condition "{}" mismatch: "{}" in OpenShift but "{}" in CFME.'
+                        .format(cfme_pod.name, status, ose_pod_condition[status],
+                                cfme_pod_condition[status]))
diff --git a/cfme/tests/containers/test_reload_button_provider.py b/cfme/tests/containers/test_reload_button_provider.py
index 6e1ce8a2a9..8bc79a6f50 100644
--- a/cfme/tests/containers/test_reload_button_provider.py
+++ b/cfme/tests/containers/test_reload_button_provider.py
@@ -1,9 +1,7 @@
 import pytest

 from cfme.containers.provider import ContainersProvider
-from cfme.web_ui import toolbar as tb
 from utils import testgen, version
-from utils.appliance.implementations.ui import navigate_to


 pytestmark = [
@@ -15,42 +13,10 @@


 @pytest.mark.polarion('CMP-9878')
-@pytest.mark.skip(reason="This test is currently skipped due to instability issues. ")
 def test_reload_button_provider(provider):
     """ This test verifies the data integrity of the fields in the Relationships table
     after clicking the "reload"
-    button. Fields that are being verified as part of provider.validate.stats():
-    Projects, Routes, Container Services, Replicators, Pods, Image Registries,
-    Containers, and Nodes.
-    Images are being validated separately, since the total
-    number of images in CFME 5.7 and CFME 5.8 includes all images from the OSE registry as well
-    as the images that are being created from the running pods. The images are searched
-    according to the @sha.
+    button.
""" - navigate_to(provider, 'Details') - tb.select('Reload Current Display') provider.validate_stats(ui=True) - - list_img_from_registry = provider.mgmt.list_image() - list_img_from_registry_splitted = [i.id.split( - '@sha256:')[-1] for i in list_img_from_registry] - - list_img_from_openshift = provider.mgmt.list_image_openshift() - list_img_from_openshift_splitted = [d['name'] - for d in list_img_from_openshift] - list_img_from_openshift_parsed = [i[7:].split( - '@sha256:')[-1] for i in list_img_from_openshift_splitted] - list_img_from_registry_splitted_new = set(list_img_from_registry_splitted) - list_img_from_openshift_parsed_new = set(list_img_from_openshift_parsed) - - list_img_from_openshift_parsed_new.update(list_img_from_registry_splitted_new) - - num_img_in_cfme = provider.num_image() - # TODO Fix num_image_ui() - - num_img_cfme_56 = len(provider.mgmt.list_image()) - num_img_cfme_57 = len(list_img_from_openshift_parsed_new) - - assert num_img_in_cfme == version.pick({version.LOWEST: num_img_cfme_56, - '5.7': num_img_cfme_57}) diff --git a/cfme/tests/containers/test_reports.py b/cfme/tests/containers/test_reports.py index 5c434119ea..b077e911f2 100644 --- a/cfme/tests/containers/test_reports.py +++ b/cfme/tests/containers/test_reports.py @@ -3,6 +3,7 @@ from traceback import format_exc import pytest +from wrapanapi.utils import eval_strings from cfme.containers.provider import ContainersProvider from cfme.exceptions import CandidateNotFound @@ -14,6 +15,7 @@ pytestmark = [ pytest.mark.usefixtures('setup_provider'), + pytest.mark.meta(blockers=[BZ(1467059, forced_streams=["5.8"])]), pytest.mark.meta( server_roles='+ems_metrics_coordinator +ems_metrics_collector +ems_metrics_processor'), pytest.mark.tier(1)] @@ -39,22 +41,6 @@ def node_hardwares_db_data(appliance): return out -@pytest.fixture(scope='function') -def pods_per_ready_status(provider): - """Grabing the pods and their ready status from API""" - # TODO: Add later this logic to wrapanapi - entities_j = provider.mgmt.api.get('pod')[1]['items'] - out = {} - for entity_j in entities_j: - out[entity_j['metadata']['name']] = next( - (True if condition['status'].lower() == 'true' else False) - for condition in entity_j['status']['conditions'] - if condition['type'].lower() == 'ready' - ) - - return out - - def get_vpor_data_by_name(vporizer_, name): return [vals for vals in vporizer_ if vals.resource_name == name] @@ -91,22 +77,21 @@ def test_container_reports_base_on_options(soft_assert): soft_assert(option, 'Could not find option "{}" for base report on.'.format(base_on)) -@pytest.mark.meta(blockers=[BZ(1435958, forced_streams=["5.8"])]) @pytest.mark.polarion('CMP-9533') -@pytest.mark.skip(reason="This test is currently skipped due to instability issues. ") -def test_pods_per_ready_status(soft_assert, pods_per_ready_status): +def test_report_pods_per_ready_status(soft_assert, provider): """Testing 'Pods per Ready Status' report, see polarion case for more info""" + pods_per_ready_status = provider.pods_per_ready_status() report = get_report('Pods per Ready Status') for row in report.data.rows: name = row['# Pods per Ready Status'] - readiness_ui = (True if row['Ready Condition Status'].lower() == 'true' - else False) + readiness_ui = eval_strings([row['Ready Condition Status']]).pop() if soft_assert(name in pods_per_ready_status, # this check based on BZ#1435958 'Could not find pod "{}" in openshift.' 
                       .format(name)):
-            soft_assert(pods_per_ready_status[name] == readiness_ui,
-                        'For pod "{}" expected readiness is "{}" got "{}"'
-                        .format(name, pods_per_ready_status[name], readiness_ui))
+            expected_readiness = pods_per_ready_status.get(name, {}).get('Ready', False)
+            soft_assert(expected_readiness == readiness_ui,
+                        'For pod "{}" expected readiness is "{}", got "{}".'
+                        .format(name, expected_readiness, readiness_ui))


 @pytest.mark.polarion('CMP-9536')
@@ -168,11 +153,10 @@ def test_report_nodes_by_memory_usage(appliance, soft_assert, vporizer):
                     .format(row['Name'], usage_db, usage_report))


-@pytest.mark.meta(blockers=[BZ(1436698, forced_streams=["5.6", "5.7"])])
 @pytest.mark.polarion('CMP-10669')
 def test_report_number_of_nodes_per_cpu_cores(soft_assert, node_hardwares_db_data):
     """Testing 'Number of Nodes per CPU Cores' report, see polarion case for more info"""
-    report = get_report('Number of Nodes per CPU Cores')
+    report = get_report('Nodes by Number of CPU Cores')

     for row in report.data.rows:
         hw = node_hardwares_db_data[row['Name']]
@@ -279,10 +263,11 @@ def test_report_pod_counts_for_container_images_by_project(provider, soft_assert

 @pytest.mark.long_running_env
 @pytest.mark.polarion('CMP-9532')
-def test_report_recently_discovered_pods(pods_per_ready_status, soft_assert):
+def test_report_recently_discovered_pods(provider, soft_assert):
     """Testing 'Recently Discovered Pods' report, see polarion case for more info"""
     report = get_report('Recently Discovered Pods')
     pods_in_report = [row['Name'] for row in report.data.rows]
+    pods_per_ready_status = provider.pods_per_ready_status()

     for pod in pods_per_ready_status.keys():
         soft_assert(pod in pods_in_report,
diff --git a/cfme/tests/containers/test_smart_management.py b/cfme/tests/containers/test_smart_management.py
index c7ab135ee9..3dccd0b711 100644
--- a/cfme/tests/containers/test_smart_management.py
+++ b/cfme/tests/containers/test_smart_management.py
@@ -8,12 +8,12 @@
 from cfme.configure.configuration import Tag
 from cfme.containers.provider import ContainersProvider, ContainersTestItem
 from cfme.containers.image import Image
-from cfme.containers.container import Container
 from cfme.containers.project import Project
 from cfme.containers.node import Node
 from cfme.containers.image_registry import ImageRegistry
 from cfme.containers.pod import Pod
 from cfme.containers.template import Template
+from utils.wait import wait_for

 pytestmark = [
     pytest.mark.uncollectif(lambda: current_version() < "5.6"),
@@ -23,7 +23,10 @@


 TEST_ITEMS = [
-    pytest.mark.polarion('CMP-9948')(ContainersTestItem(Container, 'CMP-9948')),
+    # The next lines have been removed due to a bug introduced in CFME 5.8.1 -
+    # https://bugzilla.redhat.com/show_bug.cgi?id=1467639
+    # from cfme.containers.container import Container  (add to imports)
+    # pytest.mark.polarion('CMP-9948')(ContainersTestItem(Container, 'CMP-9948')),
     pytest.mark.polarion('CMP-10320')(ContainersTestItem(Template, 'CMP-10320')),
     pytest.mark.polarion('CMP-9992')(ContainersTestItem(ImageRegistry, 'CMP-9992')),
     pytest.mark.polarion('CMP-9981')(ContainersTestItem(Image, 'CMP-9981')),
@@ -58,13 +61,19 @@ def set_random_tag(instance):
     return Tag(display_name=random_tag.text, category=random_cat.text)


-@pytest.mark.parametrize('test_item',
-                         TEST_ITEMS, ids=[item.args[1].pretty_id() for item in TEST_ITEMS])
+def wait_for_tag(obj_inst):
+    # Wait for a tag to show up under "My Company Tags" and return the last tag found
+    return wait_for(lambda: getattr(obj_inst.summary.smart_management,
+                                    'my_company_tags', []), fail_condition=[],
+                    num_sec=30, delay=5, fail_func=obj_inst.summary.reload).out.pop()
+
+
+@pytest.mark.parametrize('test_item', TEST_ITEMS,
+                         ids=[ContainersTestItem.get_pretty_id(ti) for ti in TEST_ITEMS])
 def test_smart_management_add_tag(provider, test_item):

     # validate no tag set to project
     if test_item.obj is ContainersProvider:
-        pytest.skip("This test is currently skipped due to instability issues. ")
         obj_inst = provider
     else:
         obj_inst = test_item.obj.get_random_instances(provider, count=1).pop()
@@ -75,7 +84,7 @@ def test_smart_management_add_tag(provider, test_item):
         obj_inst.remove_tags(obj_inst.get_tags())
     except RuntimeError:
         # Validate old tags formatting
-        assert re.match(regex, obj_inst.summary.smart_management.my_company_tags.text_value), \
+        assert re.match(regex, wait_for_tag(obj_inst).text_value), \
            "Tag formatting is invalid! "

     # Config random tag for object\
@@ -83,7 +92,7 @@ def test_smart_management_add_tag(provider, test_item):

     # validate new tag format
     obj_inst.summary.reload()
-    tag_display_text = obj_inst.summary.smart_management.my_company_tags.pop()
+    tag_display_text = wait_for_tag(obj_inst)
     tag_display_text = tag_display_text.text_value

     assert re.match(regex, tag_display_text), "Tag formatting is invalid! "
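
Note on the new ContainersTestItem.get_pretty_id classmethod: the snippet below is a minimal, self-contained sketch of the recursion it performs when a test item is wrapped by pytest markers. FakeItem and FakeMarker are hypothetical stand-ins, not part of the patch; a real MarkDecorator only needs to expose an 'args' attribute and be iterable for the same walk to work.

    # Sketch of the get_pretty_id() recursion from the patch, using stand-in classes.
    class FakeItem(object):
        def __init__(self, polarion_id):
            self.polarion_id = polarion_id

        def pretty_id(self):
            return self.polarion_id

        @classmethod
        def get_pretty_id(cls, obj):
            # Same logic as the patch: return the item's pretty id directly,
            # or walk the marker's args tree until an item is found.
            if isinstance(obj, cls):
                return obj.pretty_id()
            elif hasattr(obj, 'args') and hasattr(obj, '__iter__'):
                for arg in obj.args:
                    pretty_id = cls.get_pretty_id(arg)
                    if pretty_id:
                        return pretty_id


    class FakeMarker(object):
        """Hypothetical marker wrapper exposing 'args' like a MarkDecorator."""
        def __init__(self, *args):
            self.args = args

        def __iter__(self):
            return iter(self.args)


    # A marker-wrapped item resolves to the same id as a bare item.
    assert FakeItem.get_pretty_id(FakeMarker('CMP-10430', FakeItem('CMP-10430'))) == 'CMP-10430'
    assert FakeItem.get_pretty_id(FakeItem('CMP-9965')) == 'CMP-9965'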
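
Similarly, the sketch below shows the mapping shape that ContainersProvider.pods_per_ready_status() is expected to return, built from a canned Kubernetes-style pod list instead of a live provider. _to_bool is a hypothetical stand-in for wrapanapi's eval_strings, which the patch uses to turn 'True'/'False' condition strings into booleans.

    # Sketch of the expected pods_per_ready_status() output, no live API needed.
    def _to_bool(value):
        # OpenShift reports condition statuses as the strings 'True'/'False'.
        return value.strip().lower() == 'true'


    def pods_per_ready_status(pod_items):
        out = {}
        for pod in pod_items:
            out[pod['metadata']['name']] = {
                condition['type']: _to_bool(condition['status'])
                for condition in pod['status'].get('conditions', [])
            }
        return out


    sample_items = [
        {'metadata': {'name': 'router-1-deploy'},
         'status': {'conditions': [{'type': 'Initialized', 'status': 'True'},
                                   {'type': 'Ready', 'status': 'False'}]}},
    ]
    # Each pod name maps to a {condition type: bool} dict, so a report check
    # can read pods_per_ready_status[name]['Ready'] directly.
    assert pods_per_ready_status(sample_items) == {
        'router-1-deploy': {'Initialized': True, 'Ready': False},
    }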