mock.patch

Here are examples of the Python API mock.patch, taken from open source projects.

175 Examples
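
Before the individual examples, here is a minimal, self-contained sketch of the three usage forms of mock.patch that recur below: decorator, context manager, and start()/stop(). The patched target os.getcwd is chosen purely for illustration; the import assumes unittest.mock, and the standalone mock package exposes the same API.

import os
import unittest
from unittest import mock  # the standalone ``mock`` package exposes the same API


class PatchFormsTest(unittest.TestCase):

    # Form 1: decorator -- the mock is injected as an extra argument.
    @mock.patch('os.getcwd')
    def test_decorator(self, getcwd_mock):
        getcwd_mock.return_value = '/fake/dir'
        self.assertEqual('/fake/dir', os.getcwd())

    # Form 2: context manager -- the patch is undone when the block exits.
    def test_context_manager(self):
        with mock.patch('os.getcwd', return_value='/fake/dir'):
            self.assertEqual('/fake/dir', os.getcwd())

    # Form 3: start()/stop() -- typically paired with setUp/tearDown.
    def test_start_stop(self):
        patcher = mock.patch('os.getcwd', return_value='/fake/dir')
        patcher.start()
        self.addCleanup(patcher.stop)  # guarantee the patch is removed
        self.assertEqual('/fake/dir', os.getcwd())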

Example 151

Project: fuel-plugins Source File: test_validator_v4.py
    @mock.patch('fuel_plugin_builder.validators.validator_v4.utils')
    def test_check_tasks_schema_1_0_validation_passed(self, utils_mock, *args):
        data_sets = [
            [
                {
                    'id': 'task_id',
                    'type': 'shell',
                    'parameters': {
                        'timeout': 3,
                        'cmd': 'xx'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
            ],
            [
                {
                    'id': 'task_id',
                    'type': 'shell',
                    'parameters': {
                        'timeout': 3,
                        'cmd': 'xx'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                {
                    'id': 'task_id',
                    'type': 'puppet',
                    'parameters': {
                        'timeout': 3,
                        'puppet_manifest': 'xx',
                        'puppet_modules': 'xxx'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
            ],
            [
                {
                    'id': 'task_id',
                    'type': 'shell',
                    'parameters': {
                        'timeout': 3,
                        'cmd': 'reboot'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                {
                    'id': 'task_id',
                    'type': 'shell',
                    'parameters': {
                        'timeout': 3,
                        'cmd': 'xx'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                {
                    'id': 'task_id',
                    'type': 'puppet',
                    'parameters': {
                        'timeout': 3,
                        'puppet_manifest': 'xx',
                        'puppet_modules': 'xxx'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                }
            ],
            [
                {
                    'id': 'task_id',
                    'type': 'shell',
                    'parameters': {
                        'timeout': 3,
                        'cmd': 'reboot'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                {
                    'id': 'task_id',
                    'type': 'shell',
                    'parameters': {
                        'timeout': 3,
                        'puppet_manifest': 'xx',
                        'puppet_modules': 'yy',
                        'cmd': 'reboot'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                {
                    'id': 'task_id',
                    'type': 'puppet',
                    'parameters': {
                        'timeout': 3,
                        'retries': 10,
                        'puppet_manifest': 'xx',
                        'puppet_modules': 'xxx'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                {
                    'id': 'task_id',
                    'type': 'puppet',
                    'parameters': {
                        'timeout': 3,
                        'retries': 10,
                        'puppet_manifest': 'xx',
                        'puppet_modules': 'xxx'
                    },
                    'stage': 'post_deployment',
                    'role': 'master'
                },
            ]
        ]

        for data in data_sets:
            utils_mock.parse_yaml.return_value = data
            self.validator.check_deployment_tasks()

Example 152

Project: fuel-plugins Source File: test_validator_v5.py
    @mock.patch('fuel_plugin_builder.validators.base.utils')
    def test_check_node_attributes_schema_validation_failed(self, utils_mock):
        data_sets = [
            {
                'plugin_section': {
                    'metadata': {
                        'label': 'Some label'
                    },
                    '123': {
                        'label': 'Attribute without type',
                        'description': 'Attribute without type',
                        'value': ''
                    }
                }
            }, {
                'plugin_section': {
                    'metadata': {
                        'label': 'Some label'
                    },
                    'attribute_without_label': {
                        'description': 'Attribute without label',
                        'type': 'text',
                        'value': 'attribute_value'
                    }
                }
            }, {
                'plugin_section': {
                    'metadata': {
                        'label': 'Some label'
                    },
                    'attribute_without_value': {
                        'label': 'Attribute without value',
                        'description': 'Attribute without value',
                        'type': 'text',
                    }
                }
            }, {
                'plugin_section': {
                    'metadata': {
                        'label': 'Some label'
                    },
                    'attribute-1': {
                        'description': 'Attribute with wrong label type',
                        'label': 123,
                        'type': 'checkbox',
                        'value': ''
                    }
                }
            }, {
                'plugin_section': {
                    'metadata': {
                        'label': 'Some label'
                    },
                    'attribute-2': {
                        'label': 'Attribute with wrong type type',
                        'type': [],
                        'value': ''
                    }
                }
            }, {
                'plugin_section': {
                    'metadata': {
                        'label': 'Some label'
                    },
                    'attribute-3': {
                        'label': 'Attribute with wrong description type',
                        'type': 'text',
                        'value': '',
                        'description': False
                    }
                }
            }, {
                'plugin_section': {
                    'metadata': {
                        'label': 'Some label'
                    },
                    'attribute-4': {
                        'label': 'Attribute with wrong restrictions type',
                        'type': 'text',
                        'value': '',
                        'restrictions': {}
                    }
                }
            }, {
                'plugin_section': {
                    'metadata': {
                        'group': 'Metadata without label'
                    },
                    'attribute_a': {
                        'label': 'Some label',
                        'type': 'text',
                        'value': '',
                    }
                }
            }, {
                'plugin_section': {
                    'metadata': {
                        'label': None,
                        'group': 'Metadata with wrong label type'
                    },
                    'attribute_a': {
                        'label': 'Some label',
                        'type': 'text',
                        'value': '',
                    }
                }
            }, {
                'plugin_section': {
                    'metadata': {
                        'label': None,
                        'group': 'Metadata with wrong restriction type',
                        'restrictions': 'restrictions'
                    },
                    'attribute_a': {
                        'label': 'Some label',
                        'type': 'text',
                        'value': '',
                    }
                }
            }, {
                'metadata': {
                    'label': 'Some label'
                },
                'attribute': {
                    'label': 'Missed plugin section. Wrong level nesting.',
                    'type': 'text',
                    'value': ''
                }
            }, {
                'extra_level': {
                    'plugin_section': {
                        'metadata': {
                            'label': 'Some label'
                        },
                        'attribute-4': {
                            'label': 'Attribute with extra nesting level',
                            'type': 'text',
                            'value': ''
                        }
                    }
                }
            }, {
                'plugin_section': {
                    'metadata': {
                        'label': 'Some label'
                    },
                    'uns@pported_letters=!n_attr_name*': {
                        'label': 'Attribute with wrong name',
                        'type': 'text',
                        'value': ''
                    }
                }
            }, {
                'uns@pported_letters=!n_section_name': {
                    'metadata': {
                        'label': 'Some label'
                    },
                    'attribute': {
                        'label': 'Attribute with wrong name',
                        'type': 'text',
                        'value': ''
                    }
                }
            },
            ['wrong interface attributes object type']
        ]

        for data in data_sets:
            utils_mock.parse_yaml.return_value = data
            self.assertRaises(errors.ValidationError,
                              self.validator.check_node_attributes_schema)

Example 153

Project: fuel-web Source File: test_transactions_manager.py
    @mock.patch('nailgun.transactions.manager.rpc')
    def test_execute_few_graphs(self, rpc_mock):
        objects.DeploymentGraph.create_for_model(
            {
                'tasks': [
                    {
                        'id': 'super-mega-other-task',
                        'type': consts.ORCHESTRATOR_TASK_TYPES.puppet,
                        'roles': ['/.*/']
                    },
                ],
                'name': 'test_graph_2',
            },
            instance=self.cluster,
            graph_type='test_graph_2')

        task = self.manager.execute(graphs=[
            {"type": "test_graph"},
            {"type": "test_graph_2"},
        ])

        self.assertItemsEqual(
            ["test_graph", "test_graph_2"],
            [sub.graph_type for sub in task.subtasks])

        # Only the message for the first graph should be sent; the
        # second graph is sent by the RPC receiver once the first one
        # completes.
        rpc_mock.cast.assert_called_once_with(
            'naily',
            [{
                'args': {
                    'tasks_metadata': self.expected_metadata,
                    'task_uuid': task.subtasks[0].uuid,
                    'tasks_graph': {
                        None: [],
                        self.cluster.nodes[0].uid: [
                            {
                                'id': 'test_task',
                                'type': 'puppet',
                                'fail_on_error': True,
                                'parameters': {'cwd': '/'}
                            },
                        ]
                    },
                    'tasks_directory': {},
                    'dry_run': False,
                    'noop_run': False,
                    'debug': False
                },
                'respond_to': 'transaction_resp',
                'method': 'task_deploy',
                'api_version': '1'
            }])

        # Consider we've got success from Astute.
        self._success(task.subtasks[0].uuid)

        # It's time to send the second graph to execution.
        rpc_mock.cast.assert_called_with(
            'naily',
            [{
                'args': {
                    'tasks_metadata': self.expected_metadata,
                    'task_uuid': task.subtasks[1].uuid,
                    'tasks_graph': {
                        None: [],
                        self.cluster.nodes[0].uid: [
                            {
                                'id': 'super-mega-other-task',
                                'type': 'puppet',
                                'fail_on_error': True,
                                'parameters': {'cwd': '/'}
                            },
                        ]
                    },
                    'tasks_directory': {},
                    'dry_run': False,
                    'noop_run': False,
                    'debug': False
                },
                'respond_to': 'transaction_resp',
                'method': 'task_deploy',
                'api_version': '1'
            }])

        # Consider we've got success from Astute.
        self._success(task.subtasks[1].uuid)
        self._check_timing(task.subtasks[1])
        # Ensure the top level transaction is ready.
        self.assertEqual(task.status, consts.TASK_STATUSES.ready)
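
Note the two different assertions in this example: assert_called_once_with requires exactly one call in total, while assert_called_with only checks the arguments of the most recent call. A minimal sketch of the difference, with illustrative arguments:

from unittest import mock

cast = mock.Mock()
cast('naily', ['first'])
cast.assert_called_once_with('naily', ['first'])  # passes: exactly one call so far

cast('naily', ['second'])
cast.assert_called_with('naily', ['second'])  # passes: checks the last call only
# cast.assert_called_once_with(...) would now fail, since two calls were made.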

Example 154

Project: group-based-policy Source File: test_lb_agent.py
Function: test_handle_event
    @mock.patch(__name__ + '.test_data.FakeObjects.rpcmgr')
    @mock.patch(__name__ + '.test_data.FakeObjects.sc')
    def _test_handle_event(self, sc, rpcmgr):
        """ Tests all create/update/delete operation of LBaaSEventHandler of
        loadbalancer agent.

        Returns: none

        """

        agent = self._get_lb_handler_objects(sc, self.drivers, rpcmgr)
        driver = self.drivers['loadbalancer']

        with mock.patch.object(
                agent, '_get_driver', return_value=driver), (
            mock.patch.object(
                driver, 'create_vip')) as mock_create_vip, (
            mock.patch.object(
                driver, 'delete_vip')) as mock_delete_vip, (
            mock.patch.object(
                driver, 'update_vip')) as mock_update_vip, (
            mock.patch.object(
                driver, 'create_pool')) as mock_create_pool, (
            mock.patch.object(
                driver, 'delete_pool')) as mock_delete_pool, (
            mock.patch.object(
                driver, 'update_pool')) as mock_update_pool, (
            mock.patch.object(
                driver, 'create_member')) as mock_create_member, (
            mock.patch.object(
                driver, 'delete_member')) as mock_delete_member, (
            mock.patch.object(
                driver, 'update_member')) as mock_update_member, (
            mock.patch.object(
                driver, 'create_pool_health_monitor')) as mock_create_poolhm, (
            mock.patch.object(
                driver, 'delete_pool_health_monitor')) as mock_delete_poolhm, (
            mock.patch.object(
                driver, 'update_pool_health_monitor')) as mock_update_poolhm:

            vip = self.fo._get_vip_object()[0]
            old_vip = self.fo._get_vip_object()[0]
            pool = self.fo._get_pool_object()[0]
            old_pool = self.fo._get_pool_object()[0]
            member = self.fo._get_member_object()[0]
            old_member = self.fo._get_member_object()[0]
            hm = self.fo._get_hm_object()[0]
            old_hm = self.fo._get_hm_object()[0]
            pool_id = '6350c0fd-07f8-46ff-b797-62acd23760de'
            agent.handle_event(self.ev)

            if self.ev.id == 'CREATE_VIP':
                mock_create_vip.assert_called_with(vip, self.fo.vip_context)
            elif self.ev.id == 'DELETE_VIP':
                mock_delete_vip.assert_called_with(vip, self.fo.vip_context)
            elif self.ev.id == 'UPDATE_VIP':
                mock_update_vip.assert_called_with(
                    old_vip, vip, self.fo.vip_context)
            elif self.ev.id == 'CREATE_POOL':
                mock_create_pool.assert_called_with(
                    pool, self.fo.vip_context)
            elif self.ev.id == 'DELETE_POOL':
                mock_delete_pool.assert_called_with(
                    pool, self.fo.vip_context)
            elif self.ev.id == 'UPDATE_POOL':
                mock_update_pool.assert_called_with(
                    old_pool, pool, self.fo.vip_context)
            elif self.ev.id == 'CREATE_MEMBER':
                mock_create_member.assert_called_with(
                    member, self.fo.context_test)
            elif self.ev.id == 'DELETE_MEMBER':
                mock_delete_member.assert_called_with(
                    member, self.fo.context_test)
            elif self.ev.id == 'UPDATE_MEMBER':
                mock_update_member.assert_called_with(
                    old_member, member, self.fo.context_test)
            elif self.ev.id == 'CREATE_POOL_HEALTH_MONITOR':
                mock_create_poolhm.assert_called_with(
                    hm, pool_id, self.fo.context_test)
            elif self.ev.id == 'DELETE_POOL_HEALTH_MONITOR':
                mock_delete_poolhm.assert_called_with(
                    hm, pool_id, self.fo.context_test)
            elif self.ev.id == 'UPDATE_POOL_HEALTH_MONITOR':
                mock_update_poolhm.assert_called_with(
                    old_hm, hm, pool_id, self.fo.context_test)
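
The dozen chained context managers above can be flattened with contextlib.ExitStack (Python 3.3+). A sketch of the same patching pattern; FakeDriver and its methods are illustrative stand-ins, not the agent's real driver API:

import contextlib
from unittest import mock


class FakeDriver(object):
    """Illustrative stand-in; not the agent's real driver."""

    def create_vip(self, vip, context):
        raise RuntimeError('the real driver must not run in tests')

    def delete_vip(self, vip, context):
        raise RuntimeError('the real driver must not run in tests')


driver = FakeDriver()
with contextlib.ExitStack() as stack:
    # One enter_context() call per method replaces the chained ``with``.
    mocks = {
        name: stack.enter_context(mock.patch.object(driver, name))
        for name in ('create_vip', 'delete_vip')
    }
    driver.create_vip('vip', 'ctx')
    mocks['create_vip'].assert_called_once_with('vip', 'ctx')
    mocks['delete_vip'].assert_not_called()
# Both patches are undone here, even if an assertion fails.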

Example 155

Project: ironic Source File: test_pxe.py
    @mock.patch('ironic.common.image_service.GlanceImageService',
                autospec=True)
    @mock.patch.object(pxe_utils, '_build_pxe_config', autospec=True)
    def _test_build_pxe_config_options_ipxe(self, build_pxe_mock, glance_mock,
                                            whle_dsk_img=False,
                                            ipxe_timeout=0,
                                            ipxe_use_swift=False):
        self.config(pxe_append_params='test_param', group='pxe')
        # NOTE: the trailing '/' should be removed from the URL string
        self.config(api_url='http://192.168.122.184:6385', group='conductor')
        self.config(ipxe_timeout=ipxe_timeout, group='pxe')
        root_dir = CONF.deploy.http_root

        driver_internal_info = self.node.driver_internal_info
        driver_internal_info['is_whole_disk_image'] = whle_dsk_img
        self.node.driver_internal_info = driver_internal_info
        self.node.save()

        tftp_server = CONF.pxe.tftp_server

        http_url = 'http://192.1.2.3:1234'
        self.config(ipxe_enabled=True, group='pxe')
        self.config(http_url=http_url, group='deploy')
        if ipxe_use_swift:
            self.config(ipxe_use_swift=True, group='pxe')
            glance = mock.Mock()
            glance_mock.return_value = glance
            glance.swift_temp_url.side_effect = [
                deploy_kernel, deploy_ramdisk] = [
                'swift_kernel', 'swift_ramdisk']
            image_info = {
                'deploy_kernel': (uuidutils.generate_uuid(),
                                  os.path.join(root_dir,
                                               self.node.uuid,
                                               'deploy_kernel')),
                'deploy_ramdisk': (uuidutils.generate_uuid(),
                                   os.path.join(root_dir,
                                                self.node.uuid,
                                                'deploy_ramdisk'))
            }
        else:
            deploy_kernel = os.path.join(http_url, self.node.uuid,
                                         'deploy_kernel')
            deploy_ramdisk = os.path.join(http_url, self.node.uuid,
                                          'deploy_ramdisk')
            image_info = {
                'deploy_kernel': ('deploy_kernel',
                                  os.path.join(root_dir,
                                               self.node.uuid,
                                               'deploy_kernel')),
                'deploy_ramdisk': ('deploy_ramdisk',
                                   os.path.join(root_dir,
                                                self.node.uuid,
                                                'deploy_ramdisk'))
            }

        kernel = os.path.join(http_url, self.node.uuid, 'kernel')
        ramdisk = os.path.join(http_url, self.node.uuid, 'ramdisk')
        if (whle_dsk_img or
                deploy_utils.get_boot_option(self.node) == 'local'):
            ramdisk = 'no_ramdisk'
            kernel = 'no_kernel'
        else:
            image_info.update({
                'kernel': ('kernel_id',
                           os.path.join(root_dir,
                                        self.node.uuid,
                                        'kernel')),
                'ramdisk': ('ramdisk_id',
                            os.path.join(root_dir,
                                         self.node.uuid,
                                         'ramdisk'))
            })

        ipxe_timeout_in_ms = ipxe_timeout * 1000

        expected_options = {
            'ari_path': ramdisk,
            'deployment_ari_path': deploy_ramdisk,
            'pxe_append_params': 'test_param',
            'aki_path': kernel,
            'deployment_aki_path': deploy_kernel,
            'tftp_server': tftp_server,
            'ipxe_timeout': ipxe_timeout_in_ms,
        }

        with task_manager.acquire(self.context, self.node.uuid,
                                  shared=True) as task:
            options = pxe._build_pxe_config_options(task, image_info)
        self.assertEqual(expected_options, options)
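
Both patches above pass autospec=True, which makes the mock copy and enforce the signature of the real attribute. A minimal sketch of the effect, using os.path.join purely for illustration:

import os.path
from unittest import mock

with mock.patch.object(os.path, 'join', autospec=True) as join_mock:
    join_mock.return_value = '/stubbed'
    assert os.path.join('a', 'b') == '/stubbed'
    # Calling os.path.join() with no arguments would raise TypeError here,
    # because the autospecced mock enforces the real function's signature.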

Example 156

Project: networking-calico Source File: lib.py
Function: set_up
    def setUp(self):
        # Announce the current test case.
        _log.info("TEST CASE: %s", self.id())

        # Mock calls to sys.exit.
        self.sys_exit_p = mock.patch("sys.exit")
        self.sys_exit_p.start()

        # Hook eventlet.
        self.setUp_eventlet()

        # Hook logging.
        self.setUp_logging()

        # If an arg mismatch occurs, we want to see the complete diff.
        self.maxDiff = None

        # Create an instance of CalicoMechanismDriver.
        mech_calico.mech_driver = None
        self.driver = mech_calico.CalicoMechanismDriver()

        # Hook the (mock) Neutron database.
        self.db = mech_calico.manager.NeutronManager.get_plugin()
        self.db_context = mech_calico.ctx.get_admin_context()

        self.db_context.session.query.return_value.filter_by.side_effect = (
            self.port_query
        )

        # Arrange what the DB's get_ports will return.
        self.db.get_ports.side_effect = self.get_ports
        self.db.get_port.side_effect = self.get_port

        # Arrange DB's get_subnet and get_subnets calls.
        self.db.get_subnet.side_effect = self.get_subnet
        self.db.get_subnets.side_effect = self.get_subnets

        # Arrange what the DB's get_security_groups query will return (the
        # default SG).
        self.db.get_security_groups.return_value = [
            {'id': 'SGID-default',
             'security_group_rules': [
                 {'remote_group_id': 'SGID-default',
                  'remote_ip_prefix': None,
                  'protocol': -1,
                  'direction': 'ingress',
                  'ethertype': 'IPv4',
                  'port_range_min': -1},
                 {'remote_group_id': 'SGID-default',
                  'remote_ip_prefix': None,
                  'protocol': -1,
                  'direction': 'ingress',
                  'ethertype': 'IPv6',
                  'port_range_min': -1},
                 {'remote_group_id': None,
                  'remote_ip_prefix': None,
                  'protocol': -1,
                  'direction': 'egress',
                  'ethertype': 'IPv4',
                  'port_range_min': -1},
                 {'remote_group_id': None,
                  'remote_ip_prefix': None,
                  'protocol': -1,
                  'direction': 'egress',
                  'ethertype': 'IPv6',
                  'port_range_min': -1}
             ]}
        ]
        self.db.get_security_group_rules.return_value = [
            {'remote_group_id': 'SGID-default',
             'remote_ip_prefix': None,
             'protocol': -1,
             'direction': 'ingress',
             'ethertype': 'IPv4',
             'security_group_id': 'SGID-default',
             'port_range_min': -1},
            {'remote_group_id': 'SGID-default',
             'remote_ip_prefix': None,
             'protocol': -1,
             'direction': 'ingress',
             'ethertype': 'IPv6',
             'security_group_id': 'SGID-default',
             'port_range_min': -1},
            {'remote_group_id': None,
             'remote_ip_prefix': None,
             'protocol': -1,
             'direction': 'egress',
             'ethertype': 'IPv4',
             'security_group_id': 'SGID-default',
             'port_range_min': -1},
            {'remote_group_id': None,
             'remote_ip_prefix': None,
             'protocol': -1,
             'direction': 'egress',
             'security_group_id': 'SGID-default',
             'ethertype': 'IPv6',
             'port_range_min': -1}
        ]

        self.db._get_port_security_group_bindings.side_effect = (
            self.get_port_security_group_bindings
        )

        self.port_security_group_bindings = [
            {'port_id': 'DEADBEEF-1234-5678',
             'security_group_id': 'SGID-default'},
            {'port_id': 'FACEBEEF-1234-5678',
             'security_group_id': 'SGID-default'},
            {'port_id': 'HELLO-1234-5678',
             'security_group_id': 'SGID-default'},
        ]

Example 157

Project: networking-fortinet Source File: test_fortinet_mechanism_driver.py
    @mock.patch('networking_fortinet.db.models.Fortinet_Firewall_Address')
    @mock.patch('networking_fortinet.common.resources.FirewallAddress')
    @mock.patch('networking_fortinet.common.resources.FirewallAddrgrp')
    @mock.patch('networking_fortinet.db.models.Fortinet_Firewall_Policy')
    @mock.patch('networking_fortinet.common.resources.FirewallPolicy')
    @mock.patch('networking_fortinet.db.models.Fortinet_Vlink_Vlan_Allocation')
    @mock.patch('networking_fortinet.db.models.Fortinet_Vlink_IP_Allocation')
    @mock.patch('networking_fortinet.db.models.Fortinet_Vdom_Vlink')
    @mock.patch('networking_fortinet.common.resources.VdomLink')
    @mock.patch('networking_fortinet.common.resources.VlanInterface')
    @mock.patch('networking_fortinet.db.models.Fortinet_Static_Router')
    @mock.patch('networking_fortinet.common.resources.RouterStatic')
    @mock.patch('networking_fortinet.db.models.Fortinet_Firewall_IPPool')
    @mock.patch('networking_fortinet.common.resources.FirewallIppool')
    @mock.patch('networking_fortinet.db.models.Fortinet_ML2_ReservedIP')
    @mock.patch('networking_fortinet.common.resources.DhcpServerRsvAddr')
    @mock.patch('networking_fortinet.db.models.Fortinet_Interface_subip')
    def test_create_port_precommit_and_del_port_postcommit(self,
                                   Fortinet_Firewall_Address,
                                   FirewallAddress, FirewallAddrgrp,
                                   Fortinet_Firewall_Policy, FirewallPolicy,
                                   Fortinet_Vlink_Vlan_Allocation,
                                   Fortinet_Vlink_IP_Allocation,
                                   Fortinet_Vdom_Vlink, VdomLink,
                                   VlanInterface,
                                   Fortinet_Static_Router, RouterStatic,
                                   Fortinet_Firewall_IPPool, FirewallIppool,
                                   Fortinet_ML2_ReservedIP,
                                   DhcpServerRsvAddr,
                                   Fortinet_Interface_subip):
        self.driver.initialize()
        mech_context = self._setup_port_context()
        namespace = mock.Mock()
        namespace.vdom = 'osvdm1234'
        subnet = mock.Mock()
        subnet.cidr = '172.20.21.0/24'
        subnet.edit_id = '123'
        subnet.vdom = 'osvdm123'
        fwaddr = mock.Mock()
        fwaddr.name = 'cool'
        fwaddr.group = 'addrgrp1'
        fwpolicy = mock.Mock()
        fwpolicy.edit_id = '123'
        fwpolicy.vdom = 'osvdm123'
        fwippool = mock.Mock()
        fwippool.edit_id = '123'
        fwippool.vdom = 'osvdmext'
        fwippool.name = '172.20.21.1'
        router = mock.Mock()
        router.tenant_id = 'test'
        router.edit_id = '123'
        router.vdom = 'osvdm123'
        router.gw_port_id = None
        vlink = mock.Mock()
        vlink.inf_name_ext_vdom = 'vlink_1'
        vlink.id = '1234'
        vlink.ip = '169.254.0.10'
        vlink.edit_id = '123'
        vlink.vdom = 'osvdm123'
        vlink.inf_name_int_vdom = 'vlink_0'
        fgt_intf = mock.Mock()
        fgt_intf.name = 'port32'
        fgt_intf.ip = '1.1.1.1'
        subip = mock.Mock()
        subip.ip = '172.20.21.1 255.255.255.0'
        reserveip = mock.Mock()
        reserveip.edit_id = '123'
        reserveip.ip = '172.20.21.123'
        reserveip.mac = 'aa:aa:aa:aa:aa:aa'
        with mock.patch('networking_fortinet.db.models.query_record',
                        side_effect=[namespace, subnet, fwaddr]):
            with mock.patch('networking_fortinet.db.models.query_records',
                        side_effect=[[fwaddr]]):
                self.driver.create_port_precommit(mech_context)
        with mock.patch('networking_fortinet.db.models.query_record',
                        side_effect=[subnet, subnet, fwpolicy, fwaddr]):
            with mock.patch('networking_fortinet.db.models.query_records',
                        side_effect=[[fwaddr]]):
                self.driver.delete_port_postcommit(mech_context)
        mech_context.current['device_owner'] = 'network:router_gateway'
        with mock.patch('networking_fortinet.db.models.query_record',
                        side_effect=[namespace, subnet, 'external_net',
                                     router, vlink, subnet, fgt_intf]):
            with mock.patch('networking_fortinet.common.utils.getip',
                            side_effect=['169.254.0.10', '160.254.0.11']):
                    with mock.patch(
                        'networking_fortinet.db.models.query_records',
                        side_effect=[[subip]]):
                        self.driver.create_port_precommit(mech_context)
        with mock.patch('networking_fortinet.db.models.query_record',
                        side_effect=[subnet, subnet, 'external', subnet,
                                     fwpolicy, fwippool, router, namespace,
                                     vlink, vlink.ip, router, vlink,
                                     namespace]):
            with mock.patch('networking_fortinet.db.models.query_records',
                            side_effect=[[subip], [router]]):
                with mock.patch('networking_fortinet.db.models.query_count',
                                return_value=0):
                    self.driver.delete_port_postcommit(mech_context)
        mech_context.current['device_owner'] = 'network:compute:None'
        with mock.patch('networking_fortinet.db.models.query_record',
                        side_effect=[namespace, subnet, [reserveip], subnet]):
            self.driver.create_port_precommit(mech_context)
        with mock.patch('networking_fortinet.db.models.query_records',
                        side_effect=[[reserveip]]):
            with mock.patch('networking_fortinet.db.models.query_record',
                            side_effect=[subnet] * 3):
                self.driver.delete_port_postcommit(mech_context)
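
This example leans on the fact that an iterable side_effect makes successive calls return successive items, so each query_record call inside the driver sees the next canned record. A minimal sketch of that behavior:

from unittest import mock

query_record = mock.Mock(side_effect=['namespace', 'subnet', 'fwaddr'])
assert query_record() == 'namespace'  # first call -> first item
assert query_record() == 'subnet'     # second call -> second item
assert query_record() == 'fwaddr'     # third call -> third item
# A fourth call would raise StopIteration.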

Example 158

Project: oslo.service Source File: test_periodic.py
    @mock.patch('oslo_service.periodic_task.now')
    def test_called_thrice(self, mock_now):

        time = 340
        mock_now.return_value = time

        # Class inside test def to mock 'now' in
        # the periodic task decorator
        class AService(periodic_task.PeriodicTasks):
            def __init__(self, conf):
                super(AService, self).__init__(conf)
                self.called = {'doit': 0, 'urg': 0, 'ticks': 0, 'tocks': 0}

            @periodic_task.periodic_task
            def doit(self, context):
                self.called['doit'] += 1

            @periodic_task.periodic_task
            def crashit(self, context):
                self.called['urg'] += 1
                raise AnException('urg')

            @periodic_task.periodic_task(
                spacing=10 + periodic_task.DEFAULT_INTERVAL,
                run_immediately=True)
            def doit_with_ticks(self, context):
                self.called['ticks'] += 1

            @periodic_task.periodic_task(
                spacing=10 + periodic_task.DEFAULT_INTERVAL)
            def doit_with_tocks(self, context):
                self.called['tocks'] += 1

        external_called = {'ext1': 0, 'ext2': 0}

        @periodic_task.periodic_task
        def ext1(self, context):
            external_called['ext1'] += 1

        @periodic_task.periodic_task(
            spacing=10 + periodic_task.DEFAULT_INTERVAL)
        def ext2(self, context):
            external_called['ext2'] += 1

        serv = AService(self.conf)
        serv.add_periodic_task(ext1)
        serv.add_periodic_task(ext2)
        serv.run_periodic_tasks(None)
        # Time: 340
        self.assertEqual(0, serv.called['doit'])
        self.assertEqual(0, serv.called['urg'])
        # New last run will be 350
        self.assertEqual(1, serv.called['ticks'])
        self.assertEqual(0, serv.called['tocks'])
        self.assertEqual(0, external_called['ext1'])
        self.assertEqual(0, external_called['ext2'])

        time = time + periodic_task.DEFAULT_INTERVAL
        mock_now.return_value = time
        serv.run_periodic_tasks(None)

        # Time: 400
        # New last run: 420
        self.assertEqual(1, serv.called['doit'])
        self.assertEqual(1, serv.called['urg'])
        # Closest multiple of 70 is 420
        self.assertEqual(1, serv.called['ticks'])
        self.assertEqual(0, serv.called['tocks'])
        self.assertEqual(1, external_called['ext1'])
        self.assertEqual(0, external_called['ext2'])

        time = time + periodic_task.DEFAULT_INTERVAL / 2
        mock_now.return_value = time
        serv.run_periodic_tasks(None)
        self.assertEqual(1, serv.called['doit'])
        self.assertEqual(1, serv.called['urg'])
        self.assertEqual(2, serv.called['ticks'])
        self.assertEqual(1, serv.called['tocks'])
        self.assertEqual(1, external_called['ext1'])
        self.assertEqual(1, external_called['ext2'])

        time = time + periodic_task.DEFAULT_INTERVAL
        mock_now.return_value = time
        serv.run_periodic_tasks(None)
        self.assertEqual(2, serv.called['doit'])
        self.assertEqual(2, serv.called['urg'])
        self.assertEqual(3, serv.called['ticks'])
        self.assertEqual(2, serv.called['tocks'])
        self.assertEqual(2, external_called['ext1'])
        self.assertEqual(2, external_called['ext2'])

Example 159

Project: python-ceilometerclient Source File: test_utils.py
    def test_prettytable(self):
        class Struct(object):
            def __init__(self, **entries):
                self.__dict__.update(entries)

        # test that the prettytable output is wellformatted (left-aligned)
        with mock.patch('sys.stdout', new=six.StringIO()) as stdout:
            utils.print_dict({'K': 'k', 'Key': 'Value'})
            self.assertEqual('''\
+----------+-------+
| Property | Value |
+----------+-------+
| K        | k     |
| Key      | Value |
+----------+-------+
''', stdout.getvalue())

        with mock.patch('sys.stdout', new=six.StringIO()) as stdout:
            utils.print_dict({'alarm_id': '262567fd-d79a-4bbb-a9d0-59d879b6',
                              'name': u'\u6d4b\u8bd5',
                              'description': u'\u6d4b\u8bd5',
                              'state': 'insufficient data',
                              'repeat_actions': 'False',
                              'type': 'threshold',
                              'threshold': '1.0',
                              'statistic': 'avg',
                              'alarm_actions': [u'http://something/alarm1',
                                                u'http://something/alarm2'],
                              'ok_actions': [{"get_attr1":
                                              [u"web_server_scaleup_policy1",
                                               u"alarm_url1"]},
                                             {"get_attr2":
                                              [u"web_server_scaleup_policy2",
                                               u"alarm_url2"]}],
                              'time_constraints': '[{name: c1,'
                                                  '\\n  description: test,'
                                                  '\\n  start: 0 18 * * *,'
                                                  '\\n  duration: 1,'
                                                  '\\n  timezone: US}]'},
                             wrap=72)
            expected = u'''\
+------------------+-------------------------------------------------------\
--------+
| Property         | Value                                                 \
        |
+------------------+-------------------------------------------------------\
--------+
| alarm_actions    | ["http://something/alarm1", "http://something/alarm2"]\
        |
| alarm_id         | 262567fd-d79a-4bbb-a9d0-59d879b6                      \
        |
| description      | \u6d4b\u8bd5                                          \
                |
| name             | \u6d4b\u8bd5                                          \
                |
| ok_actions       | [{"get_attr1": ["web_server_scaleup_policy1", "alarm_u\
rl1"]}, |
|                  | {"get_attr2": ["web_server_scaleup_policy2", "alarm_ur\
l2"]}]  |
| repeat_actions   | False                                                 \
        |
| state            | insufficient data                                     \
        |
| statistic        | avg                                                   \
        |
| threshold        | 1.0                                                   \
        |
| time_constraints | [{name: c1,                                           \
        |
|                  |   description: test,                                  \
        |
|                  |   start: 0 18 * * *,                                  \
        |
|                  |   duration: 1,                                        \
        |
|                  |   timezone: US}]                                      \
        |
| type             | threshold                                             \
        |
+------------------+-------------------------------------------------------\
--------+
'''
            # py2 prints str type, py3 prints unicode type
            if six.PY2:
                expected = expected.encode('utf-8')
            self.assertEqual(expected, stdout.getvalue())
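
Patching sys.stdout with a StringIO is the classic way to capture output under mock; on Python 3 the same capture is possible without patching, via contextlib.redirect_stdout. A minimal sketch:

import contextlib
import io

buf = io.StringIO()
with contextlib.redirect_stdout(buf):
    print('K        k')
assert buf.getvalue() == 'K        k\n'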

Example 160

Project: sahara Source File: test_job_utils.py
    @mock.patch('sahara.context.ctx')
    @mock.patch('sahara.conductor.API.data_source_get_all')
    def test_resolve_data_source_refs(self, data_source_get_all, ctx):

        ctx.return_value = 'dummy'

        name_ref = job_utils.DATA_SOURCE_PREFIX + 'input'
        job_exec_id = uuidutils.generate_uuid()

        input_url = "swift://container/input"
        input = u.create_data_source(input_url,
                                     name="input",
                                     id=uuidutils.generate_uuid())

        output = u.create_data_source("swift://container/output.%JOB_EXEC_ID%",
                                      name="output",
                                      id=uuidutils.generate_uuid())
        output_url = "swift://container/output." + job_exec_id

        by_name = {'input': input,
                   'output': output}

        by_id = {input.id: input,
                 output.id: output}

        # Pretend to be the database
        def _get_all(ctx, **kwargs):
            name = kwargs.get('name')
            if name in by_name:
                name_list = [by_name[name]]
            else:
                name_list = []

            id = kwargs.get('id')
            if id in by_id:
                id_list = [by_id[id]]
            else:
                id_list = []
            return list(set(name_list + id_list))

        data_source_get_all.side_effect = _get_all

        job_configs = {
            'configs': {
                job_utils.DATA_SOURCE_SUBST_NAME: True,
                job_utils.DATA_SOURCE_SUBST_UUID: True},
            'args': [name_ref, output.id, input.id]}
        urls = {}
        ds, nc = job_utils.resolve_data_source_references(job_configs,
                                                          job_exec_id, urls)
        self.assertEqual(2, len(ds))
        self.assertEqual([input.url, output_url, input.url], nc['args'])
        # Swift configs should be filled in since they were blank
        self.assertEqual(input.credentials['user'],
                         nc['configs']['fs.swift.service.sahara.username'])
        self.assertEqual(input.credentials['password'],
                         nc['configs']['fs.swift.service.sahara.password'])
        self.assertEqual(2, len(urls))
        self.assertItemsEqual({input.id: (input_url, input_url),
                               output.id: (output_url, output_url)}, urls)

        job_configs['configs'] = {'fs.swift.service.sahara.username': 'sam',
                                  'fs.swift.service.sahara.password': 'gamgee',
                                  job_utils.DATA_SOURCE_SUBST_NAME: False,
                                  job_utils.DATA_SOURCE_SUBST_UUID: True}
        ds, nc = job_utils.resolve_data_source_references(job_configs,
                                                          job_exec_id, {})
        self.assertEqual(2, len(ds))
        self.assertEqual([name_ref, output_url, input.url], nc['args'])
        # Swift configs should not be overwritten
        self.assertEqual(job_configs['configs'], nc['configs'])

        job_configs['configs'] = {job_utils.DATA_SOURCE_SUBST_NAME: True,
                                  job_utils.DATA_SOURCE_SUBST_UUID: False}
        job_configs['proxy_configs'] = {'proxy_username': 'john',
                                        'proxy_password': 'smith',
                                        'proxy_trust_id': 'trustme'}
        ds, nc = job_utils.resolve_data_source_references(job_configs,
                                                          job_exec_id, {})
        self.assertEqual(1, len(ds))
        self.assertEqual([input.url, output.id, input.id], nc['args'])

        # Swift configs should be empty and proxy configs should be preserved
        self.assertEqual(job_configs['configs'], nc['configs'])
        self.assertEqual(job_configs['proxy_configs'], nc['proxy_configs'])

        # Substitution not enabled
        job_configs['configs'] = {job_utils.DATA_SOURCE_SUBST_NAME: False,
                                  job_utils.DATA_SOURCE_SUBST_UUID: False}
        ds, nc = job_utils.resolve_data_source_references(job_configs,
                                                          job_exec_id, {})
        self.assertEqual(0, len(ds))
        self.assertEqual(job_configs['args'], nc['args'])
        self.assertEqual(job_configs['configs'], nc['configs'])

        # Substitution enabled but no values to modify
        job_configs['configs'] = {job_utils.DATA_SOURCE_SUBST_NAME: True,
                                  job_utils.DATA_SOURCE_SUBST_UUID: True}
        job_configs['args'] = ['val1', 'val2', 'val3']
        ds, nc = job_utils.resolve_data_source_references(job_configs,
                                                          job_exec_id, {})
        self.assertEqual(0, len(ds))
        self.assertEqual(nc['args'], job_configs['args'])
        self.assertEqual(nc['configs'], job_configs['configs'])

Example 161

Project: sahara Source File: utils.py
def start_patch(patch_templates=True):
    get_clusters_p = mock.patch("sahara.service.api.v10.get_clusters")
    get_cluster_p = mock.patch("sahara.service.api.v10.get_cluster")
    if patch_templates:
        get_ng_templates_p = mock.patch(
            "sahara.service.api.v10.get_node_group_templates")
        get_ng_template_p = mock.patch(
            "sahara.service.api.v10.get_node_group_template")
        get_cl_templates_p = mock.patch(
            "sahara.service.api.v10.get_cluster_templates")
        get_cl_template_p = mock.patch(
            "sahara.service.api.v10.get_cluster_template")
    nova_p = mock.patch("sahara.utils.openstack.nova.client")
    heat_p = mock.patch("sahara.utils.openstack.heat.client")
    image_manager_p = mock.patch(
        "sahara.utils.openstack.images.SaharaImageManager")
    cinder_p = mock.patch("sahara.utils.openstack.cinder.client")
    cinder_exists_p = mock.patch(
        "sahara.utils.openstack.cinder.check_cinder_exists")
    get_image_p = mock.patch("sahara.service.api.v10.get_image")

    get_image = get_image_p.start()
    get_clusters = get_clusters_p.start()
    get_cluster = get_cluster_p.start()
    if patch_templates:
        get_ng_templates = get_ng_templates_p.start()
        get_ng_template = get_ng_template_p.start()
        get_cl_templates = get_cl_templates_p.start()
        get_cl_template_p.start()

    nova = nova_p.start()

    if patch_templates:
        get_cl_templates.return_value = []

    nova().flavors.list.side_effect = _get_flavors_list
    nova().security_groups.list.side_effect = _get_security_groups_list
    nova().keypairs.get.side_effect = _get_keypair
    nova().networks.find.side_effect = _get_network
    nova().networks.find.__name__ = 'find'
    nova().floating_ip_pools.list.side_effect = _get_fl_ip_pool_list
    nova().availability_zones.list.side_effect = _get_availability_zone_list

    heat = heat_p.start()
    heat().stacks.list.side_effect = _get_heat_stack_list

    image_manager = image_manager_p.start()

    cinder = cinder_p.start()
    cinder().availability_zones.list.side_effect = _get_availability_zone_list

    cinder_exists = cinder_exists_p.start()
    cinder_exists.return_value = True

    class Image(object):
        def __init__(self, name='test'):
            self.name = name

        @property
        def id(self):
            if self.name == 'test':
                return '550e8400-e29b-41d4-a716-446655440000'
            else:
                return '813fe450-40d2-4acc-ade5-ea753a1bd5bc'

        @property
        def tags(self):
            if self.name == 'test':
                return ['fake', '0.1']
            else:
                return ['fake', 'wrong_tag']

    def _get_image(id):
        if id == '550e8400-e29b-41d4-a716-446655440000':
            return Image()
        else:
            return Image('wrong_test')

    get_image.side_effect = _get_image
    image_manager().list_registered.return_value = [Image(),
                                                    Image(name='wrong_name')]
    ng_dict = tu.make_ng_dict('ng', '42', ['namenode'], 1)
    cluster = tu.create_cluster('test', 't', 'fake', '0.1', [ng_dict],
                                id=1, status=c_u.CLUSTER_STATUS_ACTIVE)
    # stub clusters list
    get_clusters.return_value = [cluster]
    get_cluster.return_value = cluster

    # stub node templates
    if patch_templates:
        ngt_dict = {'name': 'test', 'tenant_id': 't', 'flavor_id': '42',
                    'plugin_name': 'fake', 'hadoop_version': '0.1',
                    'id': '550e8400-e29b-41d4-a716-446655440000',
                    'node_processes': ['namenode']}

        get_ng_templates.return_value = [r.NodeGroupTemplateResource(ngt_dict)]

        ct_dict = {'name': 'test', 'tenant_id': 't',
                   'plugin_name': 'fake', 'hadoop_version': '0.1'}

        get_cl_templates.return_value = [r.ClusterTemplateResource(ct_dict)]

    def _get_ng_template(id):
        for template in get_ng_templates():
            if template.id == id:
                return template
        return None

    if patch_templates:
        get_ng_template.side_effect = _get_ng_template
    # request data to validate
    patchers = [get_clusters_p, get_cluster_p,
                nova_p, get_image_p, heat_p, image_manager_p, cinder_p,
                cinder_exists_p]
    if patch_templates:
        patchers.extend([get_ng_template_p, get_ng_templates_p,
                         get_cl_template_p, get_cl_templates_p])
    return patchers
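
start_patch returns the started patchers so that the caller is responsible for undoing them. A hypothetical sketch of the intended lifecycle in a test; the import is an assumption, standing in for wherever the helper above lives:

import utils  # assumed: the module above, available on the test path

patchers = utils.start_patch(patch_templates=True)
try:
    pass  # ... exercise validation code against the stubbed clients ...
finally:
    for patcher in patchers:
        patcher.stop()  # restore every patched target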

Example 162

Project: sahara-dashboard Source File: tests.py
    @test.create_stubs({api.sahara: ('client',
                                     'nodegroup_template_create',
                                     'plugin_get_version_details'),
                        dash_api.network: ('floating_ip_pools_list',
                                           'security_group_list'),
                        dash_api.nova: ('flavor_list',
                                        'availability_zone_list'),
                        dash_api.cinder: ('extension_supported',
                                          'availability_zone_list',
                                          'volume_type_list')})
    @mock.patch('openstack_dashboard.api.base.is_service_enabled')
    def test_create(self, service_checker):
        service_checker.return_value = True
        flavor = self.flavors.first()
        ngt = self.nodegroup_templates.first()
        configs = self.plugins_configs.first()
        new_name = ngt.name + '-new'
        self.mox.StubOutWithMock(
            workflow_helpers, 'parse_configs_from_context')

        dash_api.cinder.extension_supported(IsA(http.HttpRequest),
                                            'AvailabilityZones') \
            .AndReturn(True)
        dash_api.cinder.availability_zone_list(IsA(http.HttpRequest))\
            .AndReturn(self.availability_zones.list())
        dash_api.cinder.volume_type_list(IsA(http.HttpRequest))\
            .AndReturn([])
        dash_api.nova.flavor_list(IsA(http.HttpRequest)).AndReturn([flavor])
        api.sahara.plugin_get_version_details(IsA(http.HttpRequest),
                                              ngt.plugin_name,
                                              ngt.hadoop_version) \
            .MultipleTimes().AndReturn(configs)
        dash_api.network.floating_ip_pools_list(IsA(http.HttpRequest)) \
            .AndReturn([])
        dash_api.network.security_group_list(IsA(http.HttpRequest)) \
            .AndReturn([])
        workflow_helpers.parse_configs_from_context(
            IgnoreArg(), IgnoreArg()).AndReturn({})
        api.sahara.nodegroup_template_create(
            IsA(http.HttpRequest),
            **{'name': new_name,
               'plugin_name': ngt.plugin_name,
               'hadoop_version': ngt.hadoop_version,
               'description': ngt.description,
               'flavor_id': flavor.id,
               'volumes_per_node': None,
               'volumes_size': None,
               'volume_type': None,
               'volume_local_to_instance': False,
               'volumes_availability_zone': None,
               'node_processes': ['namenode'],
               'node_configs': {},
               'floating_ip_pool': None,
               'security_groups': [],
               'image_id': None,
               'auto_security_group': True,
               'availability_zone': None,
               'is_proxy_gateway': False,
               'use_autoconfig': True,
               'shares': [],
               'is_public': False,
               'is_protected': False})\
            .AndReturn(True)

        self.mox.ReplayAll()

        res = self.client.post(
            CREATE_URL,
            {'nodegroup_name': new_name,
             'plugin_name': ngt.plugin_name,
             ngt.plugin_name + '_version': '1.2.1',
             'hadoop_version': ngt.hadoop_version,
             'description': ngt.description,
             'flavor': flavor.id,
             'availability_zone': None,
             'storage': 'ephemeral_drive',
             'volumes_per_node': 0,
             'volumes_size': 0,
             'volume_type': None,
             'volume_local_to_instance': False,
             'volumes_availability_zone': None,
             'floating_ip_pool': None,
             'security_autogroup': True,
             'processes': 'HDFS:namenode',
             'use_autoconfig': True,
             'shares': [],
             'is_public': False,
             'is_protected': False})

        self.assertNoFormErrors(res)
        self.assertRedirectsNoFollow(res, INDEX_URL)
        self.assertMessageCount(success=1)

Example 163

Project: sahara-dashboard Source File: tests.py
    @test.create_stubs({api.sahara: ('client',
                                     'nodegroup_template_create',
                                     'nodegroup_template_update',
                                     'nodegroup_template_get',
                                     'plugin_get_version_details'),
                        dash_api.network: ('floating_ip_pools_list',
                                           'security_group_list'),
                        dash_api.nova: ('flavor_list',
                                        'availability_zone_list'),
                        dash_api.cinder: ('extension_supported',
                                          'availability_zone_list',
                                          'volume_type_list')})
    @mock.patch('openstack_dashboard.api.base.is_service_enabled')
    def test_update(self, service_checker):
        service_checker.return_value = True
        flavor = self.flavors.first()
        ngt = self.nodegroup_templates.first()
        configs = self.plugins_configs.first()
        new_name = ngt.name + '-updated'
        UPDATE_URL = reverse(
            'horizon:project:data_processing.clusters:edit',
            kwargs={'template_id': ngt.id})
        self.mox.StubOutWithMock(
            workflow_helpers, 'parse_configs_from_context')

        dash_api.cinder.extension_supported(IsA(http.HttpRequest),
                                            'AvailabilityZones') \
            .AndReturn(True)
        dash_api.cinder.availability_zone_list(IsA(http.HttpRequest)) \
            .AndReturn(self.availability_zones.list())
        dash_api.cinder.volume_type_list(IsA(http.HttpRequest))\
            .AndReturn([])
        dash_api.nova.flavor_list(IsA(http.HttpRequest)).AndReturn([flavor])
        api.sahara.plugin_get_version_details(IsA(http.HttpRequest),
                                              ngt.plugin_name,
                                              ngt.hadoop_version) \
            .MultipleTimes().AndReturn(configs)
        dash_api.network.floating_ip_pools_list(IsA(http.HttpRequest)) \
            .AndReturn([])
        dash_api.network.security_group_list(IsA(http.HttpRequest)) \
            .AndReturn([])
        workflow_helpers.parse_configs_from_context(
            IgnoreArg(), IgnoreArg()).AndReturn({})
        api.sahara.nodegroup_template_get(IsA(http.HttpRequest),
                                          ngt.id) \
            .AndReturn(ngt)
        api.sahara.nodegroup_template_update(
            request=IsA(http.HttpRequest),
            ngt_id=ngt.id,
            name=new_name,
            plugin_name=ngt.plugin_name,
            hadoop_version=ngt.hadoop_version,
            flavor_id=flavor.id,
            description=ngt.description,
            volumes_per_node=0,
            volumes_size=None,
            volume_type=None,
            volume_local_to_instance=False,
            volumes_availability_zone=None,
            node_processes=['namenode'],
            node_configs={},
            floating_ip_pool=None,
            security_groups=[],
            auto_security_group=True,
            availability_zone=None,
            use_autoconfig=True,
            is_proxy_gateway=False,
            shares=[],
            is_protected=False,
            is_public=False,
            image_id=ngt.image_id).AndReturn(True)

        self.mox.ReplayAll()

        res = self.client.post(
            UPDATE_URL,
            {'ng_id': ngt.id,
             'nodegroup_name': new_name,
             'plugin_name': ngt.plugin_name,
             ngt.plugin_name + '_version': '1.2.1',
             'hadoop_version': ngt.hadoop_version,
             'description': ngt.description,
             'flavor': flavor.id,
             'availability_zone': None,
             'storage': 'ephemeral_drive',
             'volumes_per_node': 0,
             'volumes_size': 0,
             'volume_type': None,
             'volume_local_to_instance': False,
             'volumes_availability_zone': None,
             'floating_ip_pool': None,
             'is_proxy_gateway': False,
             'security_autogroup': True,
             'processes': 'HDFS:namenode',
             'use_autoconfig': True})

        self.assertNoFormErrors(res)
        self.assertRedirectsNoFollow(res, INDEX_URL)
        self.assertMessageCount(success=1)
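
Note: in the test above, the mock.patch decorator replaces the target for
the duration of the test and passes the replacement MagicMock in as an
extra argument (service_checker), whose return_value is configured before
the request is made. A minimal, self-contained sketch of that decorator
pattern, using os.path.exists as a stand-in target:

import os.path
import unittest

import mock


class PatchDecoratorTest(unittest.TestCase):

    # The decorator swaps out os.path.exists for the duration of the
    # test and hands the replacement MagicMock in as an argument.
    @mock.patch('os.path.exists')
    def test_exists_is_stubbed(self, mock_exists):
        mock_exists.return_value = True  # configure before use
        self.assertTrue(os.path.exists('/no/such/path'))
        mock_exists.assert_called_once_with('/no/such/path')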

Example 164

Project: sahara-tests Source File: test_base.py
    def setUp(self):
        super(TestBase, self).setUp()
        with mock.patch(
                'sahara_tests.scenario.base.BaseTestCase.__init__'
        ) as mock_init:
            mock_init.return_value = None
            self.base_scenario = base.BaseTestCase()
        self.base_scenario.credentials = {'os_username': 'admin',
                                          'os_password': 'nova',
                                          'os_tenant': 'admin',
                                          'os_auth_url':
                                              'http://localhost:5000/v2.0',
                                          'sahara_service_type':
                                              'data-processing-local',
                                          'sahara_url':
                                              'http://sahara_host:8386/v1.1',
                                          'ssl_cert': 'sahara_tests/unit/'
                                                      'scenario/dummy.crt',
                                          'ssl_verify': True}
        self.base_scenario.plugin_opts = {'plugin_name': 'vanilla',
                                          'hadoop_version': '2.7.1'}
        self.base_scenario.network = {'type': 'neutron',
                                      'private_network': 'changed_private',
                                      'public_network': 'changed_public',
                                      'auto_assignment_floating_ip': False}
        self.base_scenario.testcase = {
            'node_group_templates': [
                {
                    'name': 'master',
                    'node_processes': ['namenode', 'oozie', 'resourcemanager'],
                    'flavor': '2',
                    'is_proxy_gateway': True
                },
                {
                    'name': 'worker',
                    'node_processes': ['datanode', 'nodemanager'],
                    'flavor': '2'
                }],
            'cluster_template': {
                'name': 'test_name_ct',
                'node_group_templates': {
                    'master': 1,
                    'worker': 3
                }
            },
            'timeout_poll_cluster_status': 300,
            'timeout_delete_resource': 300,
            'timeout_poll_jobs_status': 2,
            'timeout_check_transient': 3,
            'retain_resources': True,
            'image': 'image_name',
            'edp_batching': 1,
            "edp_jobs_flow": {
                "test_flow": [
                    {
                        "type": "Pig",
                        "input_datasource": {
                            "type": "swift",
                            "source": "sahara_tests/scenario/defaults/"
                                      "edp-examples/edp-pig/"
                                      "top-todoers/data/input"
                        },
                        "output_datasource": {
                            "type": "hdfs",
                            "destination": "/user/hadoop/edp-output"
                        },
                        "main_lib": {
                            "type": "swift",
                            "source": "sahara_tests/scenario/defaults/"
                                      "edp-examples/edp-pig/"
                                      "top-todoers/example.pig"
                        }
                    }
                ]
            }
        }
        self.base_scenario.ng_id_map = {'worker': 'set_id', 'master': 'set_id'}
        self.base_scenario.ng_name_map = {}
        self.base_scenario.key_name = 'test_key'
        self.base_scenario.key = 'key_from_yaml'
        self.base_scenario.template_path = ('sahara_tests/scenario/templates/'
                                            'vanilla/2.7.1')
        self.job = self.base_scenario.testcase["edp_jobs_flow"].get(
            'test_flow')[0]
        self.base_scenario.cluster_id = 'some_id'
        self.base_scenario.proxy_ng_name = False
        self.base_scenario.proxy = False
        self.base_scenario.setUpClass()
        timeouts.Defaults.init_defaults(self.base_scenario.testcase)
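
Note: the setUp above constructs BaseTestCase while its __init__ is
patched out, so the object is created without running the real
constructor and its attributes are then filled in by hand. A minimal
sketch of that pattern, with a stand-in Expensive class:

import unittest

import mock


class Expensive(object):
    def __init__(self):
        raise RuntimeError('would hit the network')


class InitPatchTest(unittest.TestCase):

    def test_construct_without_running_init(self):
        # Patch __init__ so instantiation never runs the real
        # constructor; return_value must be None for __init__.
        with mock.patch('%s.Expensive.__init__' % __name__) as mock_init:
            mock_init.return_value = None
            obj = Expensive()
        obj.credentials = {'os_username': 'admin'}
        self.assertEqual('admin', obj.credentials['os_username'])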

Example 165

Project: searchlight Source File: test_api.py
    def test_nested_facets(self):
        """Check facets for a nested field (networks.OS-EXT-IPS:type). We
        expect a single count per server matched, not per object in the
        'networks' field. Also check that fields typed as 'object' (not
        'nested') are marked appropriately.
        """
        servers_plugin = self.initialized_plugins['OS::Nova::Server']
        server1 = {
            u'addresses': {
                u'net4': [
                    {u'addr': u'127.0.0.1',
                     u'OS-EXT-IPS:type': u'fixed',
                     u'version': 4},
                    {u'addr': u'127.0.0.1',
                     u'OS-EXT-IPS:type': u'fixed',
                     u'version': 4}
                ]
            },
            u'flavor': {u'id': u'1'},
            u'id': u'6c41b4d1-f0fa-42d6-9d8d-e3b99695aa69',
            u'image': {u'id': u'a'},
            u'name': u'instance1',
            u'created_at': u'2016-04-07T15:49:35Z',
            u'updated_at': u'2016-04-07T15:51:35Z',
            u'status': u'ACTIVE',
            u'tenant_id': TENANT1,
            u'user_id': u'27f4d76b-be62-4e4e-aa33bb11cc55'
        }

        server2 = {
            u'addresses': {
                u'net4': [
                    {u'addr': u'127.0.0.1',
                     u'OS-EXT-IPS:type': u'fixed',
                     u'version': 4},
                    {u'addr': u'127.0.0.1',
                     u'OS-EXT-IPS:type': u'floating',
                     u'version': 4}
                ]
            },
            u'flavor': {u'id': u'1'},
            u'id': u'08ca6c43-eea8-48d0-bbb2-30c50109d5d8',
            u'created_at': u'2016-04-07T15:49:35Z',
            u'updated_at': u'2016-04-07T15:51:35Z',
            u'image': {u'id': u'a'},
            u'name': u'instance2',
            u'status': u'ACTIVE',
            u'tenant_id': TENANT1,
            u'user_id': u'27f4d76b-be62-4e4e-aa33bb11cc55'
        }

        with mock.patch(nova_version_getter, return_value=fake_version_list):
            self._index(
                servers_plugin,
                [test_utils.DictObj(**server1),
                 test_utils.DictObj(**server2)])

        response, json_content = self._facet_request(
            TENANT1,
            doc_type="OS::Nova::Server")

        self.assertEqual(2, json_content['OS::Nova::Server']['doc_count'])

        self.assertEqual(['OS::Nova::Server'],
                         list(six.iterkeys(json_content)))

        # server1 has two fixed addresses (which should be rolled up into one
        # match). server2 has fixed and floating addresses.
        expected = {
            u'name': u'networks.OS-EXT-IPS:type',
            u'options': [
                {u'doc_count': 2, u'key': u'fixed'},
                {u'doc_count': 1, u'key': u'floating'},
            ],
            u'type': u'string',
            u'nested': True
        }
        fixed_network_facet = list(six.moves.filter(
            lambda f: f['name'] == 'networks.OS-EXT-IPS:type',
            json_content['OS::Nova::Server']['facets']
        ))[0]
        self.assertEqual(
            expected,
            fixed_network_facet,
        )

        # Check that 'image.id' (not nested but 'object') has nested=False
        expected = {
            u'name': u'image.id',
            u'type': u'string',
            u'nested': False,
            u'resource_type': u'OS::Glance::Image',
            u'options': [
                {u'doc_count': 2, u'key': u'a'}
            ]
        }
        image_facet = list(six.moves.filter(
            lambda f: f['name'] == 'image.id',
            json_content['OS::Nova::Server']['facets']
        ))[0]
        self.assertEqual(
            expected,
            image_facet,
        )
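
Note: mock.patch accepts its target as any string naming the attribute to
replace, so the test above can keep the dotted path in a variable
(nova_version_getter) and pass return_value directly. A small sketch of
the same idiom against platform.python_version:

import platform
import unittest

import mock

# The dotted path can live in a variable, exactly like
# nova_version_getter above.
version_getter = 'platform.python_version'


class PatchTargetVariableTest(unittest.TestCase):

    def test_patch_target_from_variable(self):
        with mock.patch(version_getter, return_value='0.0.0-fake'):
            self.assertEqual('0.0.0-fake', platform.python_version())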

Example 166

Project: searchlight Source File: test_glance_image_plugin.py
    def test_protected_properties(self):
        extra_props = {
            'x_foo_matcher': 'this is protected',
            'x_foo_something_else': 'this is not protected',
            'z_this_has_no_rules': 'this is protected too'
        }
        image_with_properties = _image_fixture(
            UUID1, owner=TENANT1, checksum=CHECKSUM, name='simple', size=256,
            status='active', extra_properties=extra_props
        )

        with mock.patch('glanceclient.v2.image_members.Controller.list',
                        return_value=[]):
            serialized = self.plugin.serialize(image_with_properties)

        elasticsearch_results = {
            'hits': {
                'hits': [{
                    '_source': copy.deepcopy(serialized),
                    '_type': self.plugin.get_document_type(),
                    '_index': self.plugin.alias_name_search
                }]
            }
        }

        # Admin context
        fake_request = unit_test_utils.get_fake_request(
            USER1, TENANT1, '/v1/search', is_admin=True
        )

        for result_hit in elasticsearch_results['hits']['hits']:
            self.plugin.filter_result(result_hit, fake_request.context)

        # This should contain the three properties we added
        expected = {
            'checksum': '93264c3edf5972c9f1cb309543d38a5c',
            'container_format': None,
            'disk_format': None,
            'id': 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d',
            'image_type': 'image',
            'kernel_id': None,
            'members': [],
            'min_disk': None,
            'min_ram': None,
            'name': 'simple',
            'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df',
            'project_id': '6838eb7b-6ded-434a-882c-b344c77fe8df',
            'protected': False,
            'size': 256,
            'status': 'active',
            'tags': [],
            'virtual_size': None,
            'visibility': 'public',
            'created_at': DATE1,
            'updated_at': DATE1,
            'x_foo_matcher': 'this is protected',
            'x_foo_something_else': 'this is not protected',
            'z_this_has_no_rules': 'this is protected too'
        }

        self.assertEqual(expected,
                         elasticsearch_results['hits']['hits'][0]['_source'])

        # Non admin user. Recreate this because the filter operation modifies
        # it in place and we want a fresh copy
        elasticsearch_results = {
            'hits': {
                'hits': [{
                    '_source': copy.deepcopy(serialized),
                    '_type': self.plugin.get_document_type(),
                    '_index': self.plugin.alias_name_search
                }]
            }
        }
        # Non admin context should miss the x_foo property
        fake_request = unit_test_utils.get_fake_request(
            USER1, TENANT1, '/v1/search', is_admin=False
        )

        for result_hit in elasticsearch_results['hits']['hits']:
            self.plugin.filter_result(result_hit, fake_request.context)

        # Should be missing two of the properties
        expected = {
            'checksum': '93264c3edf5972c9f1cb309543d38a5c',
            'container_format': None,
            'disk_format': None,
            'id': 'c80a1a6c-bd1f-41c5-90ee-81afedb1d58d',
            'members': [],
            'min_disk': None,
            'min_ram': None,
            'name': 'simple',
            'owner': '6838eb7b-6ded-434a-882c-b344c77fe8df',
            'project_id': '6838eb7b-6ded-434a-882c-b344c77fe8df',
            'protected': False,
            'size': 256,
            'status': 'active',
            'tags': [],
            'virtual_size': None,
            'visibility': 'public',
            'created_at': DATE1,
            'updated_at': DATE1,
            'x_foo_something_else': 'this is not protected'
        }

        self.assertEqual(expected,
                         elasticsearch_results['hits']['hits'][0]['_source'])
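
Note: patching the glanceclient method by its dotted path with
return_value=[] keeps serialize() from ever talking to a real service,
and because the patch lands on the class, it affects every instance
created inside the with block. A sketch with a stand-in Members class:

import unittest

import mock


class Members(object):
    def list(self):
        raise RuntimeError('would call the real service')


class ReturnValueTest(unittest.TestCase):

    def test_stub_out_client_call(self):
        # Patching the method on the class makes all instances
        # return the canned value inside the with block.
        target = '%s.Members.list' % __name__
        with mock.patch(target, return_value=[]):
            self.assertEqual([], Members().list())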

Example 167

Project: swift Source File: __init__.py
def in_process_setup(the_object_server=object_server):
    _info('IN-PROCESS SERVERS IN USE FOR FUNCTIONAL TESTS')
    _info('Using object_server class: %s' % the_object_server.__name__)
    conf_src_dir = os.environ.get('SWIFT_TEST_IN_PROCESS_CONF_DIR')
    show_debug_logs = os.environ.get('SWIFT_TEST_DEBUG_LOGS')

    if conf_src_dir is not None:
        if not os.path.isdir(conf_src_dir):
            msg = 'Config source %s is not a dir' % conf_src_dir
            raise InProcessException(msg)
        _info('Using config source dir: %s' % conf_src_dir)

    # If SWIFT_TEST_IN_PROCESS_CONF_DIR specifies a config source dir then
    # prefer config files from there, otherwise read config from source tree
    # sample files. A mixture of files from the two sources is allowed.
    proxy_conf = _in_process_find_conf_file(conf_src_dir, 'proxy-server.conf')
    _info('Using proxy config from %s' % proxy_conf)
    swift_conf_src = _in_process_find_conf_file(conf_src_dir, 'swift.conf')
    _info('Using swift config from %s' % swift_conf_src)

    monkey_patch_mimetools()

    global _testdir
    _testdir = os.path.join(mkdtemp(), 'tmp_functional')
    utils.mkdirs(_testdir)
    rmtree(_testdir)
    utils.mkdirs(os.path.join(_testdir, 'sda1'))
    utils.mkdirs(os.path.join(_testdir, 'sda1', 'tmp'))
    utils.mkdirs(os.path.join(_testdir, 'sdb1'))
    utils.mkdirs(os.path.join(_testdir, 'sdb1', 'tmp'))

    # Call the associated method for the value of
    # 'SWIFT_TEST_IN_PROCESS_CONF_LOADER', if one exists
    conf_loader_label = os.environ.get(
        'SWIFT_TEST_IN_PROCESS_CONF_LOADER')
    if conf_loader_label is not None:
        try:
            conf_loader = conf_loaders[conf_loader_label]
            _debug('Calling method %s mapped to conf loader %s' %
                   (conf_loader.__name__, conf_loader_label))
        except KeyError as missing_key:
            raise InProcessException('No function mapped for conf loader %s' %
                                     missing_key)

        try:
            # Pass-in proxy_conf
            proxy_conf = conf_loader(proxy_conf)
            _debug('Now using proxy conf %s' % proxy_conf)
        except Exception as err:  # noqa
            raise InProcessException(err)

    swift_conf = _in_process_setup_swift_conf(swift_conf_src, _testdir)
    obj_sockets = _in_process_setup_ring(swift_conf, conf_src_dir, _testdir)

    global orig_swift_conf_name
    orig_swift_conf_name = utils.SWIFT_CONF_FILE
    utils.SWIFT_CONF_FILE = swift_conf
    constraints.reload_constraints()
    storage_policy.SWIFT_CONF_FILE = swift_conf
    storage_policy.reload_storage_policies()
    global config
    if constraints.SWIFT_CONSTRAINTS_LOADED:
        # Use the swift constraints that are loaded for the test framework
        # configuration
        _c = dict((k, str(v))
                  for k, v in constraints.EFFECTIVE_CONSTRAINTS.items())
        config.update(_c)
    else:
        # In-process swift constraints were not loaded, something's wrong
        raise SkipTest
    global orig_hash_path_suff_pref
    orig_hash_path_suff_pref = utils.HASH_PATH_PREFIX, utils.HASH_PATH_SUFFIX
    utils.validate_hash_conf()

    global _test_socks
    _test_socks = []
    # We create the proxy server listening socket to get its port number so
    # that we can add it as the "auth_port" value for the functional test
    # clients.
    prolis = eventlet.listen(('localhost', 0))
    _test_socks.append(prolis)

    # The following set of configuration values is used both for the
    # functional test framework and for the various proxy, account, container
    # and object servers.
    config.update({
        # Values needed by the various in-process swift servers
        'devices': _testdir,
        'swift_dir': _testdir,
        'mount_check': 'false',
        'client_timeout': '4',
        'allow_account_management': 'true',
        'account_autocreate': 'true',
        'allow_versions': 'True',
        'allow_versioned_writes': 'True',
        # Below are values used by the functional test framework, as well as
        # by the various in-process swift servers
        'auth_host': '127.0.0.1',
        'auth_port': str(prolis.getsockname()[1]),
        'auth_ssl': 'no',
        'auth_prefix': '/auth/',
        # Primary functional test account (needs admin access to the
        # account)
        'account': 'test',
        'username': 'tester',
        'password': 'testing',
        # User on a second account (needs admin access to the account)
        'account2': 'test2',
        'username2': 'tester2',
        'password2': 'testing2',
        # User on same account as first, but without admin access
        'username3': 'tester3',
        'password3': 'testing3',
        # Service user and prefix (emulates glance, cinder, etc. user)
        'account5': 'test5',
        'username5': 'tester5',
        'password5': 'testing5',
        'service_prefix': 'SERVICE',
        # For tempauth middleware. Update reseller_prefix
        'reseller_prefix': 'AUTH, SERVICE',
        'SERVICE_require_group': 'service',
        # Reseller admin user (needs reseller_admin_role)
        'account6': 'test6',
        'username6': 'tester6',
        'password6': 'testing6'
    })

    # If an env var explicitly specifies the proxy-server object_post_as_copy
    # option then use its value, otherwise leave default config unchanged.
    object_post_as_copy = os.environ.get(
        'SWIFT_TEST_IN_PROCESS_OBJECT_POST_AS_COPY')
    if object_post_as_copy is not None:
        object_post_as_copy = config_true_value(object_post_as_copy)
        config['object_post_as_copy'] = str(object_post_as_copy)
        _debug('Setting object_post_as_copy to %r' % object_post_as_copy)

    acc1lis = eventlet.listen(('localhost', 0))
    acc2lis = eventlet.listen(('localhost', 0))
    con1lis = eventlet.listen(('localhost', 0))
    con2lis = eventlet.listen(('localhost', 0))
    _test_socks += [acc1lis, acc2lis, con1lis, con2lis] + obj_sockets

    account_ring_path = os.path.join(_testdir, 'account.ring.gz')
    with closing(GzipFile(account_ring_path, 'wb')) as f:
        pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
                    [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
                      'port': acc1lis.getsockname()[1]},
                     {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
                      'port': acc2lis.getsockname()[1]}], 30),
                    f)
    container_ring_path = os.path.join(_testdir, 'container.ring.gz')
    with closing(GzipFile(container_ring_path, 'wb')) as f:
        pickle.dump(ring.RingData([[0, 1, 0, 1], [1, 0, 1, 0]],
                    [{'id': 0, 'zone': 0, 'device': 'sda1', 'ip': '127.0.0.1',
                      'port': con1lis.getsockname()[1]},
                     {'id': 1, 'zone': 1, 'device': 'sdb1', 'ip': '127.0.0.1',
                      'port': con2lis.getsockname()[1]}], 30),
                    f)

    eventlet.wsgi.HttpProtocol.default_request_version = "HTTP/1.0"
    # Turn off logging requests by the underlying WSGI software.
    eventlet.wsgi.HttpProtocol.log_request = lambda *a: None
    logger = utils.get_logger(config, 'wsgi-server', log_route='wsgi')
    # Redirect logging other messages by the underlying WSGI software.
    eventlet.wsgi.HttpProtocol.log_message = \
        lambda s, f, *a: logger.error('ERROR WSGI: ' + f % a)
    # Default to only 4 seconds for in-process functional test runs
    eventlet.wsgi.WRITE_TIMEOUT = 4

    def get_logger_name(name):
        if show_debug_logs:
            return debug_logger(name)
        else:
            return None

    acc1srv = account_server.AccountController(
        config, logger=get_logger_name('acct1'))
    acc2srv = account_server.AccountController(
        config, logger=get_logger_name('acct2'))
    con1srv = container_server.ContainerController(
        config, logger=get_logger_name('cont1'))
    con2srv = container_server.ContainerController(
        config, logger=get_logger_name('cont2'))

    objsrvs = [
        (obj_sockets[index],
         the_object_server.ObjectController(
             config, logger=get_logger_name('obj%d' % (index + 1))))
        for index in range(len(obj_sockets))
    ]

    if show_debug_logs:
        logger = debug_logger('proxy')

    def get_logger(name, *args, **kwargs):
        return logger

    with mock.patch('swift.common.utils.get_logger', get_logger):
        with mock.patch('swift.common.middleware.memcache.MemcacheMiddleware',
                        FakeMemcacheMiddleware):
            try:
                app = loadapp(proxy_conf, global_conf=config)
            except Exception as e:
                raise InProcessException(e)

    nl = utils.NullLogger()
    global proxy_srv
    proxy_srv = prolis
    prospa = eventlet.spawn(eventlet.wsgi.server, prolis, app, nl)
    acc1spa = eventlet.spawn(eventlet.wsgi.server, acc1lis, acc1srv, nl)
    acc2spa = eventlet.spawn(eventlet.wsgi.server, acc2lis, acc2srv, nl)
    con1spa = eventlet.spawn(eventlet.wsgi.server, con1lis, con1srv, nl)
    con2spa = eventlet.spawn(eventlet.wsgi.server, con2lis, con2srv, nl)

    objspa = [eventlet.spawn(eventlet.wsgi.server, objsrv[0], objsrv[1], nl)
              for objsrv in objsrvs]

    global _test_coros
    _test_coros = \
        (prospa, acc1spa, acc2spa, con1spa, con2spa) + tuple(objspa)

    # Create accounts "test" and "test2"
    def create_account(act):
        ts = utils.normalize_timestamp(time())
        account_ring = Ring(_testdir, ring_name='account')
        partition, nodes = account_ring.get_nodes(act)
        for node in nodes:
            # Note: we are just using the http_connect method in the object
            # controller here to talk to the account server nodes.
            conn = swift.proxy.controllers.obj.http_connect(
                node['ip'], node['port'], node['device'], partition, 'PUT',
                '/' + act, {'X-Timestamp': ts, 'x-trans-id': act})
            resp = conn.getresponse()
            assert(resp.status == 201)

    create_account('AUTH_test')
    create_account('AUTH_test2')
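
Note: the nested mock.patch calls near the end of in_process_setup() pass
a second argument, so the targets are replaced with those exact objects
(the local get_logger and FakeMemcacheMiddleware) rather than auto-created
MagicMocks. A minimal sketch of that replacement form:

import unittest

import mock


def get_logger(name):
    return 'real-logger-for-%s' % name


class ReplacementPatchTest(unittest.TestCase):

    def test_replace_with_specific_object(self):
        def fake_get_logger(name):
            return 'fake-logger'

        # With an explicit replacement, no MagicMock is created;
        # the given object is installed and later restored.
        with mock.patch('%s.get_logger' % __name__, fake_get_logger):
            self.assertEqual('fake-logger', get_logger('proxy'))
        self.assertEqual('real-logger-for-proxy', get_logger('proxy'))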

Example 168

Project: swift Source File: test_proxy_logging.py
    def test_log_request_stat_type_good(self):
        """
        log_request() should send timing and byte-count counters for GET
        requests.  Also, __call__()'s iter_response() function should
        statsd-log time to first byte (calling the passed-in start_response
        function), but only for GET requests.
        """
        stub_times = []

        def stub_time():
            return stub_times.pop(0)

        path_types = {
            '/v1/a': 'account',
            '/v1/a/': 'account',
            '/v1/a/c': 'container',
            '/v1/a/c/': 'container',
            '/v1/a/c/o': 'object',
            '/v1/a/c/o/': 'object',
            '/v1/a/c/o/p': 'object',
            '/v1/a/c/o/p/': 'object',
            '/v1/a/c/o/p/p2': 'object',
        }
        with mock.patch("time.time", stub_time):
            for path, exp_type in path_types.items():
                # GET
                app = proxy_logging.ProxyLoggingMiddleware(
                    FakeApp(body='7654321', response_str='321 Fubar'), {})
                app.access_logger = FakeLogger()
                req = Request.blank(path, environ={
                    'REQUEST_METHOD': 'GET',
                    'wsgi.input': BytesIO(b'4321')})
                stub_times = [18.0, 20.71828182846]
                iter_response = app(req.environ, lambda *_: None)

                self.assertEqual('7654321', ''.join(iter_response))
                self.assertTiming('%s.GET.321.timing' % exp_type, app,
                                  exp_timing=2.71828182846 * 1000)
                self.assertTimingSince(
                    '%s.GET.321.first-byte.timing' % exp_type, app,
                    exp_start=18.0)
                if exp_type == 'object':
                    # Object operations also return stats by policy. In this
                    # case, the value needs to match the timing for GET.
                    self.assertTiming('%s.policy.0.GET.321.timing' % exp_type,
                                      app, exp_timing=2.71828182846 * 1000)
                    self.assertUpdateStats([('%s.GET.321.xfer' % exp_type,
                                             4 + 7),
                                            ('object.policy.0.GET.321.xfer',
                                            4 + 7)],
                                           app)
                else:
                    self.assertUpdateStats([('%s.GET.321.xfer' % exp_type,
                                            4 + 7)],
                                           app)

                # GET: repeat the test above, but with a non-existent policy
                # Do this only for object types
                if exp_type == 'object':
                    app = proxy_logging.ProxyLoggingMiddleware(
                        FakeApp(body='7654321', response_str='321 Fubar',
                                policy_idx='-1'), {})
                    app.access_logger = FakeLogger()
                    req = Request.blank(path, environ={
                        'REQUEST_METHOD': 'GET',
                        'wsgi.input': BytesIO(b'4321')})
                    stub_times = [18.0, 20.71828182846]
                    iter_response = app(req.environ, lambda *_: None)

                    self.assertEqual('7654321', ''.join(iter_response))
                    self.assertTiming('%s.GET.321.timing' % exp_type, app,
                                      exp_timing=2.71828182846 * 1000)
                    self.assertTimingSince(
                        '%s.GET.321.first-byte.timing' % exp_type, app,
                        exp_start=18.0)
                    # No results returned for the non-existent policy
                    self.assertUpdateStats([('%s.GET.321.xfer' % exp_type,
                                            4 + 7)],
                                           app)

                # GET with swift.proxy_access_log_made already set
                app = proxy_logging.ProxyLoggingMiddleware(
                    FakeApp(body='7654321', response_str='321 Fubar'), {})
                app.access_logger = FakeLogger()
                req = Request.blank(path, environ={
                    'REQUEST_METHOD': 'GET',
                    'swift.proxy_access_log_made': True,
                    'wsgi.input': BytesIO(b'4321')})
                stub_times = [18.0, 20.71828182846]
                iter_response = app(req.environ, lambda *_: None)
                self.assertEqual('7654321', ''.join(iter_response))
                self.assertEqual([], app.access_logger.log_dict['timing'])
                self.assertEqual([],
                                 app.access_logger.log_dict['timing_since'])
                self.assertEqual([],
                                 app.access_logger.log_dict['update_stats'])

                # PUT (no first-byte timing!)
                app = proxy_logging.ProxyLoggingMiddleware(
                    FakeApp(body='87654321', response_str='314 PiTown'), {})
                app.access_logger = FakeLogger()
                req = Request.blank(path, environ={
                    'REQUEST_METHOD': 'PUT',
                    'wsgi.input': BytesIO(b'654321')})
                # (it's not a GET, so time() doesn't have a 2nd call)
                stub_times = [58.2, 58.2 + 7.3321]
                iter_response = app(req.environ, lambda *_: None)
                self.assertEqual('87654321', ''.join(iter_response))
                self.assertTiming('%s.PUT.314.timing' % exp_type, app,
                                  exp_timing=7.3321 * 1000)
                self.assertNotTiming(
                    '%s.GET.314.first-byte.timing' % exp_type, app)
                self.assertNotTiming(
                    '%s.PUT.314.first-byte.timing' % exp_type, app)
                if exp_type == 'object':
                    # Object operations also return stats by policy. In this
                    # case, the value needs to match the timing for PUT.
                    self.assertTiming('%s.policy.0.PUT.314.timing' %
                                      exp_type, app,
                                      exp_timing=7.3321 * 1000)
                    self.assertUpdateStats(
                        [('object.PUT.314.xfer', 6 + 8),
                         ('object.policy.0.PUT.314.xfer', 6 + 8)], app)
                else:
                    self.assertUpdateStats(
                        [('%s.PUT.314.xfer' % exp_type, 6 + 8)], app)

                # PUT: repeat the test above, but with a non-existent policy
                # Do this only for object types
                if exp_type == 'object':
                    app = proxy_logging.ProxyLoggingMiddleware(
                        FakeApp(body='87654321', response_str='314 PiTown',
                                policy_idx='-1'), {})
                    app.access_logger = FakeLogger()
                    req = Request.blank(path, environ={
                        'REQUEST_METHOD': 'PUT',
                        'wsgi.input': BytesIO(b'654321')})
                    # (it's not a GET, so time() doesn't have a 2nd call)
                    stub_times = [58.2, 58.2 + 7.3321]
                    iter_response = app(req.environ, lambda *_: None)
                    self.assertEqual('87654321', ''.join(iter_response))
                    self.assertTiming('%s.PUT.314.timing' % exp_type, app,
                                      exp_timing=7.3321 * 1000)
                    self.assertNotTiming(
                        '%s.GET.314.first-byte.timing' % exp_type, app)
                    self.assertNotTiming(
                        '%s.PUT.314.first-byte.timing' % exp_type, app)
                    # No results returned for the non-existent policy
                    self.assertUpdateStats([('object.PUT.314.xfer', 6 + 8)],
                                           app)
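
Note: the test above patches time.time with a plain stub function that
pops queued values, which makes the middleware's timing math exact and
assertable. A self-contained sketch of that technique:

import time
import unittest

import mock


class StubTimeTest(unittest.TestCase):

    def test_deterministic_timing(self):
        # Queue the values time.time() should return, then install
        # the stub for the duration of the with block.
        stub_times = [18.0, 20.5]

        def stub_time():
            return stub_times.pop(0)

        with mock.patch('time.time', stub_time):
            start = time.time()
            elapsed = time.time() - start
        self.assertEqual(2.5, elapsed)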

Example 169

Project: swift3 Source File: test_middleware.py
    def test_canonical_string_v4(self):
        def canonical_string(path, environ):
            if '?' in path:
                path, query_string = path.split('?', 1)
            else:
                query_string = ''

            env = {
                'REQUEST_METHOD': 'GET',
                'PATH_INFO': path,
                'QUERY_STRING': query_string,
                'HTTP_DATE': 'Mon, 09 Sep 2011 23:36:00 GMT',
                'HTTP_X_AMZ_CONTENT_SHA256': (
                    'e3b0c44298fc1c149afbf4c8996fb924'
                    '27ae41e4649b934ca495991b7852b855')
            }
            env.update(environ)
            with patch('swift3.request.Request._validate_headers'):
                req = SigV4Request(env)
            return req._string_to_sign()

        def verify(hash_val, path, environ):
            s = canonical_string(path, environ)
            s = s.split('\n')[3]
            self.assertEqual(hash_val, s)

        # All of the following test data comes from Amazon's aws4_testsuite:
        # http://docs.aws.amazon.com/general/latest/gr/samples
        # /aws4_testsuite.zip
        # Each *expected* hash value is the 4th line in <test-name>.sts in the
        # test suite.

        # get-vanilla
        env = {
            'HTTP_AUTHORIZATION': (
                'AWS4-HMAC-SHA256 '
                'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
                'SignedHeaders=date;host, Signature=X'),
            'HTTP_HOST': 'host.foo.com'}
        verify('366b91fb121d72a00f46bbe8d395f53a'
               '102b06dfb7e79636515208ed3fa606b1',
               '/', env)

        # get-header-value-trim
        env = {
            'REQUEST_METHOD': 'POST',
            'HTTP_AUTHORIZATION': (
                'AWS4-HMAC-SHA256 '
                'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
                'SignedHeaders=date;host;p, Signature=X'),
            'HTTP_HOST': 'host.foo.com',
            'HTTP_P': 'phfft'}
        verify('dddd1902add08da1ac94782b05f9278c'
               '08dc7468db178a84f8950d93b30b1f35',
               '/', env)

        # get-utf8 (not exact)
        env = {
            'HTTP_AUTHORIZATION': (
                'AWS4-HMAC-SHA256 '
                'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
                'SignedHeaders=date;host, Signature=X'),
            'HTTP_HOST': 'host.foo.com',
            'RAW_PATH_INFO': '/%E1%88%B4'}

        # This might look odd because S3 itself doesn't accept utf-8 encoded
        # multi-byte bucket names under the bucket-in-host naming constraint.
        # However, aws4_testsuite only provides a sample hash that uses a
        # utf-8 *bucket* name to verify correctness (it is probably meant for
        # AWS resources other than S3), so to exercise utf-8 as well, the
        # bucket name validation is skipped in the following test.

        # NOTE: eventlet's PATH_INFO is unquoted
        with patch('swift3.request.validate_bucket_name'):
            verify('27ba31df5dbc6e063d8f87d62eb07143'
                   'f7f271c5330a917840586ac1c85b6f6b',
                   unquote('/%E1%88%B4'), env)

        # get-vanilla-query-order-key
        env = {
            'HTTP_AUTHORIZATION': (
                'AWS4-HMAC-SHA256 '
                'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
                'SignedHeaders=date;host, Signature=X'),
            'HTTP_HOST': 'host.foo.com'}
        verify('2f23d14fe13caebf6dfda346285c6d9c'
               '14f49eaca8f5ec55c627dd7404f7a727',
               '/?a=foo&b=foo', env)

        # post-header-value-case
        env = {
            'REQUEST_METHOD': 'POST',
            'HTTP_AUTHORIZATION': (
                'AWS4-HMAC-SHA256 '
                'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
                'SignedHeaders=date;host;zoo, Signature=X'),
            'HTTP_HOST': 'host.foo.com',
            'HTTP_ZOO': 'ZOOBAR'}
        verify('3aae6d8274b8c03e2cc96fc7d6bda4b9'
               'bd7a0a184309344470b2c96953e124aa',
               '/', env)

        # post-x-www-form-urlencoded-parameters
        env = {
            'REQUEST_METHOD': 'POST',
            'HTTP_AUTHORIZATION': (
                'AWS4-HMAC-SHA256 '
                'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
                'SignedHeaders=date;host;content-type, Signature=X'),
            'HTTP_HOST': 'host.foo.com',
            'HTTP_X_AMZ_CONTENT_SHA256':
                '3ba8907e7a252327488df390ed517c45'
                'b96dead033600219bdca7107d1d3f88a',
            'CONTENT_TYPE':
                'application/x-www-form-urlencoded; charset=utf8'}
        verify('c4115f9e54b5cecf192b1eaa23b8e88e'
               'd8dc5391bd4fde7b3fff3d9c9fe0af1f',
               '/', env)

        # post-x-www-form-urlencoded
        env = {
            'REQUEST_METHOD': 'POST',
            'HTTP_AUTHORIZATION': (
                'AWS4-HMAC-SHA256 '
                'Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, '
                'SignedHeaders=date;host;content-type, Signature=X'),
            'HTTP_HOST': 'host.foo.com',
            'HTTP_X_AMZ_CONTENT_SHA256':
                '3ba8907e7a252327488df390ed517c45'
                'b96dead033600219bdca7107d1d3f88a',
            'CONTENT_TYPE':
                'application/x-www-form-urlencoded'}
        verify('4c5c6e4b52fb5fb947a8733982a8a5a6'
               '1b14f04345cbfe6e739236c76dd48f74',
               '/', env)

Example 170

Project: zaqar Source File: test_claims.py
    def test_lifecycle(self):
        doc = '{"ttl": 100, "grace": 60}'

        # First, claim some messages
        body = self.simulate_post(self.claims_path, self.project_id, body=doc)
        self.assertEqual(falcon.HTTP_201, self.srmock.status)

        claimed = jsonutils.loads(body[0])
        claim_href = self.srmock.headers_dict['Location']
        message_href, params = claimed[0]['href'].split('?')

        # No more messages to claim
        self.simulate_post(self.claims_path, self.project_id, body=doc,
                           query_string='limit=3')
        self.assertEqual(falcon.HTTP_204, self.srmock.status)

        headers = {
            'Client-ID': str(uuid.uuid4()),
        }

        # Listing messages, by default, won't include claimed
        body = self.simulate_get(self.messages_path, self.project_id,
                                 headers=headers)
        self.assertEqual(falcon.HTTP_204, self.srmock.status)

        # Include claimed messages this time
        body = self.simulate_get(self.messages_path, self.project_id,
                                 query_string='include_claimed=true',
                                 headers=headers)
        listed = jsonutils.loads(body[0])
        self.assertEqual(falcon.HTTP_200, self.srmock.status)
        self.assertEqual(len(claimed), len(listed['messages']))

        now = timeutils.utcnow() + datetime.timedelta(seconds=10)
        timeutils_utcnow = 'oslo_utils.timeutils.utcnow'
        with mock.patch(timeutils_utcnow) as mock_utcnow:
            mock_utcnow.return_value = now
            body = self.simulate_get(claim_href, self.project_id)

        claim = jsonutils.loads(body[0])

        self.assertEqual(falcon.HTTP_200, self.srmock.status)
        self.assertEqual(claim_href,
                         self.srmock.headers_dict['Content-Location'])
        self.assertEqual(100, claim['ttl'])
        # NOTE(cpp-cabrera): verify that claim age is non-negative
        self.assertThat(claim['age'], matchers.GreaterThan(-1))

        # Try to delete the message without submitting a claim_id
        self.simulate_delete(message_href, self.project_id)
        self.assertEqual(falcon.HTTP_403, self.srmock.status)

        # Delete the message and its associated claim
        self.simulate_delete(message_href, self.project_id,
                             query_string=params)
        self.assertEqual(falcon.HTTP_204, self.srmock.status)

        # Try to get it from the wrong project
        self.simulate_get(message_href, 'bogus_project', query_string=params)
        self.assertEqual(falcon.HTTP_404, self.srmock.status)

        # Get the message
        self.simulate_get(message_href, self.project_id, query_string=params)
        self.assertEqual(falcon.HTTP_404, self.srmock.status)

        # Update the claim
        new_claim_ttl = '{"ttl": 60}'
        creation = timeutils.utcnow()
        self.simulate_patch(claim_href, self.project_id, body=new_claim_ttl)
        self.assertEqual(falcon.HTTP_204, self.srmock.status)

        # Get the claimed messages (again)
        body = self.simulate_get(claim_href, self.project_id)
        query = timeutils.utcnow()
        claim = jsonutils.loads(body[0])
        message_href, params = claim['messages'][0]['href'].split('?')

        self.assertEqual(60, claim['ttl'])
        estimated_age = timeutils.delta_seconds(creation, query)
        self.assertTrue(estimated_age > claim['age'])

        # Delete the claim
        self.simulate_delete(claim['href'], 'bad_id')
        self.assertEqual(falcon.HTTP_204, self.srmock.status)

        self.simulate_delete(claim['href'], self.project_id)
        self.assertEqual(falcon.HTTP_204, self.srmock.status)

        # Try to delete a message with an invalid claim ID
        self.simulate_delete(message_href, self.project_id,
                             query_string=params)
        self.assertEqual(falcon.HTTP_400, self.srmock.status)

        # Make sure it wasn't deleted!
        self.simulate_get(message_href, self.project_id, query_string=params)
        self.assertEqual(falcon.HTTP_200, self.srmock.status)

        # Try to get a claim that doesn't exist
        self.simulate_get(claim['href'])
        self.assertEqual(falcon.HTTP_404, self.srmock.status)

        # Try to update a claim that doesn't exist
        self.simulate_patch(claim['href'], body=doc)
        self.assertEqual(falcon.HTTP_404, self.srmock.status)
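
Note: the zaqar test freezes "now" ten seconds in the future by patching
oslo_utils.timeutils.utcnow (held in a string variable) and assigning
return_value on the mock the context manager yields. A minimal sketch of
the same clock-pinning pattern against a module-level helper:

import datetime
import unittest

import mock


def utcnow():
    return datetime.datetime.utcnow()


class FreezeClockTest(unittest.TestCase):

    def test_pin_the_clock(self):
        utcnow_target = '%s.utcnow' % __name__
        future = datetime.datetime(2016, 1, 1, 0, 0, 10)
        # The `as` clause yields the installed MagicMock, whose
        # return_value pins every utcnow() call inside the block.
        with mock.patch(utcnow_target) as mock_utcnow:
            mock_utcnow.return_value = future
            self.assertEqual(future, utcnow())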

Example 171

Project: zaqar Source File: test_claims.py
    def test_lifecycle(self):
        doc = '{"ttl": 100, "grace": 60}'

        # First, claim some messages
        body = self.simulate_post(self.claims_path, body=doc,
                                  headers=self.headers)
        self.assertEqual(falcon.HTTP_201, self.srmock.status)

        claimed = jsonutils.loads(body[0])['messages']
        claim_href = self.srmock.headers_dict['Location']
        message_href, params = claimed[0]['href'].split('?')

        # No more messages to claim
        self.simulate_post(self.claims_path, body=doc,
                           query_string='limit=3', headers=self.headers)
        self.assertEqual(falcon.HTTP_204, self.srmock.status)

        # Listing messages, by default, won't include claimed, will echo
        body = self.simulate_get(self.messages_path,
                                 headers=self.headers,
                                 query_string="echo=true")
        self.assertEqual(falcon.HTTP_200, self.srmock.status)
        self._empty_message_list(body)

        # Listing messages, by default, won't include claimed, won't echo
        body = self.simulate_get(self.messages_path,
                                 headers=self.headers,
                                 query_string="echo=false")
        self.assertEqual(falcon.HTTP_200, self.srmock.status)
        self._empty_message_list(body)

        # List messages, include_claimed, but don't echo
        body = self.simulate_get(self.messages_path,
                                 query_string='include_claimed=true'
                                              '&echo=false',
                                 headers=self.headers)

        self.assertEqual(falcon.HTTP_200, self.srmock.status)
        self._empty_message_list(body)

        # List messages with a different client-id and echo=false.
        # Should return some messages
        headers = self.headers.copy()
        headers["Client-ID"] = str(uuid.uuid4())
        body = self.simulate_get(self.messages_path,
                                 query_string='include_claimed=true'
                                              '&echo=false',
                                 headers=headers)

        self.assertEqual(falcon.HTTP_200, self.srmock.status)

        # Include claimed messages this time, and echo
        body = self.simulate_get(self.messages_path,
                                 query_string='include_claimed=true'
                                              '&echo=true',
                                 headers=self.headers)
        listed = jsonutils.loads(body[0])
        self.assertEqual(falcon.HTTP_200, self.srmock.status)
        self.assertEqual(len(claimed), len(listed['messages']))

        now = timeutils.utcnow() + datetime.timedelta(seconds=10)
        timeutils_utcnow = 'oslo_utils.timeutils.utcnow'
        with mock.patch(timeutils_utcnow) as mock_utcnow:
            mock_utcnow.return_value = now
            body = self.simulate_get(claim_href, headers=self.headers)

        claim = jsonutils.loads(body[0])

        self.assertEqual(falcon.HTTP_200, self.srmock.status)
        self.assertEqual(100, claim['ttl'])
        # NOTE(cpp-cabrera): verify that claim age is non-negative
        self.assertThat(claim['age'], matchers.GreaterThan(-1))

        # Try to delete the message without submitting a claim_id
        self.simulate_delete(message_href, headers=self.headers)
        self.assertEqual(falcon.HTTP_403, self.srmock.status)

        # Delete the message and its associated claim
        self.simulate_delete(message_href,
                             query_string=params, headers=self.headers)
        self.assertEqual(falcon.HTTP_204, self.srmock.status)

        # Try to get it from the wrong project
        headers = {
            'Client-ID': str(uuid.uuid4()),
            'X-Project-ID': 'bogusproject'
        }
        self.simulate_get(message_href, query_string=params, headers=headers)
        self.assertEqual(falcon.HTTP_404, self.srmock.status)

        # Get the message
        self.simulate_get(message_href, query_string=params,
                          headers=self.headers)
        self.assertEqual(falcon.HTTP_404, self.srmock.status)

        # Update the claim
        new_claim_ttl = '{"ttl": 60, "grace": 60}'
        creation = timeutils.utcnow()
        self.simulate_patch(claim_href, body=new_claim_ttl,
                            headers=self.headers)
        self.assertEqual(falcon.HTTP_204, self.srmock.status)

        # Get the claimed messages (again)
        body = self.simulate_get(claim_href, headers=self.headers)
        query = timeutils.utcnow()
        claim = jsonutils.loads(body[0])
        message_href, params = claim['messages'][0]['href'].split('?')

        self.assertEqual(60, claim['ttl'])
        estimated_age = timeutils.delta_seconds(creation, query)
        self.assertTrue(estimated_age > claim['age'])

        # Delete the claim
        self.simulate_delete(claim['href'], headers=self.headers)
        self.assertEqual(falcon.HTTP_204, self.srmock.status)

        # Try to delete a message with an invalid claim ID
        self.simulate_delete(message_href,
                             query_string=params, headers=self.headers)
        self.assertEqual(falcon.HTTP_400, self.srmock.status)

        # Make sure it wasn't deleted!
        self.simulate_get(message_href, query_string=params,
                          headers=self.headers)
        self.assertEqual(falcon.HTTP_200, self.srmock.status)

        # Try to get a claim that doesn't exist
        self.simulate_get(claim['href'], headers=self.headers)
        self.assertEqual(falcon.HTTP_404, self.srmock.status)

        # Try to update a claim that doesn't exist
        self.simulate_patch(claim['href'], body=doc,
                            headers=self.headers)
        self.assertEqual(falcon.HTTP_404, self.srmock.status)

Example 172

Project: glean Source File: test_glean.py
    @mock.patch('platform.dist', new_callable=mock.Mock)
    @mock.patch('subprocess.call', return_value=0, new_callable=mock.Mock)
    @mock.patch('subprocess.check_output', return_value=0,
                new_callable=mock.Mock)
    @mock.patch('os.unlink', return_value=0, new_callable=mock.Mock)
    @mock.patch('os.symlink', return_value=0, new_callable=mock.Mock)
    @mock.patch('os.path.exists', new_callable=mock.Mock)
    @mock.patch('os.listdir', new_callable=mock.Mock)
    @mock.patch('os.system', return_value=0, new_callable=mock.Mock)
    @mock.patch('glean.cmd.open', new_callable=mock.Mock)
    @mock.patch.object(sys, 'argv', ['./glean', '--hostname'])
    def _assert_distro_provider(self, distro, provider, interface,
                                mock_open,
                                mock_os_system,
                                mock_os_listdir,
                                mock_os_path_exists,
                                mock_os_symlink,
                                mock_os_unlink,
                                mock_check_output,
                                mock_call,
                                mock_platform_dist):
        """Main test function

        :param distro: distro to return from "platform.dist"
        :param provider: we will look in fixtures/provider for mocked
                         out files
        :param interface: --interface argument; None for no argument
        """

        mock_platform_dist.return_value = (distro, '', '')

        # These functions are watching the path and faking results
        # based on various things
        # XXX : There are several virtual file-systems available, we
        # might like to look into them and just point ourselves at
        # testing file-systems in the future if this becomes more
        # complex.
        mock_os_path_exists.side_effect = functools.partial(
            self.os_path_exists_side_effect, provider)
        mock_os_listdir.side_effect = functools.partial(
            self.os_listdir_side_effect, provider)
        mock_open.side_effect = functools.partial(
            self.open_side_effect, provider)

        if interface:
            sys.argv.append('--interface=%s' % interface)

        cmd.main()

        output_filename = '%s.%s.network.out' % (provider, distro.lower())
        output_path = os.path.join(sample_data_path, 'test', output_filename)

        # Generate a list of (dest, content) into write_blocks to assert
        write_blocks = []
        lines = open(output_path).readlines()
        write_dest = None
        write_content = None
        for line in lines:
            if line.startswith('### Write '):
                if write_dest is not None:
                    write_blocks.append((write_dest, write_content))
                write_dest = line[len('### Write '):-1]
                write_content = ''
            else:
                write_content += line
        if write_dest is not None:
            write_blocks.append((write_dest, write_content))

        for dest, content in write_blocks:
            if interface and interface not in dest:
                continue
            self.assertNotIn("eth2", dest)
            self.assertIn(dest, self.file_handle_mocks)
            write_handle = self.file_handle_mocks[dest].write
            write_handle.assert_called_once_with(content)

        if self._resolv_unlinked:
            mock_os_unlink.assert_called_once_with('/etc/resolv.conf')

        # Check hostname
        meta_data_path = 'mnt/config/openstack/latest/meta_data.json'
        hostname = None
        with open(os.path.join(sample_data_path, provider,
                               meta_data_path)) as fh:
            meta_data = json.load(fh)
            hostname = meta_data['name']

        mock_call.assert_has_calls([mock.call(['hostname', hostname])])
        if distro.lower() == 'gentoo':
            (self.file_handle_mocks['/etc/conf.d/hostname'].write.
                assert_has_calls([mock.call(hostname)]))
        else:
            self.file_handle_mocks['/etc/hostname'].write.assert_has_calls(
                [mock.call(hostname), mock.call('\n')])

        # Check hosts entry
        hostname_ip = ips[provider]
        calls = [mock.call('%s %s\n' % (hostname_ip, hostname)), ]
        short_hostname = hostname.split('.')[0]
        if hostname != short_hostname:
            calls.append(mock.call('%s %s\n' % (hostname_ip, short_hostname)))

        self.file_handle_mocks['/etc/hosts'].write.assert_has_calls(
            calls, any_order=True)
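
Note: the glean test stacks many mock.patch decorators; they apply
bottom-up, so the mock for the decorator nearest the function arrives
first in the argument list, and mock.patch.object with an explicit
replacement (sys.argv here) injects no argument at all. A short sketch
of both rules:

import os
import sys
import unittest

import mock


class StackedPatchTest(unittest.TestCase):

    # Bottom decorator applies first: patch.object injects nothing,
    # then os.path.exists, then os.unlink.
    @mock.patch('os.unlink', return_value=None)
    @mock.patch('os.path.exists', return_value=True)
    @mock.patch.object(sys, 'argv', ['./tool', '--hostname'])
    def test_argument_order(self, mock_exists, mock_unlink):
        self.assertEqual(['./tool', '--hostname'], sys.argv)
        self.assertTrue(os.path.exists('/etc/resolv.conf'))
        os.unlink('/etc/resolv.conf')
        mock_unlink.assert_called_once_with('/etc/resolv.conf')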

Example 173

Project: ptp Source File: test_parser.py
    @mock.patch('lxml.etree.parse', side_effect=lxml_etree_parse)
    def test_parser_arachni_xml_parse_report(self, mock_lxml_etree_parse):
        # Arachni version 1.0
        from .arachni_reports_1_0 import report_low
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_low]):
            ArachniXMLParser.__format__ = ''
            my_arachni = ArachniXMLParser('foo', 'bar', first=True)
            report = my_arachni.parse_report()
            assert_that(report, has_items(*[{'ranking': LOW}] * 3))
            assert_that(report, is_not(has_item([{'ranking': UNKNOWN}])))
            assert_that(report, is_not(has_item([{'ranking': INFO}])))
            assert_that(report, is_not(has_item([{'ranking': MEDIUM}])))
            assert_that(report, is_not(has_item([{'ranking': HIGH}])))
        from .arachni_reports_1_0 import report_high
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_high]):
            ArachniXMLParser.__format__ = ''
            my_arachni = ArachniXMLParser('foo', 'bar', first=True)
            report = my_arachni.parse_report()
            assert_that(report, has_items(*[{'ranking': HIGH}] * 4))
            assert_that(report, has_items(*[{'ranking': LOW}] * 2))
            assert_that(report, is_not(has_item([{'ranking': UNKNOWN}])))
            assert_that(report, is_not(has_item([{'ranking': INFO}])))
            assert_that(report, is_not(has_item([{'ranking': MEDIUM}])))
        # Arachni version 1.0.1
        from .arachni_reports_1_0_1 import report_low
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_low]):
            ArachniXMLParser.__format__ = ''
            my_arachni = ArachniXMLParser('foo', 'bar', first=True)
            report = my_arachni.parse_report()
            assert_that(report, has_items(*[{'ranking': LOW}] * 3))
            assert_that(report, is_not(has_item([{'ranking': UNKNOWN}])))
            assert_that(report, is_not(has_item([{'ranking': INFO}])))
            assert_that(report, is_not(has_item([{'ranking': MEDIUM}])))
            assert_that(report, is_not(has_item([{'ranking': HIGH}])))
        from .arachni_reports_1_0_1 import report_high
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_high]):
            ArachniXMLParser.__format__ = ''
            my_arachni = ArachniXMLParser('foo', 'bar', first=True)
            report = my_arachni.parse_report()
            assert_that(report, has_items(*[{'ranking': HIGH}] * 4))
            assert_that(report, has_items(*[{'ranking': LOW}] * 2))
            assert_that(report, is_not(has_item([{'ranking': UNKNOWN}])))
            assert_that(report, is_not(has_item([{'ranking': INFO}])))
            assert_that(report, is_not(has_item([{'ranking': MEDIUM}])))
        # Arachni version 1.0.2
        from .arachni_reports_1_0_2 import report_low
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_low]):
            ArachniXMLParser.__format__ = ''
            my_arachni = ArachniXMLParser('foo', 'bar', first=True)
            report = my_arachni.parse_report()
            assert_that(report, has_items(*[{'ranking': LOW}] * 3))
            assert_that(report, is_not(has_item([{'ranking': UNKNOWN}])))
            assert_that(report, is_not(has_item([{'ranking': INFO}])))
            assert_that(report, is_not(has_item([{'ranking': MEDIUM}])))
            assert_that(report, is_not(has_item([{'ranking': HIGH}])))
        from .arachni_reports_1_0_2 import report_high
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_high]):
            ArachniXMLParser.__format__ = ''
            my_arachni = ArachniXMLParser('foo', 'bar', first=True)
            report = my_arachni.parse_report()
            assert_that(report, has_items(*[{'ranking': HIGH}] * 4))
            assert_that(report, has_items(*[{'ranking': LOW}] * 2))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': MEDIUM})))
        # Arachni version 1.0.3
        from .arachni_reports_1_0_3 import report_low
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_low]):
            ArachniXMLParser.__format__ = ''
            my_arachni = ArachniXMLParser('foo', 'bar', first=True)
            report = my_arachni.parse_report()
            assert_that(report, has_items(*[{'ranking': LOW}] * 3))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': MEDIUM})))
            assert_that(report, is_not(has_item({'ranking': HIGH})))
        from .arachni_reports_1_0_3 import report_high
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_high]):
            ArachniXMLParser.__format__ = ''
            my_arachni = ArachniXMLParser('foo', 'bar', first=True)
            report = my_arachni.parse_report()
            assert_that(report, has_items(*[{'ranking': HIGH}] * 4))
            assert_that(report, has_items(*[{'ranking': LOW}] * 2))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': MEDIUM})))
        # Arachni version 1.0.4
        from .arachni_reports_1_0_4 import report_low
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_low]):
            ArachniXMLParser.__format__ = ''
            my_arachni = ArachniXMLParser('foo', 'bar', first=True)
            report = my_arachni.parse_report()
            assert_that(report, has_items(*[{'ranking': LOW}] * 3))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': MEDIUM})))
            assert_that(report, is_not(has_item({'ranking': HIGH})))
        from .arachni_reports_1_0_4 import report_high
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_high]):
            ArachniXMLParser.__format__ = ''
            my_arachni = ArachniXMLParser('foo', 'bar', first=True)
            report = my_arachni.parse_report()
            assert_that(report, has_items(*[{'ranking': HIGH}] * 4))
            assert_that(report, has_items(*[{'ranking': LOW}] * 2))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': MEDIUM})))
        # Arachni version 1.0.5
        from .arachni_reports_1_0_5 import report_low
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_low]):
            ArachniXMLParser.__format__ = ''
            my_arachni = ArachniXMLParser('foo', 'bar', first=True)
            report = my_arachni.parse_report()
            assert_that(report, has_items(*[{'ranking': LOW}] * 3))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': MEDIUM})))
            assert_that(report, is_not(has_item({'ranking': HIGH})))
        from .arachni_reports_1_0_5 import report_high
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_high]):
            ArachniXMLParser.__format__ = ''
            my_arachni = ArachniXMLParser('foo', 'bar', first=True)
            report = my_arachni.parse_report()
            assert_that(report, has_items(*[{'ranking': HIGH}] * 4))
            assert_that(report, has_items(*[{'ranking': LOW}] * 2))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': MEDIUM})))
        # Arachni version 1.0.6
        from .arachni_reports_1_0_6 import report_low
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_low]):
            ArachniXMLParser.__format__ = ''
            my_arachni = ArachniXMLParser('foo', 'bar', first=True)
            report = my_arachni.parse_report()
            assert_that(report, has_items(*[{'ranking': LOW}] * 3))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': MEDIUM})))
            assert_that(report, is_not(has_item({'ranking': HIGH})))
        from .arachni_reports_1_0_6 import report_high
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_high]):
            ArachniXMLParser.__format__ = ''
            my_arachni = ArachniXMLParser('foo', 'bar', first=True)
            report = my_arachni.parse_report()
            assert_that(report, has_items(*[{'ranking': HIGH}] * 4))
            assert_that(report, has_items(*[{'ranking': LOW}] * 2))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': MEDIUM})))
        # Arachni version 1.1
        from .arachni_reports_1_1 import report_low
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_low]):
            ArachniXMLParser.__format__ = ''
            my_arachni = ArachniXMLParser('foo', 'bar', first=True)
            report = my_arachni.parse_report()
            assert_that(report, has_items(*[{'ranking': LOW}] * 3))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': MEDIUM})))
            assert_that(report, is_not(has_item({'ranking': HIGH})))
        from .arachni_reports_1_1 import report_high
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_high]):
            ArachniXMLParser.__format__ = ''
            my_arachni = ArachniXMLParser('foo', 'bar', first=True)
            report = my_arachni.parse_report()
            assert_that(report, has_items(*[{'ranking': HIGH}] * 4))
            assert_that(report, has_items(*[{'ranking': LOW}] * 2))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': MEDIUM})))
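
The eight per-version blocks above are identical except for the fixture module they import, so the whole test could be driven by one loop. Below is a condensed sketch of that idea; the VERSIONS tuple, the check_arachni_version helper, and the importlib-based fixture lookup are illustrative additions, and the ptp import paths (ptp.libptp.constants, ptp.tools.arachni.xml) are assumed from the project's usual layout rather than confirmed by this page:

    # A hedged sketch, not ptp's actual test: collapse the repeated
    # per-version blocks into a loop over the fixture modules.
    import importlib

    import mock
    from hamcrest import assert_that, has_item, has_items, is_not

    from ptp.libptp.constants import UNKNOWN, INFO, LOW, MEDIUM, HIGH  # assumed path
    from ptp.tools.arachni.xml import ArachniXMLParser  # assumed path

    VERSIONS = ('1_0', '1_0_1', '1_0_2', '1_0_3', '1_0_4', '1_0_5', '1_0_6', '1_1')

    def check_arachni_version(version):
        # Hypothetical helper: load e.g. .arachni_reports_1_0 relative to the
        # test package and feed its report_low fixture to the parser.
        fixtures = importlib.import_module(
            '.arachni_reports_%s' % version, package=__package__)
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find',
                        return_value=[fixtures.report_low]):
            ArachniXMLParser.__format__ = ''
            report = ArachniXMLParser('foo', 'bar', first=True).parse_report()
            # Expect exactly the LOW findings and no other ranking bucket.
            assert_that(report, has_items(*[{'ranking': LOW}] * 3))
            for ranking in (UNKNOWN, INFO, MEDIUM, HIGH):
                assert_that(report, is_not(has_item({'ranking': ranking})))

    for version in VERSIONS:
        check_arachni_version(version)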

Example 174

Project: ptp Source File: test_parser.py
    @mock.patch('lxml.etree.parse', side_effect=lxml_etree_parse)
    def test_parser_w3af_xml_parse_report(self, mock_lxml_etree_parse):
        # W3AF version 1.6.0.2
        from .w3af_reports_1_6_0_2 import report_high
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_high]):
            W3AFXMLParser.__format__ = ''
            my_w3af = W3AFXMLParser('foo', 'bar', first=True)
            report = my_w3af.parse_report()
            assert_that(report, has_items(*[{'ranking': LOW}] * 7))
            assert_that(report, has_items(*[{'ranking': MEDIUM}] * 9))
            assert_that(report, has_items(*[{'ranking': HIGH}] * 14))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
        # W3AF version 1.6.0.3
        from .w3af_reports_1_6_0_3 import report_medium
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_medium]):
            W3AFXMLParser.__format__ = ''
            my_w3af = W3AFXMLParser('foo', 'bar', first=True)
            report = my_w3af.parse_report()
            assert_that(report, has_items(*[{'ranking': MEDIUM}] * 5))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': LOW})))
            assert_that(report, is_not(has_item({'ranking': HIGH})))
        from .w3af_reports_1_6_0_3 import report_high
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_high]):
            W3AFXMLParser.__format__ = ''
            my_w3af = W3AFXMLParser('foo', 'bar', first=True)
            report = my_w3af.parse_report()
            assert_that(report, has_items(*[{'ranking': MEDIUM}] * 3))
            assert_that(report, has_items(*[{'ranking': HIGH}] * 4))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': LOW})))
        # W3AF version 1.6.0.5
        from .w3af_reports_1_6_0_5 import report_medium
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_medium]):
            W3AFXMLParser.__format__ = ''
            my_w3af = W3AFXMLParser('foo', 'bar', first=True)
            report = my_w3af.parse_report()
            assert_that(report, has_items(*[{'ranking': MEDIUM}] * 5))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': LOW})))
            assert_that(report, is_not(has_item({'ranking': HIGH})))
        from .w3af_reports_1_6_0_5 import report_high
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_high]):
            W3AFXMLParser.__format__ = ''
            my_w3af = W3AFXMLParser('foo', 'bar', first=True)
            report = my_w3af.parse_report()
            assert_that(report, has_items(*[{'ranking': MEDIUM}] * 3))
            assert_that(report, has_items(*[{'ranking': HIGH}] * 4))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': LOW})))
        # W3AF version 1.6.45
        from .w3af_reports_1_6_45 import report_medium
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_medium]):
            W3AFXMLParser.__format__ = ''
            my_w3af = W3AFXMLParser('foo', 'bar', first=True)
            report = my_w3af.parse_report()
            assert_that(report, has_items(*[{'ranking': MEDIUM}] * 5))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': LOW})))
            assert_that(report, is_not(has_item({'ranking': HIGH})))
        from .w3af_reports_1_6_45 import report_high
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_high]):
            W3AFXMLParser.__format__ = ''
            my_w3af = W3AFXMLParser('foo', 'bar', first=True)
            report = my_w3af.parse_report()
            assert_that(report, has_items(*[{'ranking': MEDIUM}] * 3))
            assert_that(report, has_items(*[{'ranking': HIGH}] * 4))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': LOW})))
        # W3AF version 1.6.46
        from .w3af_reports_1_6_46 import report_medium
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_medium]):
            W3AFXMLParser.__format__ = ''
            my_w3af = W3AFXMLParser('foo', 'bar', first=True)
            report = my_w3af.parse_report()
            assert_that(report, has_items(*[{'ranking': MEDIUM}] * 5))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': LOW})))
            assert_that(report, is_not(has_item({'ranking': HIGH})))
        from .w3af_reports_1_6_46 import report_high
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_high]):
            W3AFXMLParser.__format__ = ''
            my_w3af = W3AFXMLParser('foo', 'bar', first=True)
            report = my_w3af.parse_report()
            assert_that(report, has_items(*[{'ranking': MEDIUM}] * 3))
            assert_that(report, has_items(*[{'ranking': HIGH}] * 4))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': LOW})))
        # W3AF version 1.6.49
        from .w3af_reports_1_6_49 import report_medium
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_medium]):
            W3AFXMLParser.__format__ = ''
            my_w3af = W3AFXMLParser('foo', 'bar', first=True)
            report = my_w3af.parse_report()
            assert_that(report, has_items(*[{'ranking': MEDIUM}] * 5))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': LOW})))
            assert_that(report, is_not(has_item({'ranking': HIGH})))
        from .w3af_reports_1_6_49 import report_high
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_high]):
            W3AFXMLParser.__format__ = ''
            my_w3af = W3AFXMLParser('foo', 'bar', first=True)
            report = my_w3af.parse_report()
            assert_that(report, has_items(*[{'ranking': MEDIUM}] * 3))
            assert_that(report, has_items(*[{'ranking': HIGH}] * 4))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': LOW})))
        # W3AF version 1.6.50
        from .w3af_reports_1_6_50 import report_medium
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_medium]):
            W3AFXMLParser.__format__ = ''
            my_w3af = W3AFXMLParser('foo', 'bar', first=True)
            report = my_w3af.parse_report()
            assert_that(report, has_items(*[{'ranking': MEDIUM}] * 5))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': LOW})))
            assert_that(report, is_not(has_item({'ranking': HIGH})))
        from .w3af_reports_1_6_50 import report_high
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_high]):
            W3AFXMLParser.__format__ = ''
            my_w3af = W3AFXMLParser('foo', 'bar', first=True)
            report = my_w3af.parse_report()
            assert_that(report, has_items(*[{'ranking': MEDIUM}] * 3))
            assert_that(report, has_items(*[{'ranking': HIGH}] * 4))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': LOW})))
        # W3AF version 1.6.51
        from .w3af_reports_1_6_51 import report_medium
        with mock.patch('ptp.libptp.parser.AbstractParser._recursive_find', return_value=[report_medium]):
            W3AFXMLParser.__format__ = ''
            my_w3af = W3AFXMLParser('foo', 'bar', first=True)
            report = my_w3af.parse_report()
            assert_that(report, has_items(*[{'ranking': MEDIUM}] * 5))
            assert_that(report, is_not(has_item({'ranking': UNKNOWN})))
            assert_that(report, is_not(has_item({'ranking': INFO})))
            assert_that(report, is_not(has_item({'ranking': LOW})))
            assert_that(report, is_not(has_item({'ranking': HIGH})))
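
Note the decorator form on this test: mock.patch('lxml.etree.parse', side_effect=lxml_etree_parse) replaces the patched callable with a mock that delegates every call to the supplied function while still recording those calls, and the mock itself is handed to the test as mock_lxml_etree_parse. A minimal, self-contained illustration of that pattern (load_config and fake_loader are hypothetical names, not part of ptp):

    # Decorator-style mock.patch with side_effect: the mock forwards calls
    # to fake_loader and still records them for later assertions.
    import mock

    def load_config(path):
        # Stands in for something that would touch the filesystem.
        raise IOError('not reached while patched')

    def fake_loader(path):
        return {'path': path, 'debug': True}

    @mock.patch('%s.load_config' % __name__, side_effect=fake_loader)
    def check(mock_load):
        # The module-level name load_config now resolves to the mock.
        assert load_config('settings.yml') == {'path': 'settings.yml', 'debug': True}
        mock_load.assert_called_once_with('settings.yml')

    check()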

Example 175

Project: IRC-Bot Source File: ircbot_tests.py
Function: test_main
    def test_main(self):
        from ircbot import main

        with nested(
            patch('ircbot.check_cfg'),
            patch('ircbot.check_channel'),
            patch('ircbot.create_socket'),
            patch('ircbot.get_datetime'),
            patch('config.owner', new='owner'),
            patch('config.server', new='server'),
            patch('config.nicks', new=['foo', 'bar']),
            patch('config.real_name', new='real name'),
            patch('config.log', new='log_foo'),
            patch('config.cmds_list', new=['baz', 'bar']),
            patch('config.port', new=42),
            patch('signal.signal'),
            patch('ircbot.sigint_handler'),
            patch('config.channels', new=['#chan1', '#chan2']),
            patch('ircbot.connect_to'),
            patch('ircbot.log_write'),
            patch('config.current_nick', new='nick'),
            patch('ircbot.name_bot'),
            patch('ircbot.join_channels'),
            patch('ircbot.run'),
            patch('ircbot.quit_bot'),
            patch('sys.stdout', new=StringIO()),
        ) as (check_cfg, check_channel, create_socket, get_dt,
        owner, server, nicks, real_name, log, cmds_list, port, signal,
        sigint_handler, channels, connect_to, log_write, current_nick,
        name_bot, join_channels, run, quit_bot, stdout):
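            # nested() yields the patched objects in the same order the
            # patch() calls are declared, which is how this long tuple
            # lines up with them.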
            s = Mock()
            get_dt.return_value = {'date': '42', 'time': '42'}
            logfile = log + get_dt.return_value['date'] + '.log'
            check_cfg.return_value = False

            self.assertRaises(SystemExit, main)

            check_cfg.return_value = True
            check_channel.return_value = False
            self.assertRaises(SystemExit, main)

            check_channel.return_value = True
            create_socket.return_value = False
            connect_to.return_value = False
            self.assertIsNone(main())

            create_socket.return_value = True
            connect_to.return_value = False
            self.assertIsNone(main())

            create_socket.return_value = False
            connect_to.return_value = True
            self.assertIsNone(main())

            create_socket.return_value = s
            connect_to.return_value = True

            main()

            connect_msg = 'Connected to {0}:{1}\n'.format(server, port)
            disconnect_msg = 'Disconnected from {0}:{1}\n'.format(server, port)

            expected_log_write_calls = [
                call(logfile, '42', ' <> ',
                    connect_msg),
                call(logfile, '42', ' <> ',
                    disconnect_msg),
            ]

            self.assertListEqual(expected_log_write_calls,
                log_write.call_args_list)

            self.assertEqual(stdout.getvalue(), connect_msg + disconnect_msg)
            s.close.assert_called_with()
            name_bot.assert_called_with(s, nicks, real_name, logfile)
            join_channels.assert_called_with(channels, s, logfile)
            run.assert_called_with(s, channels, cmds_list, name_bot(), logfile)

            join_channels.return_value = False
            log_write.call_args_list = []
            main()

            connect_msg = 'Connected to {0}:{1}\n'.format(server, port)
            disconnect_msg = 'Disconnected from {0}:{1}\n'.format(server, port)

            expected_log_write_calls = [
                call(logfile, '42', ' <> ',
                    connect_msg),
                call(logfile, '42', ' <> ',
                    disconnect_msg),
            ]

            self.assertListEqual(expected_log_write_calls,
                log_write.call_args_list)

            self.assertEqual(stdout.getvalue(), (connect_msg + disconnect_msg)*2)
            s.close.assert_called_with()
            name_bot.assert_called_with(s, nicks, real_name, logfile)
            join_channels.assert_called_with(channels, s, logfile)
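
contextlib.nested, which this test leans on, was deprecated in Python 2.7 and removed in Python 3, so the idiom above only runs under Python 2. On Python 3 the usual way to apply a long list of patches at once is contextlib.ExitStack (or stacking the decorators). A minimal sketch, using only standard-library targets:

    # Python 3 equivalent of nested() for a pile of patches.
    from contextlib import ExitStack
    from unittest import mock

    TARGETS = ('os.getcwd', 'os.listdir', 'os.path.exists')

    with ExitStack() as stack:
        # enter_context() starts each patcher and hands back its MagicMock;
        # every patch is undone when the with block exits.
        mocks = {target: stack.enter_context(mock.patch(target))
                 for target in TARGETS}
        mocks['os.path.exists'].return_value = False

        import os
        os.getcwd()
        assert os.path.exists('/tmp') is False
        mocks['os.getcwd'].assert_called_once_with()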