mock.patch

Here are examples of the Python API mock.patch, taken from open source projects.

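Before the project examples, here is a minimal self-contained sketch of the two mock.patch forms that recur throughout this page: the decorator form and the context-manager form. It assumes Python 3's unittest.mock; many of the projects below target Python 2 and import the standalone mock package instead, but the API is the same.

import os
import unittest
from unittest import mock  # on Python 2: import mock


class PatchBasicsTest(unittest.TestCase):

    # Decorator form: the patch is active for the whole test method, and the
    # replacement MagicMock is passed in as an extra argument. When decorators
    # are stacked (as in most examples below), the mocks arrive bottom-up: the
    # decorator closest to the def supplies the first argument after self.
    @mock.patch('os.path.exists')
    def test_decorator(self, mock_exists):
        mock_exists.return_value = True
        self.assertTrue(os.path.exists('/no/such/file'))
        mock_exists.assert_called_once_with('/no/such/file')

    # Context-manager form: the patch applies only inside the with block
    # (used in Examples 5, 9, and 12 below).
    def test_context_manager(self):
        with mock.patch('os.path.exists', return_value=False) as mock_exists:
            self.assertFalse(os.path.exists('/etc/hosts'))
        mock_exists.assert_called_once_with('/etc/hosts')


if __name__ == '__main__':
    unittest.main()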

Example 1

Project: bsd-cloudinit
Source File: test_network.py
    @mock.patch('cloudbaseinit.utils.windows.network._format_mac_address')
    @mock.patch('cloudbaseinit.utils.windows.network._socket_addr_to_str')
    @mock.patch('cloudbaseinit.utils.windows.network'
                '._get_registry_dhcp_server')
    def _test_get_adapter_addresses(self, mock_get_registry_dhcp_server,
                                    mock_socket_addr_to_str,
                                    mock_format_mac_address,
                                    ret_val, p, ret_val2, xp_data_length):
        self.maxDiff = None

        mock_byref = self._ctypes_mock.byref
        mock_cast = self._ctypes_mock.cast
        mock_POINTER = self._ctypes_mock.POINTER

        self.network.iphlpapi.GetAdaptersAddresses.side_effect = [ret_val,
                                                                  ret_val2]
        self.network.kernel32.HeapAlloc.return_value = p
        self.network.iphlpapi.IP_ADAPTER_DHCP_ENABLED = True
        self.network.iphlpapi.IP_ADAPTER_IPV4_ENABLED = True
        self.network.iphlpapi.IP_ADAPTER_ADDRESSES_SIZE_2003 = xp_data_length

        p_curr_addr = mock.MagicMock()

        compare_cast = []
        net_adapters = []
        compare_socket_addr_to_str = []

        mock_cast.side_effect = [p_curr_addr, None, None]
        curr_addr = p_curr_addr.contents
        curr_addr.Flags = True
        curr_addr.Union1.Struct1.Length = 2
        curr_addr.Dhcpv4Server.iSockaddrLength = True

        p_unicast_addr = curr_addr.FirstUnicastAddress
        unicast_addr = p_unicast_addr.contents
        unicast_addresses = [
            (mock_socket_addr_to_str.return_value,
             unicast_addr.Address.lpSockaddr.contents.sa_family)]

        filter_flags = (self.network.iphlpapi.GAA_FLAG_SKIP_ANYCAST |
                        self.network.iphlpapi.GAA_FLAG_SKIP_MULTICAST)

        compare_GetAdaptersAddresses = [mock.call(
            self.network.ws2_32.AF_UNSPEC,
            filter_flags,
            None, None, mock_byref.return_value)]

        if not p:
            self.assertRaises(cbinit_exception.CloudbaseInitException,
                              self.network.get_adapter_addresses)

        if ret_val2 and ret_val2 != self.network.kernel32.ERROR_NO_DATA:
            self.assertRaises(cbinit_exception.CloudbaseInitException,
                              self.network.get_adapter_addresses)
            compare_cast.append(mock.call(p, mock_POINTER.return_value))

            compare_GetAdaptersAddresses.append(mock.call(
                self.network.ws2_32.AF_UNSPEC,
                filter_flags, None,
                p_curr_addr, mock_byref.return_value))

        else:
            response = self.network.get_adapter_addresses()

            if ret_val == self.network.kernel32.ERROR_NO_DATA:
                self.assertEqual([], response)

            elif ret_val == self.network.kernel32.ERROR_BUFFER_OVERFLOW:
                self.network.kernel32.GetProcessHeap.assert_called_once_with()

                self.network.kernel32.HeapAlloc.assert_called_once_with(
                    self.network.kernel32.GetProcessHeap.return_value, 0,
                    self._ctypes_mock.wintypes.ULONG.return_value.value)

                self.network.ws2_32.init_wsa.assert_called_once_with()
                compare_cast.append(mock.call(p, mock_POINTER.return_value))

                compare_GetAdaptersAddresses.append(mock.call(
                    self.network.ws2_32.AF_UNSPEC,
                    filter_flags, None,
                    p_curr_addr, mock_byref.return_value))

                if ret_val2 == self.network.kernel32.ERROR_NO_DATA:
                    self.assertEqual([], response)

                else:
                    compare_cast.append(mock.call(p_unicast_addr.contents.Next,
                                                  mock_POINTER.return_value))

                    mock_format_mac_address.assert_called_once_with(
                        p_curr_addr.contents.PhysicalAddress,
                        p_curr_addr.contents.PhysicalAddressLength)

                    if not curr_addr.Union1.Struct1.Length <= xp_data_length:
                        dhcp_server = mock_socket_addr_to_str.return_value
                        compare_socket_addr_to_str.append(
                            mock.call(curr_addr.Dhcpv4Server |
                                      curr_addr.Dhcpv6Server))
                    else:
                        dhcp_server = \
                            mock_get_registry_dhcp_server.return_value

                        mock_get_registry_dhcp_server.assert_called_once_with(
                            curr_addr.AdapterName)

                    compare_cast.append(mock.call(curr_addr.Next,
                                                  mock_POINTER.return_value))
                    self.network.kernel32.HeapFree.assert_called_once_with(
                        self.network.kernel32.GetProcessHeap.return_value, 0,
                        p)

                    self.network.ws2_32.WSACleanup.assert_called_once_with()

                    compare_socket_addr_to_str.append(mock.call(
                        unicast_addr.Address))

                    net_adapters.append(
                        {"interface_index": curr_addr.Union1.Struct1.IfIndex,
                         "adapter_name": curr_addr.AdapterName,
                         "friendly_name": curr_addr.FriendlyName,
                         "description": curr_addr.Description,
                         "mtu": curr_addr.Mtu,
                         "mac_address": mock_format_mac_address.return_value,
                         "dhcp_enabled": True,
                         "dhcp_server": dhcp_server,
                         "interface_type": curr_addr.IfType,
                         "unicast_addresses": unicast_addresses})

                    self.assertEqual(net_adapters, response)

        self.assertEqual(compare_cast, mock_cast.call_args_list)

        self.assertEqual(
            compare_GetAdaptersAddresses,
            self.network.iphlpapi.GetAdaptersAddresses.call_args_list)

Example 2

Project: pypowervm
Source File: test_memory.py
    @mock.patch('pypowervm.wrappers.job.Job.wrap')
    @mock.patch('pypowervm.wrappers.job.Job.run_job')
    @mock.patch('pypowervm.wrappers.job.Job.create_job_parameter')
    @mock.patch('pypowervm.wrappers.job.Job.get_job_results_as_dict')
    def test_calculate_memory_overhead_on_host(self, mock_job_dict_res,
                                               mock_job_p,
                                               mock_run_job,
                                               mock_job_w):
        """Performs a simple set of calculate_memory_overhead_on_host tests."""

        def _reset_mocks():
            mock_job_w.reset_mock()
            mock_job_p.reset_mock()
            mock_run_job.reset_mock()
            mock_job_dict_res.reset_mock()

        def raise_exc_se():
            raise Exception

        mock_job_w.return_value = self.mock_job
        mock_host_uuid = '1234'
        args = ['ManagedSystem', mock_host_uuid]
        kwargs = {'suffix_type': 'do', 'suffix_parm': ('QueryReservedMemory'
                                                       'RequiredForPartition')}

        # test empty job results dictionary with defaults
        mock_job_dict_res.return_value = {'RequiredMemory': None,
                                          'CurrentAvailableSystemMemory': None}
        overhead, avail = (memory.
                           calculate_memory_overhead_on_host(self.adpt,
                                                             mock_host_uuid))
        self.adpt.read.assert_called_once_with(*args, **kwargs)
        self.assertEqual(1, mock_job_w.call_count)
        self.assertEqual(6, mock_job_p.call_count)
        self.assertEqual(1, mock_run_job.call_count)
        self.assertEqual(1, mock_job_dict_res.call_count)
        self.assertEqual(512, overhead)
        self.assertEqual(None, avail)
        _reset_mocks()

        # test with desired mem and non empty job results dict
        mock_job_dict_res.return_value = {'RequiredMemory': 1024,
                                          'CurrentAvailableSystemMemory':
                                          32768}
        reserved_mem_data = {'desired_mem': 768, 'num_virt_eth_adapters': 2}
        kwargs2 = {'reserved_mem_data': reserved_mem_data}
        overhead, avail = (memory.
                           calculate_memory_overhead_on_host(self.adpt,
                                                             mock_host_uuid,
                                                             **kwargs2))
        self.assertEqual(6, mock_job_p.call_count)
        self.assertEqual((1024-768), overhead)
        self.assertEqual(32768, avail)
        _reset_mocks()

        # test defaults when run_job fails
        mock_run_job.side_effect = raise_exc_se
        overhead, avail = (memory.
                           calculate_memory_overhead_on_host(self.adpt,
                                                             mock_host_uuid))
        mock_job_p.assert_any_call('LogicalPartitionEnvironment',
                                   'AIX/Linux')
        mock_job_p.assert_any_call('DesiredMemory', '512')
        mock_job_p.assert_any_call('MaximumMemory', '32768')
        mock_job_p.assert_any_call('NumberOfVirtualEthernetAdapter', '2')
        mock_job_p.assert_any_call('NumberOfVirtualSCSIAdapter', '1')
        mock_job_p.assert_any_call('NumberOfVirtualFibreChannelAdapter', '1')
        self.assertEqual(512, overhead)
        self.assertEqual(None, avail)
        self.assertEqual(0, mock_job_dict_res.call_count)
        _reset_mocks()

        # test reserved_mem_data values are created as job params
        reserved_mem_data = {'desired_mem': 2048,
                             'max_mem': 65536,
                             'lpar_env': 'OS400',
                             'num_virt_eth_adapters': 4,
                             'num_vscsi_adapters': 5,
                             'num_vfc_adapters': 6}
        kwargs3 = {'reserved_mem_data': reserved_mem_data}
        overhead, avail = (memory.
                           calculate_memory_overhead_on_host(self.adpt,
                                                             mock_host_uuid,
                                                             **kwargs3))
        mock_job_p.assert_any_call('LogicalPartitionEnvironment',
                                   'OS400')
        mock_job_p.assert_any_call('DesiredMemory', '2048')
        mock_job_p.assert_any_call('MaximumMemory', '65536')
        mock_job_p.assert_any_call('NumberOfVirtualEthernetAdapter', '4')
        mock_job_p.assert_any_call('NumberOfVirtualSCSIAdapter', '5')
        mock_job_p.assert_any_call('NumberOfVirtualFibreChannelAdapter', '6')
        self.assertEqual(512, overhead)
        self.assertEqual(None, avail)

Example 3

Project: pypowervm
Source File: test_power.py
    @mock.patch('pypowervm.wrappers.job.Job.run_job')
    @mock.patch('pypowervm.wrappers.job.Job.create_job_parameter')
    @mock.patch('pypowervm.wrappers.logical_partition.LPAR')
    def test_power_on_off(self, mock_lpar, mock_job_p, mock_run_job):
        """Performs a simple set of Power On/Off Tests."""
        def run_job_mock(**kwargs1):
            """Produce a run_job method that validates the given kwarg values.

            E.g. run_job_mock(foo='bar') will produce a mock run_job that
            asserts its foo argument is 'bar'.
            """
            def run_job(*args, **kwargs2):
                for key, val in kwargs1.items():
                    self.assertEqual(val, kwargs2[key])
            return run_job

        mock_lpar.adapter = self.adpt
        power._power_on_off(mock_lpar, 'PowerOn', '1111')
        self.assertEqual(1, mock_run_job.call_count)
        self.assertEqual(0, mock_job_p.call_count)
        self.assertEqual(1, self.adpt.invalidate_cache_elem.call_count)
        mock_run_job.reset_mock()
        mock_job_p.reset_mock()
        self.adpt.reset_mock()

        # Try a power off
        power._power_on_off(mock_lpar, 'PowerOff', '1111')
        self.assertEqual(1, mock_run_job.call_count)
        # Only the operation parameter is appended
        self.assertEqual(1, mock_job_p.call_count)
        mock_run_job.reset_mock()
        mock_job_p.reset_mock()

        # Try a power off when the RMC state is active
        mock_lpar.rmc_state = pvm_bp.RMCState.ACTIVE
        power._power_on_off(mock_lpar, 'PowerOff', '1111')
        self.assertEqual(1, mock_run_job.call_count)
        # The operation and immediate(no-delay) parameters are appended
        self.assertEqual(2, mock_job_p.call_count)
        mock_lpar.reset_mock()
        mock_run_job.reset_mock()
        mock_job_p.reset_mock()

        # Try a power off of IBMi
        mock_lpar.rmc_state = pvm_bp.RMCState.INACTIVE
        mock_lpar.env = pvm_bp.LPARType.OS400
        mock_lpar.ref_code = '00000000'
        power._power_on_off(mock_lpar, 'PowerOff', '1111')
        self.assertEqual(1, mock_run_job.call_count)
        # Only the operation parameter is appended
        self.assertEqual(1, mock_job_p.call_count)
        mock_job_p.assert_called_with('operation', 'osshutdown')
        mock_lpar.reset_mock()
        mock_run_job.reset_mock()
        mock_job_p.reset_mock()

        # Try a more complex power off
        power._power_on_off(mock_lpar, 'PowerOff', '1111',
                            force_immediate=True, restart=True, timeout=100)
        self.assertEqual(1, mock_run_job.call_count)
        self.assertEqual(3, mock_job_p.call_count)
        mock_run_job.reset_mock()
        mock_job_p.reset_mock()

        mock_run_job.side_effect = run_job_mock(synchronous=True)
        # Try optional parameters
        power.power_on(mock_lpar, '1111',
                       add_parms={power.BootMode.KEY: power.BootMode.SMS})
        self.assertEqual(1, mock_run_job.call_count)
        self.assertEqual(1, mock_job_p.call_count)
        mock_job_p.assert_called_with(power.BootMode.KEY, power.BootMode.SMS)
        mock_run_job.reset_mock()
        mock_job_p.reset_mock()

        power.power_on(mock_lpar, '1111', add_parms={
            pvm_lpar.IPLSrc.KEY: pvm_lpar.IPLSrc.A}, synchronous=True)
        self.assertEqual(1, mock_run_job.call_count)
        self.assertEqual(1, mock_job_p.call_count)
        mock_job_p.assert_called_with(pvm_lpar.IPLSrc.KEY, pvm_lpar.IPLSrc.A)
        mock_run_job.reset_mock()
        mock_job_p.reset_mock()

        mock_run_job.side_effect = run_job_mock(synchronous=False)
        power.power_on(mock_lpar, '1111', add_parms={
            power.KeylockPos.KEY: power.KeylockPos.MANUAL}, synchronous=False)
        self.assertEqual(1, mock_run_job.call_count)
        self.assertEqual(1, mock_job_p.call_count)
        mock_job_p.assert_called_with(power.KeylockPos.KEY,
                                      power.KeylockPos.MANUAL)

Example 4

Project: pypowervm
Source File: test_vopt.py
    @mock.patch('pypowervm.tasks.partition.get_active_vioses')
    @mock.patch('pypowervm.wrappers.storage.VG.get')
    @mock.patch('pypowervm.wrappers.storage.VMediaRepos.bld')
    def test_validate_vopt_vg2(self, mock_vmr_bld, mock_vg_get, mock_vios_get):
        """Dual VIOS, multiple VGs, repos on non-rootvg."""
        vwrap1 = mock.Mock()
        vwrap1.configure_mock(name='vio1', rmc_state='active', uuid='vio_id1',
                              is_mgmt_partition=False)
        vwrap2 = mock.Mock()
        vwrap2.configure_mock(name='vio2', rmc_state='active', uuid='vio_id2',
                              is_mgmt_partition=False)
        mock_vios_get.return_value = [vwrap1, vwrap2]
        vg1 = mock.Mock()
        vg1.configure_mock(name='rootvg', vmedia_repos=[], uuid='vg1')
        vg2 = mock.Mock()
        vg2.configure_mock(name='other1vg', vmedia_repos=[], uuid='vg2')
        vg3 = mock.Mock()
        vg3.configure_mock(name='rootvg', vmedia_repos=[], uuid='vg3')
        vg4 = mock.Mock()
        vg4.configure_mock(name='other2vg', vmedia_repos=[1], uuid='vg4')

        # 1: Find the media repos on non-rootvg on the second VIOS
        mock_vg_get.side_effect = [[vg1, vg2], [vg3, vg4]]

        vio_ret_uuid, vg_ret_uuid = vopt.validate_vopt_repo_exists(self.apt)
        self.assertEqual('vio_id2', vio_ret_uuid)
        self.assertEqual('vg4', vg_ret_uuid)

        mock_vios_get.reset_mock()
        mock_vg_get.reset_mock()

        # 2: At this point, the statics are set.  If we validate again, and the
        # VG.get returns the right one, we should bail out early.
        mock_vg_get.side_effect = None
        mock_vg_get.return_value = vg4

        vio_ret_uuid, vg_ret_uuid = vopt.validate_vopt_repo_exists(self.apt)
        self.assertEqual('vio_id2', vio_ret_uuid)
        self.assertEqual('vg4', vg_ret_uuid)

        # Statics unchanged
        self.assertEqual('vg4', vopt._cur_vg_uuid)
        self.assertEqual('vio_id2', vopt._cur_vios_uuid)

        # We didn't have to query the VIOS
        mock_vios_get.assert_not_called()
        # We only did VG.get once
        self.assertEqual(1, mock_vg_get.call_count)

        mock_vg_get.reset_mock()

        # 3: Same again, but this time the repos is somewhere else.  We should
        # find it.
        vg4.vmedia_repos = []
        vg2.vmedia_repos = [1]
        # The first VG.get is looking for the already-set repos.  The second
        # will be the feed from the first VIOS.  There should be no third call,
        # since we should find the repos on VIOS 2.
        mock_vg_get.side_effect = [vg4, [vg1, vg2]]

        vio_ret_uuid, vg_ret_uuid = vopt.validate_vopt_repo_exists(self.apt)
        self.assertEqual('vio_id1', vio_ret_uuid)
        self.assertEqual('vg2', vg_ret_uuid)

        # And the static values
        self.assertEqual('vg2', vopt._cur_vg_uuid)
        self.assertEqual('vio_id1', vopt._cur_vios_uuid)

        mock_vg_get.reset_mock()
        mock_vios_get.reset_mock()

        # 4: No repository anywhere - need to create one.  The default VG name
        # (rootvg) exists in multiple places.  Ensure we create in the first
        # one, for efficiency.
        vg2.vmedia_repos = []
        mock_vg_get.side_effect = [vg1, [vg1, vg2], [vg3, vg4]]
        vg1.update.return_value = vg1

        vio_ret_uuid, vg_ret_uuid = vopt.validate_vopt_repo_exists(self.apt)
        self.assertEqual('vio_id1', vio_ret_uuid)
        self.assertEqual('vg1', vg_ret_uuid)

        self.assertEqual('vg1', vopt._cur_vg_uuid)
        self.assertEqual('vio_id1', vopt._cur_vios_uuid)
        self.assertEqual([mock_vmr_bld.return_value], vg1.vmedia_repos)

        mock_vg_get.reset_mock()
        mock_vios_get.reset_mock()
        vg1 = mock.MagicMock()

        # 5: No repos, need to create one.  But not on the mgmt partition.
        vwrap1.configure_mock(name='vio1', rmc_state='active', uuid='vio_id1',
                              is_mgmt_partition=True)
        vg3.vmedia_repos = []
        mock_vg_get.side_effect = [vg1, [vg1, vg2], [vg3, vg4]]
        vg3.update.return_value = vg3

        vio_ret_uuid, vg_ret_uuid = vopt.validate_vopt_repo_exists(self.apt)
        self.assertEqual('vio_id2', vio_ret_uuid)
        self.assertEqual('vg3', vg_ret_uuid)

        self.assertEqual('vg3', vopt._cur_vg_uuid)
        self.assertEqual('vio_id2', vopt._cur_vios_uuid)
        self.assertEqual([mock_vmr_bld.return_value], vg3.vmedia_repos)

        mock_vg_get.reset_mock()
        mock_vios_get.reset_mock()
        vg3 = mock.MagicMock()

        # 6: No repos, and a configured VG name that doesn't exist
        vwrap1.configure_mock(name='vio1', rmc_state='active', uuid='vio_id1',
                              is_mgmt_partition=False)
        vg4.vmedia_repos = []
        mock_vg_get.side_effect = [vg1, [vg1, vg2], [vg3, vg4]]

        self.assertRaises(pvm_ex.NoMediaRepoVolumeGroupFound,
                          vopt.validate_vopt_repo_exists, self.apt,
                          vopt_media_volume_group='mythicalvg')

        # 7: No repos - need to create.  Make sure conf setting is honored.
        vg1.vmedia_repos = []

        mock_vg_get.side_effect = [vg1, [vg1, vg2], [vg3, vg4]]
        vg4.update.return_value = vg4

        vio_ret_uuid, vg_ret_uuid = vopt.validate_vopt_repo_exists(
            self.apt, vopt_media_volume_group='other2vg')
        self.assertEqual('vio_id2', vio_ret_uuid)
        self.assertEqual('vg4', vg_ret_uuid)

        self.assertEqual('vg4', vopt._cur_vg_uuid)
        self.assertEqual('vio_id2', vopt._cur_vios_uuid)
        self.assertEqual([mock_vmr_bld.return_value], vg4.vmedia_repos)
        vg1.update.assert_not_called()

Example 5

Project: pypowervm
Source File: test_validation.py
    def test_validator(self):
        # Test desired proc units > host avail proc units fails for shared
        vldr = vldn.LPARWrapperValidator(self.lpar_21_procs, self.mngd_sys)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)

        # Test desired proc units < host avail proc units passes for shared
        vldn.LPARWrapperValidator(self.lpar_1_proc,
                                  self.mngd_sys).validate_all()

        # Test desired proc units > host avail proc units fails for dedicated
        vldr = vldn.LPARWrapperValidator(self.lpar_21_proc_ded, self.mngd_sys)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)

        # Test desired proc units < host avail proc units passes for dedicated
        vldn.LPARWrapperValidator(self.lpar_1_proc_ded,
                                  self.mngd_sys).validate_all()

        # Test resize fails with inactive rmc
        vldr = vldn.LPARWrapperValidator(self.lpar_1_proc, self.mngd_sys,
                                         cur_lpar_w=self.lpar_no_rmc)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test resize fails with no mem dlpar
        vldr = vldn.LPARWrapperValidator(self.lpar_1_proc, self.mngd_sys,
                                         cur_lpar_w=self.lpar_bad_mem_dlpar)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test resize fails with no proc dlpar
        vldr = vldn.LPARWrapperValidator(self.lpar_1_proc, self.mngd_sys,
                                         cur_lpar_w=self.lpar_bad_proc_dlpar)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)

        # Test dedicated procs > host max allowed procs per lpar fails
        vldr = vldn.LPARWrapperValidator(self.lpar_11_proc_ded, self.mngd_sys)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test dedicated max procs > host max sys procs limit fails
        vldr = vldn.LPARWrapperValidator(self.lpar_16_proc_max_ded,
                                         self.mngd_sys)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test shared desired vcpus > host max allowed vcpus per lpar fails
        vldr = vldn.LPARWrapperValidator(self.lpar_11_vcpus, self.mngd_sys)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test shared desired max vcpus > host max sys vcpus limit fails
        vldr = vldn.LPARWrapperValidator(self.lpar_16_max_vcpus, self.mngd_sys)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)

        # Test desired memory > host available memory fails
        vldr = vldn.LPARWrapperValidator(self.lpar_48g_mem, self.mngd_sys)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)

        # Test changing min vcpus fails for active resize
        vldr = vldn.LPARWrapperValidator(self.lpar_1_min_vcpus, self.mngd_sys,
                                         cur_lpar_w=self.lpar_2_min_vcpus)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test changing max vcpus fails for active resize
        vldr = vldn.LPARWrapperValidator(self.lpar_8_max_vcpus, self.mngd_sys,
                                         cur_lpar_w=self.lpar_6_max_vcpus)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test changing min proc units fails for active resize
        vldr = vldn.LPARWrapperValidator(self.lpar_3_min_proc_units,
                                         self.mngd_sys,
                                         cur_lpar_w=self.lpar_1_min_proc_units)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test changing max proc units fails for active resize
        vldr = vldn.LPARWrapperValidator(self.lpar_9_max_proc_units,
                                         self.mngd_sys,
                                         cur_lpar_w=self.lpar_6_max_proc_units)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test changing min memory fails for active resize
        vldr = vldn.LPARWrapperValidator(self.lpar_512mb_min_mem,
                                         self.mngd_sys,
                                         cur_lpar_w=self.lpar_1gb_min_mem)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test changing max memory fails for active resize
        vldr = vldn.LPARWrapperValidator(self.lpar_8g_max_mem, self.mngd_sys,
                                         cur_lpar_w=self.lpar_6g_max_mem)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test changing AME expansion factor from 2 to 3 fails active resize
        vldr = vldn.LPARWrapperValidator(self.lpar_ame_3, self.mngd_sys,
                                         cur_lpar_w=self.lpar_ame_2)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test toggling AME fails for active resize
        vldr = vldn.LPARWrapperValidator(self.lpar_ame_2, self.mngd_sys,
                                         cur_lpar_w=self.lpar_1_proc)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test resizing lpar from defaultSPP to non-defaultSPP passes
        vldr = vldn.LPARWrapperValidator(self.lpar_non_default_spp,
                                         self.mngd_sys,
                                         cur_lpar_w=self.lpar_default_spp)
        vldr.validate_all()
        # Test resizing lpar from non-defaultSPP to defaultSPP passes
        vldr = vldn.LPARWrapperValidator(self.lpar_default_spp,
                                         self.mngd_sys,
                                         cur_lpar_w=self.lpar_non_default_spp)
        vldr.validate_all()
        # Test changing from dedicated to non-defaultSPP passes
        vldr = vldn.LPARWrapperValidator(self.lpar_non_default_spp,
                                         self.mngd_sys,
                                         self.lpar_1_proc_ded_inactive)
        vldr.validate_all()
        # Test changing processor mode (shared -> ded) fails for active resize
        vldr = vldn.LPARWrapperValidator(self.lpar_1_proc_ded,
                                         self.mngd_sys,
                                         cur_lpar_w=self.lpar_1_proc)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test changing processor mode (ded to shared) fails for active resize
        vldr = vldn.LPARWrapperValidator(self.lpar_1_proc,
                                         self.mngd_sys,
                                         cur_lpar_w=self.lpar_1_proc_ded)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test changing processor compatibility mode fails for active resize
        vldr = vldn.LPARWrapperValidator(self.lpar_power8_proc_compat,
                                         self.mngd_sys,
                                         cur_lpar_w=self.lpar_1_proc)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test changing SRR capability fails for active resize
        vldr = vldn.LPARWrapperValidator(self.lpar_srr_disabled,
                                         self.mngd_sys,
                                         cur_lpar_w=self.lpar_1_proc)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test desired delta proc units > host avail proc units fails
        # during resize (shared -> shared)
        vldr = vldn.LPARWrapperValidator(self.lpar_22_procs, self.mngd_sys,
                                         cur_lpar_w=self.lpar_1_proc)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test desired delta proc units <= host avail proc units passes
        # during resize (shared -> shared)
        vldn.LPARWrapperValidator(self.lpar_21_procs,
                                  self.mngd_sys,
                                  cur_lpar_w=self.lpar_1_proc).validate_all()
        # Test desired delta proc units > host avail proc units fails
        # during resize (dedicated -> dedicated)
        vldr = vldn.LPARWrapperValidator(self.lpar_22_proc_ded, self.mngd_sys,
                                         cur_lpar_w=self.lpar_1_proc_ded)
        self.assertRaises(vldn.ValidatorException, vldr.validate_all)
        # Test desired delta proc units <= host avail proc units passes
        # during resize (dedicated -> dedicated)
        vldn.LPARWrapperValidator(self.lpar_4_proc_ded,
                                  self.mngd_sys,
                                  self.lpar_1_proc_ded).validate_all()
        # Test resize delta mem
        mem_vldr = vldn.MemValidator(self.lpar_6g_mem, self.mngd_sys,
                                     cur_lpar_w=self.lpar_4g_mem)
        mem_vldr._populate_new_values()
        mem_vldr._populate_resize_diffs()
        self.assertEqual(2048, mem_vldr.delta_des_mem,
                         'Incorrect resize delta memory calculation')
        # Test resize delta procs
        proc_vldr = vldn.ProcValidator(self.lpar_4_proc_ded, self.mngd_sys,
                                       cur_lpar_w=self.lpar_1_proc_ded)
        proc_vldr._populate_new_values()
        proc_vldr._populate_resize_diffs()
        self.assertEqual(3, proc_vldr.delta_des_vcpus,
                         'Incorrect resize delta proc calculation'
                         ' in dedicated mode')
        proc_vldr = vldn.ProcValidator(self.lpar_2dot2_proc_units,
                                       self.mngd_sys,
                                       cur_lpar_w=self.lpar_1dot6_proc_units)
        proc_vldr._populate_new_values()
        proc_vldr._populate_resize_diffs()
        self.assertEqual(0.60, proc_vldr.delta_des_vcpus,
                         'Incorrect resize delta proc calculation in'
                         ' shared mode')
        proc_vldr = vldn.ProcValidator(self.lpar_1dot6_proc_units,
                                       self.mngd_sys,
                                       cur_lpar_w=self.lpar_1_proc_ded)
        proc_vldr._populate_new_values()
        proc_vldr._populate_resize_diffs()
        self.assertEqual(0.60, proc_vldr.delta_des_vcpus,
                         'Incorrect delta proc calculation while resizing '
                         'from dedicated to shared mode')
        proc_vldr = vldn.ProcValidator(self.lpar_4_proc_ded, self.mngd_sys,
                                       cur_lpar_w=self.lpar_1dot6_proc_units)
        proc_vldr._populate_new_values()
        proc_vldr._populate_resize_diffs()
        self.assertEqual(2.40, proc_vldr.delta_des_vcpus,
                         'Incorrect delta proc calculation while resizing '
                         'from shared to dedicated mode')
        # Test resizing not activated state lpar makes inactive_resize_checks
        with mock.patch('pypowervm.utils.validation.ProcValidator.'
                        '_validate_inactive_resize') as inactive_resize_checks:
            proc_vldr = vldn.ProcValidator(self.lpar_not_activated,
                                           self.mngd_sys,
                                           cur_lpar_w=self.lpar_not_activated)
            proc_vldr.validate()
            self.assertTrue(inactive_resize_checks.called,
                            'Inactive resize validations not performed.')
        # Test resizing running state lpar makes active_resize_checks
        with mock.patch('pypowervm.utils.validation.ProcValidator.'
                        '_validate_active_resize') as active_resize_checks:
            proc_vldr = vldn.ProcValidator(self.lpar_running, self.mngd_sys,
                                           cur_lpar_w=self.lpar_running)
            proc_vldr.validate()
            self.assertTrue(active_resize_checks.called,
                            'Active resize validations not performed.')
        # Test resizing starting state lpar makes active_resize_checks
        with mock.patch('pypowervm.utils.validation.ProcValidator.'
                        '_validate_active_resize') as active_resize_checks:
            proc_vldr = vldn.ProcValidator(self.lpar_starting, self.mngd_sys,
                                           cur_lpar_w=self.lpar_starting)
            proc_vldr.validate()
            self.assertTrue(active_resize_checks.called,
                            'Active resize validations not performed.')

Example 6

Project: pulp
Source File: test_openssl.py
    @mock.patch('shutil.rmtree')
    @mock.patch('pulp.repoauth.openssl.subprocess.check_call')
    @mock.patch('pulp.repoauth.openssl.tempfile.mkdtemp')
    @mock.patch('pulp.repoauth.openssl.tempfile.NamedTemporaryFile')
    def test_verify_signature_invalid(self, NamedTemporaryFile, mkdtemp, check_call, mock_rmtree):
        """
        Ensure that verify() returns False when the signature is invalid.
        """
        a_tempdir = '/some/dir/'
        cert_filename = '%s%s' % (a_tempdir, 'a.crt')
        ca_filename = '%s%s' % (a_tempdir, 'ca.pack')

        fake_filenames = [cert_filename, ca_filename]
        fake_files = []

        def fake_NamedTemporaryFile(mode, dir, delete):
            fake_file = mock.MagicMock()
            fake_file.name = fake_filenames.pop(0)
            fake_files.append(fake_file)
            return fake_file

        NamedTemporaryFile.side_effect = fake_NamedTemporaryFile

        mkdtemp.return_value = a_tempdir

        # The first time should succeed, the second time should error (simulating openssl failing
        # the signature check).
        check_call_side_effects = [None, subprocess.CalledProcessError(mock.MagicMock(),
                                                                       mock.MagicMock())]

        def fake_check_call(*args, **kwargs):
            """
            Does nothing the first time it is called, and then raises CalledProcessError the second
            time to simulate the certificate check failing.
            """
            what_to_do = check_call_side_effects.pop(0)
            if what_to_do:
                raise what_to_do

        # This will allow us to simulate the expiration check failing
        check_call.side_effect = fake_check_call

        cert_data = "I'm trying to trick you with an expired certificate!"
        ca_chain = [openssl.Certificate(c) for c in ['A CA', 'Another CA']]
        cert = openssl.Certificate(cert_data)

        valid = cert.verify(ca_chain)

        # The Certificate should show as invalid
        self.assertEqual(valid, False)
        # mkdtemp should have one call
        mkdtemp.assert_called_once_with()
        # Two NamedTemporaryFiles should have been created. One for the Certificate, and one for the
        # CA pack.
        self.assertEqual(NamedTemporaryFile.call_count, 2)
        self.assertEqual(NamedTemporaryFile.mock_calls[0][2],
                         {'mode': 'w', 'dir': a_tempdir, 'delete': False})
        self.assertEqual(NamedTemporaryFile.mock_calls[1][2],
                         {'mode': 'w', 'dir': a_tempdir, 'delete': False})
        # The cert should have been written to the first NamedTemporaryFile, and then it should
        # have been closed.
        fake_files[0].write.assert_called_once_with(cert_data)
        fake_files[0].close.assert_called_once_with()
        # The CA pack should have been written to the second NamedTemporaryFile, and then it should
        # have been closed.
        fake_files[1].write.assert_called_once_with('A CA\nAnother CA')
        fake_files[1].close.assert_called_once_with()
        # check_call should have been called twice this time, once to check expiration and once to
        # check signature.
        self.assertEqual(check_call.call_count, 2)
        # Make sure openssl was called with all the correct args to check expiration
        expected_args = ['openssl', 'x509', '-in', cert_filename, '-noout', '-checkend', '0']
        self.assertEqual(check_call.mock_calls[0][1], (expected_args,))
        self.assertEqual(check_call.mock_calls[0][2],
                         {'stdout': subprocess.PIPE, 'stderr': subprocess.PIPE})
        # Make sure openssl was called with all the correct args to check signature
        expected_args = ['openssl', 'verify', '-CAfile', ca_filename, '-purpose', 'sslclient',
                         cert_filename]
        self.assertEqual(check_call.mock_calls[1][1], (expected_args,))
        self.assertEqual(check_call.mock_calls[1][2],
                         {'stdout': subprocess.PIPE, 'stderr': subprocess.PIPE})
        # Cleanup should have happened
        mock_rmtree.assert_called_once_with(a_tempdir)

Example 7

Project: pulp_python
Source File: test_sync.py
    @mock.patch('pulp_python.plugins.importers.sync.DownloadPackagesStep.__init__',
                side_effect=sync.DownloadPackagesStep.__init__, autospec=True)
    @mock.patch('pulp_python.plugins.importers.sync.publish_step.PluginStep.__init__',
                side_effect=sync.publish_step.PluginStep.__init__, autospec=True)
    @mock.patch('pulp_python.plugins.importers.sync.SyncStep.generate_download_requests',
                autospec=True)
    def test___init___one_package(self, generate_download_requests, super___init__,
                                  download_packages___init__):
        """
        Test the __init__() method when the user has specified one package to sync.
        """
        repo = mock.MagicMock()
        repo.id = 'cool_repo'
        conduit = mock.MagicMock()
        config = mock.MagicMock()
        working_dir = '/some/dir'

        def fake_get(key, default=None):
            if key == constants.CONFIG_KEY_PACKAGE_NAMES:
                return 'numpy'
            return 'http://example.com/'

        config.get.side_effect = fake_get

        step = sync.SyncStep(repo, conduit, config, working_dir)

        # The superclass __init__ method gets called four times. Once directly by this __init__, and
        # three more times by the substeps it creates.
        self.assertEqual(super___init__.call_count, 4)
        # Let's assert that the direct call was cool.
        self.assertEqual(
            super___init__.mock_calls[0],
            mock.call(step, 'sync_step_main', repo, conduit, config, working_dir,
                      constants.IMPORTER_TYPE_ID))
        self.assertEqual(step.description, _('Synchronizing cool_repo repository.'))
        # Assert that the feed url and packages names are correct
        self.assertEqual(step._feed_url, 'http://example.com/')
        self.assertEqual(step._project_names, ['numpy'])
        self.assertEqual(step.available_units, [])
        # Three child steps should have been added
        self.assertEqual(len(step.children), 3)
        self.assertEqual(type(step.children[0]), sync.DownloadMetadataStep)
        self.assertEqual(type(step.children[1]), GetLocalUnitsStep)
        self.assertEqual(type(step.children[2]), sync.DownloadPackagesStep)
        # Make sure the steps were initialized properly
        downloads = generate_download_requests.return_value
        download_packages___init__.assert_called_once_with(
            step.children[2], 'sync_step_download_packages', downloads=downloads, repo=repo,
            config=config, conduit=conduit, working_dir=working_dir,
            description=_('Downloading and processing Python packages.'))

Example 8

Project: wp-file-analyser
Source File: analyser_tests.py
@mock.patch('wpanalyser.analyser.is_wordpress')
@mock.patch('wpanalyser.analyser.os.path.exists')
@mock.patch('wpanalyser.analyser.os.makedirs')
@mock.patch('wpanalyser.analyser.find_wp_version')
@mock.patch('wpanalyser.analyser.download_wordpress')
@mock.patch('wpanalyser.analyser.unzip')
def test_process_wp_dirs(mock_unzip, mock_download_wp, mock_find_wp,
					mock_makedirs, mock_pathexists, mock_is_wordpress):
	args = mock.MagicMock()
	args.wordpress_path = 'wp'
	args.other_wordpress_path = False
	args.with_version = False

	mock_is_wordpress.side_effect = [False]
	wpPath, otherWpPath = wpa.process_wp_dirs(args)
	assert_false(wpPath)
	assert_false(otherWpPath)

	mock_is_wordpress.side_effect = [True, False]
	args.other_wordpress_path = 'owp'
	wpPath, otherWpPath = wpa.process_wp_dirs(args)
	assert_equal(wpPath, 'wp')
	assert_false(otherWpPath)

	mock_is_wordpress.side_effect = [True, True]
	wpPath, otherWpPath = wpa.process_wp_dirs(args)
	assert_equal(wpPath, 'wp')
	assert_equal(otherWpPath, 'owp')	

	mock_is_wordpress.return_value = True
	mock_is_wordpress.side_effect = None

	args.other_wordpress_path = False
	mock_pathexists.return_value = False
	mock_makedirs.side_effect = OSError
	wpPath, otherWpPath = wpa.process_wp_dirs(args)
	assert_equal(wpPath, 'wp')
	assert_false(otherWpPath)

	mock_makedirs.side_effect = None

	mock_find_wp.return_value = False
	wpPath, otherWpPath = wpa.process_wp_dirs(args)
	mock_find_wp.assert_called_with(os.path.join(wpPath, wpa.WP_VERSION_FILE_PATH))
	assert_equal(wpPath, 'wp')
	assert_false(otherWpPath)

	args.with_version = '1.4.2'
	mock_download_wp.return_value = False, False
	wpPath, otherWpPath = wpa.process_wp_dirs(args)
	assert_equal(wpPath, 'wp')
	assert_false(otherWpPath)

	mock_download_wp.return_value = True, 'wp.zip'
	mock_unzip.return_value = False
	wpPath, otherWpPath = wpa.process_wp_dirs(args)
	assert_equal(wpPath, 'wp')
	assert_false(otherWpPath)

	mock_unzip.return_value = 'wp'
	wpPath, otherWpPath = wpa.process_wp_dirs(args)
	assert_equal(wpPath, 'wp')
	assert_equal(otherWpPath, 'wpa-temp/wp')

Example 9

Project: django-vkontakte-wall
Source File: tests.py
    @mock.patch('vkontakte_users.models.User.remote.fetch', side_effect=user_fetch_mock)
    def test_fetch_group_post_changing_reposts(self, *args, **kwargs):

        group = GroupFactory(remote_id=GROUP_ID)
        post = PostFactory(remote_id=GROUP_POST_ID, owner=group)

        resources1 = [{'from_id': 1, 'date': int(time.time()) - 1000}]
        with mock.patch('vkontakte_wall.models.Post.fetch_reposts_items', side_effect=lambda **kw: resources1):
            users1 = post.fetch_reposts(all=True)
        state_time1 = post.reposts_users.last_update_time()

        self.assertEqual(post.reposts_users.count(), users1.count())
        self.assertEqual(post.reposts_users.count(), 1)
        self.assertItemsEqual(post.reposts_users.all(), User.objects.filter(remote_id__in=[1]))

        resources2 = [{'from_id': 2, 'date': int(time.time()) - 500}]
        with mock.patch('vkontakte_wall.models.Post.fetch_reposts_items', side_effect=lambda **kw: resources1 + resources2):
            users2 = post.fetch_reposts(all=True)
        state_time2 = post.reposts_users.last_update_time()

        self.assertEqual(post.reposts_users.count(), users2.count())
        self.assertEqual(post.reposts_users.count(), 2)
        self.assertItemsEqual(post.reposts_users.all(), User.objects.filter(remote_id__in=[1, 2]))

        resources3 = [{'from_id': 3, 'date': int(time.time()) - 100}]
        with mock.patch('vkontakte_wall.models.Post.fetch_reposts_items', side_effect=lambda **kw: resources3):
            users3 = post.fetch_reposts(all=True)
        state_time3 = post.reposts_users.last_update_time()

        self.assertEqual(post.reposts_users.count(), users3.count())
        self.assertEqual(post.reposts_users.count(), 1)
        self.assertItemsEqual(post.reposts_users.all(), User.objects.filter(remote_id__in=[3]))

        # check results of 3 changes
        self.assertItemsEqual(post.reposts_users.were_at(state_time1, only_pk=True), [1])
        self.assertItemsEqual(post.reposts_users.were_at(state_time2, only_pk=True), [1, 2])
        self.assertItemsEqual(post.reposts_users.were_at(state_time3, only_pk=True), [3])

        state_time_add1 = datetime.utcfromtimestamp(resources1[0]['date']).replace(tzinfo=timezone.utc)
        state_time_add2 = datetime.utcfromtimestamp(resources2[0]['date']).replace(tzinfo=timezone.utc)
        state_time_add3 = datetime.utcfromtimestamp(resources3[0]['date']).replace(tzinfo=timezone.utc)

        self.assertItemsEqual(post.reposts_users.added_at(state_time_add1, only_pk=True), [1])
        self.assertItemsEqual(post.reposts_users.added_at(state_time_add2, only_pk=True), [2])
        self.assertItemsEqual(post.reposts_users.added_at(state_time_add3, only_pk=True), [3])

        self.assertItemsEqual(post.reposts_users.removed_at(state_time1, only_pk=True), [])
        self.assertItemsEqual(post.reposts_users.removed_at(state_time2, only_pk=True), [])
        self.assertItemsEqual(post.reposts_users.removed_at(state_time3, only_pk=True), [1, 2])

        # returns user ID=2 with old date
        resources4 = resources3 + resources2
        with mock.patch('vkontakte_wall.models.Post.fetch_reposts_items', side_effect=lambda **kw: resources4):
            users4 = post.fetch_reposts(all=True)

        self.assertEqual(post.reposts_users.count(), users4.count())
        self.assertEqual(post.reposts_users.count(), 2)
        self.assertItemsEqual(post.reposts_users.all(), User.objects.filter(remote_id__in=[2, 3]))

        # changed after last fetching
        self.assertItemsEqual(post.reposts_users.were_at(state_time3, only_pk=True), [2, 3])
        self.assertItemsEqual(post.reposts_users.removed_at(state_time3, only_pk=True), [1])

Example 10

    @mock.patch('batchapps.credentials.requests_oauthlib')
    @mock.patch('batchapps.credentials.BackendApplicationClient')
    @mock.patch('batchapps.credentials.Configuration')
    @mock.patch('batchapps.credentials.Credentials')
    def test_azureoauth_get_unattended_session(self,
                                 mock_creds,
                                 mock_config,
                                 mock_client,
                                 mock_req):
        """Test get_unattended_session"""

        mock_session = mock.create_autospec(
            requests_oauthlib.OAuth2Session)

        mock_req.OAuth2Session.return_value = mock_session
        mock_config.aad_config.return_value = {'root':'1/',
                                               'unattended_key':'3',
                                               'token_uri':'/auth',
                                               'resource':'test',
                                               'unattended_account':'abc'}

        with self.assertRaises(InvalidConfigException):
            AzureOAuth.get_unattended_session(mock_config)

        mock_config.aad_config.return_value['unattended_account'] = 'ClientID=abc;TenantID=common'
        AzureOAuth.get_unattended_session(mock_config)
        mock_client.assert_called_with("abc")
        mock_req.OAuth2Session.assert_called_with("abc", client=mock.ANY)
        mock_session.fetch_token.assert_called_with(
            "https://1/common/auth",
            client_id='abc',
            resource='https://test',
            client_secret='3',
            response_type='client_credentials',
            verify=True)

        mock_config.aad_config.return_value = {'root':'http://1/',
                                               'unattended_key':'3',
                                               'token_uri':'/auth',
                                               'resource':'https://test',
                                               'unattended_account':'ClientID=abc;TenantID=common'}

        AzureOAuth.get_unattended_session(mock_config)
        mock_client.assert_called_with("abc")
        mock_req.OAuth2Session.assert_called_with("abc", client=mock.ANY)
        mock_session.fetch_token.assert_called_with(
            "https://1/common/auth",
            client_id='abc',
            resource='https://test',
            client_secret='3',
            response_type='client_credentials',
            verify=True)

        credentials.CA_CERT = "cacert.pem"
        AzureOAuth.get_unattended_session(mock_config)
        mock_client.assert_called_with("abc")
        mock_req.OAuth2Session.assert_called_with("abc", client=mock.ANY)
        mock_session.fetch_token.assert_called_with(
            "https://1/common/auth",
            client_id='abc',
            resource='https://test',
            client_secret='3',
            response_type='client_credentials',
            verify="cacert.pem")
        
        credentials.VERIFY = False
        AzureOAuth.get_unattended_session(mock_config)
        mock_client.assert_called_with("abc")
        mock_req.OAuth2Session.assert_called_with("abc", client=mock.ANY)
        mock_session.fetch_token.assert_called_with(
            "https://1/common/auth",
            client_id='abc',
            resource='https://test',
            client_secret='3',
            response_type='client_credentials',
            verify=False)

Example 11

Project: rockstor-core
Source File: test_shares.py
    @mock.patch('storageadmin.views.share.remove_share')
    @mock.patch('storageadmin.views.share.SFTP')
    @mock.patch('storageadmin.views.share.SambaShare')
    @mock.patch('storageadmin.views.share.NFSExport')
    @mock.patch('storageadmin.views.share.Snapshot')
    def test_delete_set1(self, mock_snapshot, mock_nfs, mock_samba, mock_sftp, mock_remove_share):
        """
        Test DELETE request on share
        1. Create valid share
        2. Delete share with replication related snapshots
        3. Delete share with NFS export
        4. Delete share that is shared via Samba
        5. Delete share with snapshots
        6. Delete share with SFTP export
        7. Delete share with remove_share failure (share still mounted)
        8. Delete nonexistent share
        """
        # create share
        data = {'sname': 'rootshare', 'pool': 'rockstor_rockstor', 'size': 100}
        response = self.client.post(self.BASE_URL, data=data)
        self.assertEqual(response.status_code, status.HTTP_200_OK, msg=response.data)
        self.assertEqual(response.data['name'], 'rootshare')
        share = Share.objects.get(name='rootshare')

        # Delete share with replication related snapshots
        mock_snapshot.objects.filter(share=share, snap_type='replication').exists.return_value = True
        e_msg = ('Share(rootshare) cannot be deleted as it has replication related snapshots.')
        response2 = self.client.delete('%s/rootshare' % self.BASE_URL)
        self.assertEqual(response2.status_code,
                         status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response2.data)
        self.assertEqual(response2.data['detail'], e_msg)
        mock_snapshot.objects.filter(share=share, snap_type='replication').exists.return_value = False

        # Delete share with NFS export
        mock_nfs.objects.filter(share=share).exists.return_value = True
        e_msg = ('Share(rootshare) cannot be deleted as it is exported via nfs. '
                 'Delete nfs exports and try again')
        response3 = self.client.delete('%s/rootshare' % self.BASE_URL)
        self.assertEqual(response3.status_code,
                         status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response3.data)
        self.assertEqual(response3.data['detail'], e_msg)
        mock_nfs.objects.filter(share=share).exists.return_value = False

        # Delete share that is shared via Samba
        mock_samba.objects.filter(share=share).exists.return_value = True
        e_msg = ('Share(rootshare) cannot be deleted as it is shared via Samba. '
                 'Unshare and try again')
        response4 = self.client.delete('%s/rootshare' % self.BASE_URL)
        self.assertEqual(response4.status_code,
                         status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response4.data)
        self.assertEqual(response4.data['detail'], e_msg)
        mock_samba.objects.filter(share=share).exists.return_value = False

        # Delete share with SFTP export
        mock_sftp.objects.filter(share=share).exists.return_value = True
        e_msg = ('Share(rootshare) cannot be deleted as it is exported via '
                 'SFTP. Delete SFTP export and try again')
        response6 = self.client.delete('%s/rootshare' % self.BASE_URL)
        self.assertEqual(response6.status_code,
                         status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response6.data)
        self.assertEqual(response6.data['detail'], e_msg)
        mock_sftp.objects.filter(share=share).exists.return_value = False

        # Delete share with snapshots
        # TODO: this test gets triggered by the check for snap_type='replication'
        mock_snapshot.objects.filter(share=share, snap_type='admin').exists.return_value = True
        e_msg = ('Share(rootshare) cannot be deleted as it has snapshots. '
                 'Delete snapshots and try again')
        response5 = self.client.delete('%s/rootshare' % self.BASE_URL)
        self.assertEqual(response5.status_code,
                         status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response5.data)
        self.assertEqual(response5.data['detail'], e_msg)
        mock_snapshot.objects.filter(share=share, snap_type='admin').exists.return_value = False

        # delete a share that doesn't exist
        e_msg = ('Share(invalid) does not exist')
        response9 = self.client.delete('%s/invalid' % self.BASE_URL)
        self.assertEqual(response9.status_code,
                         status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response9.data)
        self.assertEqual(response9.data['detail'], e_msg)

Example 12

Project: scalarizr
Source File: test_eph.py
    @mock.patch('scalarizr.linux.coreutils.df')
    @mock.patch('scalarizr.storage2.volumes.eph.os.rmdir')
    @mock.patch('scalarizr.storage2.volumes.eph.tempfile')
    @mock.patch('scalarizr.storage2.volumes.eph.cloudfs')
    @mock.patch('scalarizr.storage2.volumes.eph.EphVolume.mkfs')
    def test_ensure_from_snap(self, mkfs, cfs, tfile, rmdir, df, storage2):
        storage2.StorageError = Exception
        snap = storage2.snapshot.return_value
        snap.size = 5000
        disk = mock.MagicMock()
        vol = eph.EphVolume(type='eph', snap=snapshot, size='80%',
                            vg='mongo', disk=disk)

        tmp_mpoint = 'test_temp'
        df.return_value = [mock.MagicMock(), mock.MagicMock(),
                           mock.MagicMock(mpoint=tmp_mpoint, free=10000)]
        tfile.mkdtemp.return_value = tmp_mpoint
        with mock.patch.multiple(vol, mount=mock.DEFAULT, umount=mock.DEFAULT):
            vol.ensure()
            vol.mount.assert_called_once_with()
            vol.umount.assert_called_once_with()


        storage2.snapshot.assert_called_once_with(snapshot)
        snap = storage2.snapshot.return_value
        storage2.volume.assert_called_once_with(
                pvs=[disk], size='80%VG', vg='mongo', name='data', type='lvm'
        )
        lvm_vol = storage2.volume.return_value
        lvm_vol.ensure.assert_called_once_with()
        self.assertEqual(lvm_vol.device, vol.device)
        mkfs.assert_called_once_with()

        tfile.mkdtemp.assert_called_once_with()

        cfs.LargeTransfer.assert_called_once_with(snap.path, tmp_mpoint + '/')
        tr = cfs.LargeTransfer.return_value

        self.assertEqual(1, tr.run.call_count)
        rmdir.assert_called_once_with(tmp_mpoint)

        self.assertEqual(vol.mpoint, None)


        """ Cleanup on transfer failure """
        tr.run.side_effect = Exception
        rmdir.reset_mock()
        vol = eph.EphVolume(type='eph', snap=snapshot, size='80%',
                            vg='mongo', disk=disk)

        with mock.patch.multiple(vol, mount=mock.DEFAULT, umount=mock.DEFAULT):
            self.assertRaises(storage2.StorageError, vol.ensure)
            vol.mount.assert_called_once_with()
            vol.umount.assert_called_once_with()

        rmdir.assert_called_once_with(tmp_mpoint)

        """ Snapshot size bigger than free space"""
        tr.reset_mock()
        snap.size = 20000
        vol = eph.EphVolume(type='eph', snap=snapshot, size='80%',
                            vg='mongo', disk=disk)
        with mock.patch.multiple(vol, mount=mock.DEFAULT, umount=mock.DEFAULT):
            self.assertRaisesRegexp(storage2.StorageError,
                                    'Not enough free space',
                                    vol.ensure)
        self.assertFalse(tr.mock_calls)
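
A quick note on the mock.patch.multiple call used above: every attribute passed as mock.DEFAULT is replaced with a fresh MagicMock for the duration of the block, and the context manager yields the created mocks in a dict keyed by attribute name. A minimal, self-contained sketch (the Greeter class is made up for illustration):

import mock


class Greeter(object):
    def hello(self):
        return 'hello'

    def bye(self):
        return 'bye'


g = Greeter()
# Both methods become MagicMocks inside the block only.
with mock.patch.multiple(g, hello=mock.DEFAULT, bye=mock.DEFAULT) as mocks:
    g.hello()
    mocks['hello'].assert_called_once_with()
    assert not mocks['bye'].called

assert g.hello() == 'hello'  # the original methods are restored on exit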

Example 13

Project: sd-agent
Source File: test_service_discovery.py
View license
    def test_fill_tpl(self):
        """Test _fill_tpl with mocked docker client"""

        valid_configs = [
            # ((inspect, instance_tpl, variables, tags), (expected_instance_tpl, expected_var_values))
            (({}, {'host': 'localhost'}, [], None), ({'host': 'localhost'}, {})),
            (
                ({'NetworkSettings': {'IPAddress': ''}}, {'host': 'localhost'}, [], None),
                ({'host': 'localhost'}, {})
            ),
            (
                ({'NetworkSettings': {'Networks': {}}}, {'host': 'localhost'}, [], None),
                ({'host': 'localhost'}, {})
            ),
            (
                ({'NetworkSettings': {'Networks': {'bridge': {}}}}, {'host': 'localhost'}, [], None),
                ({'host': 'localhost'}, {})
            ),
            (
                ({'NetworkSettings': {'IPAddress': '127.0.0.1'}},
                 {'host': '%%host%%', 'port': 1337}, ['host'], ['foo', 'bar:baz']),
                ({'host': '%%host%%', 'port': 1337, 'tags': ['foo', 'bar:baz']}, {'host': '127.0.0.1'}),
            ),
            (
                ({'NetworkSettings': {'IPAddress': '127.0.0.1', 'Networks': {}}},
                 {'host': '%%host%%', 'port': 1337}, ['host'], ['foo', 'bar:baz']),
                ({'host': '%%host%%', 'port': 1337, 'tags': ['foo', 'bar:baz']}, {'host': '127.0.0.1'}),
            ),
            (
                ({'NetworkSettings': {
                    'IPAddress': '127.0.0.1',
                    'Networks': {'bridge': {'IPAddress': '172.17.0.2'}}}
                  },
                 {'host': '%%host%%', 'port': 1337}, ['host'], ['foo', 'bar:baz']),
                ({'host': '%%host%%', 'port': 1337, 'tags': ['foo', 'bar:baz']}, {'host': '127.0.0.1'}),
            ),
            (
                ({'NetworkSettings': {
                    'IPAddress': '',
                    'Networks': {
                        'bridge': {'IPAddress': '172.17.0.2'},
                        'foo': {'IPAddress': '192.168.0.2'}
                    }}
                  },
                 {'host': '%%host_bridge%%', 'port': 1337}, ['host_bridge'], ['foo', 'bar:baz']),
                ({'host': '%%host_bridge%%', 'port': 1337, 'tags': ['foo', 'bar:baz']},
                 {'host_bridge': '172.17.0.2'}),
            ),
            (
                ({'NetworkSettings': {
                    'IPAddress': '',
                    'Networks': {
                        'bridge': {'IPAddress': '172.17.0.2'},
                        'foo': {'IPAddress': '192.168.0.2'}
                    }}
                  },
                 {'host': '%%host_foo%%', 'port': 1337}, ['host_foo'], ['foo', 'bar:baz']),
                ({'host': '%%host_foo%%', 'port': 1337, 'tags': ['foo', 'bar:baz']},
                 {'host_foo': '192.168.0.2'}),
            ),
            (
                ({'NetworkSettings': {'IPAddress': '127.0.0.1', 'Ports': {'42/tcp': None, '22/tcp': None}}},
                 {'host': '%%host%%', 'port': '%%port_1%%', 'tags': ['env:test']},
                 ['host', 'port_1'], ['foo', 'bar:baz']),
                ({'host': '%%host%%', 'port': '%%port_1%%', 'tags': ['env:test', 'foo', 'bar:baz']},
                 {'host': '127.0.0.1', 'port_1': '42'})
            )
        ]

        # should not fail but return something specific
        edge_cases = [
            # ((inspect, instance_tpl, variables, tags), (expected_instance_tpl, expected_var_values))

            # specify bridge but there is also a default IPAddress (networks should be preferred)
            (
                ({'NetworkSettings': {
                    'IPAddress': '127.0.0.1',
                    'Networks': {'bridge': {'IPAddress': '172.17.0.2'}}}},
                 {'host': '%%host_bridge%%', 'port': 1337}, ['host_bridge'], ['foo', 'bar:baz']),
                ({'host': '%%host_bridge%%', 'port': 1337, 'tags': ['foo', 'bar:baz']},
                 {'host_bridge': '172.17.0.2'})
            ),
            # specify index but there is a default IPAddress (there's a specifier, even if it's wrong, walking networks should be preferred)
            (
                ({'NetworkSettings': {
                    'IPAddress': '127.0.0.1',
                    'Networks': {'bridge': {'IPAddress': '172.17.0.2'}}}},
                 {'host': '%%host_0%%', 'port': 1337}, ['host_0'], ['foo', 'bar:baz']),
                ({'host': '%%host_0%%', 'port': 1337, 'tags': ['foo', 'bar:baz']}, {'host_0': '172.17.0.2'}),
            ),
            # missing key for host, bridge network should be preferred
            (
                ({'NetworkSettings': {'Networks': {
                    'bridge': {'IPAddress': '127.0.0.1'},
                    'foo': {'IPAddress': '172.17.0.2'}}}},
                 {'host': '%%host_bar%%', 'port': 1337}, ['host_bar'], []),
                ({'host': '%%host_bar%%', 'port': 1337}, {'host_bar': '127.0.0.1'}),
            ),
            # missing index for port
            (
                ({'NetworkSettings': {'IPAddress': '127.0.0.1', 'Ports': {'42/tcp': None, '22/tcp': None}}},
                 {'host': '%%host%%', 'port': '%%port_2%%', 'tags': ['env:test']},
                 ['host', 'port_2'], ['foo', 'bar:baz']),
                ({'host': '%%host%%', 'port': '%%port_2%%', 'tags': ['env:test', 'foo', 'bar:baz']},
                 {'host': '127.0.0.1', 'port_2': '42'})
            )
        ]

        # should raise
        invalid_config = [
            # ((inspect, instance_tpl, variables, tags), expected_exception)

            # template variable but no IPAddress available
            (
                ({'NetworkSettings': {'Networks': {}}},
                 {'host': '%%host%%', 'port': 1337}, ['host'], ['foo', 'bar:baz']),
                Exception,
            ),
            # index but no IPAddress available
            (
                ({'NetworkSettings': {'Networks': {}}},
                 {'host': '%%host_0%%', 'port': 1337}, ['host_0'], ['foo', 'bar:baz']),
                Exception,
            ),
            # key but no IPAddress available
            (
                ({'NetworkSettings': {'Networks': {}}},
                 {'host': '%%host_foo%%', 'port': 1337}, ['host_foo'], ['foo', 'bar:baz']),
                Exception,
            ),

            # template variable but no port available
            (
                ({'NetworkSettings': {'Networks': {}}},
                 {'host': 'localhost', 'port': '%%port%%'}, ['port'], []),
                Exception,
            ),
            # index but no port available
            (
                ({'NetworkSettings': {'Networks': {}}},
                 {'host': 'localhost', 'port_0': '%%port%%'}, ['port_0'], []),
                Exception,
            ),
            # key but no port available
            (
                ({'NetworkSettings': {'Networks': {}}},
                 {'host': 'localhost', 'port': '%%port_foo%%'}, ['port_foo'], []),
                Exception,
            )
        ]

        with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
            with mock.patch.object(EtcdStore, 'get_client', return_value=None):
                with mock.patch.object(ConsulStore, 'get_client', return_value=None):
                    for ac in self.agentConfigs:
                        sd_backend = get_sd_backend(agentConfig=ac)
                        try:
                            for co in valid_configs + edge_cases:
                                inspect, tpl, variables, tags = co[0]
                                instance_tpl, var_values = sd_backend._fill_tpl(inspect, tpl, variables, tags)
                                for key in instance_tpl.keys():
                                    if isinstance(instance_tpl[key], list):
                                        self.assertEquals(len(instance_tpl[key]), len(co[1][0].get(key)))
                                        for elem in instance_tpl[key]:
                                            self.assertTrue(elem in co[1][0].get(key))
                                    else:
                                        self.assertEquals(instance_tpl[key], co[1][0].get(key))
                                self.assertEquals(var_values, co[1][1])

                            for co in invalid_config:
                                inspect, tpl, variables, tags = co[0]
                                self.assertRaises(co[1], sd_backend._fill_tpl, inspect, tpl, variables, tags)

                            clear_singletons(ac)
                        except Exception:
                            clear_singletons(ac)
                            raise
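
A pitfall worth flagging in the invalid-config loop above: assertRaises needs the callable and its arguments passed separately, otherwise the function runs (and raises) before assertRaises can trap the exception. A minimal sketch:

import unittest


def explode(msg):
    raise ValueError(msg)


class ExampleTest(unittest.TestCase):
    def test_raises(self):
        # Pass the callable and its arguments separately...
        self.assertRaises(ValueError, explode, 'boom')
        # ...or use the context-manager form (Python 2.7+).
        with self.assertRaises(ValueError):
            explode('boom')


if __name__ == '__main__':
    unittest.main()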

Example 14

Project: pybrightcove
Source File: test_ftp_video.py
View license
    @mock.patch('ftplib.FTP')
    @mock.patch('hashlib.md5')
    @mock.patch('os.path.getsize')
    @mock.patch('__builtin__.file')
    @mock.patch("os.fdopen")
    def test_batch_provision_with_custom_metadata_video(self, FDOpenMockClass, OpenMockClass, 
        GetSizeMockClass, Md5MockClass, FTPMockClass):
        fd = FDOpenMockClass()
        o = OpenMockClass()
        o.read.return_value = None
        m = Md5MockClass()
        m.hexdigest.return_value = 'a78fa9f8asd'
        GetSizeMockClass.return_value = 10000
        f = FTPMockClass()

        ftp = connection.FTPConnection(host='host',
                            user='user',
                            password='pass',
                            publisher_id='111111111',
                            preparer='Patrick',
                            report_success=True)
        v = video.Video(name="Some title",
                  reference_id='a532kallk3252a',
                  short_description="A short description.",
                  _connection=ftp)
        v.long_description = "An even longer description"
        v.tags.extend(["blah", "nah", "tag"])
        v.add_asset('1500.flv',
                enums.AssetTypeEnum.VIDEO_FULL, 'High quality rendition',
                encoding_rate=1500000, frame_width=640,
                frame_height=360)
        v.add_asset('700.flv',
                enums.AssetTypeEnum.VIDEO_FULL, 'Medium quality rendition',
                encoding_rate=700000, frame_width=640,
                frame_height=360)
        v.add_asset('poster.png',
                enums.AssetTypeEnum.VIDEO_STILL, 'Poster frame',
                frame_width=640, frame_height=360)

        v.add_custom_metadata("enum_one", "Value One", enums.CustomMetaType.ENUM)
        v.add_custom_metadata("enum_two", "Value Two", enums.CustomMetaType.ENUM)
        v.add_custom_metadata("key_one", "String Value One", enums.CustomMetaType.STRING)
        v.add_custom_metadata("key_two", "String Value Two", enums.CustomMetaType.STRING)
        v.save()

        self.assertEqual('login', f.method_calls[0][0])
        self.assertEqual('set_pasv', f.method_calls[1][0])
        self.assertEqual('storbinary', f.method_calls[2][0])
        self.assertEqual('STOR 1500.flv', f.method_calls[2][1][0])

        self.assertEqual('login', f.method_calls[3][0])
        self.assertEqual('set_pasv', f.method_calls[4][0])
        self.assertEqual('storbinary', f.method_calls[5][0])
        self.assertEqual('STOR 700.flv', f.method_calls[5][1][0])

        self.assertEqual('login', f.method_calls[6][0])
        self.assertEqual('set_pasv', f.method_calls[7][0])
        self.assertEqual('storbinary', f.method_calls[8][0])
        self.assertEqual('STOR poster.png', f.method_calls[8][1][0])

        self.assertEqual('write', fd.method_calls[0][0])
        valid_xml = minidom.parse(
            open(os.path.join(os.path.dirname(__file__), 'test_ftp_video_batch_provision_with_custom_metadata_manifest.xml'), 'rb'))
        test_xml = minidom.parseString(fd.method_calls[0][1][0])
        self.assertEqual(
            valid_xml.toxml().replace('\t', '').replace('\n', ''),
            test_xml.toxml().replace('\t', '').replace('\n', ''))
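
The five stacked patches above also illustrate decorator ordering: mock.patch decorators apply bottom-up, so the mock for the bottom-most target ('os.fdopen' here) arrives as the first extra argument and the top-most ('ftplib.FTP') as the last. A minimal sketch against real stdlib targets:

import os

import mock


@mock.patch('os.listdir')   # applied second -> second extra argument
@mock.patch('os.getcwd')    # applied first  -> first extra argument
def check(mock_getcwd, mock_listdir):
    mock_getcwd.return_value = '/tmp'
    mock_listdir.return_value = ['a.txt']
    assert os.listdir(os.getcwd()) == ['a.txt']
    mock_listdir.assert_called_once_with('/tmp')


check()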

Example 15

Project: wercker-cli
Source File: test_cli.py
View license
    @mock.patch('werckercli.authentication.get_access_token', mock.Mock())
    @mock.patch('werckercli.commands.create.create', mock.Mock())
    @mock.patch('werckercli.commands.login.login', mock.Mock())
    @mock.patch(
        'werckercli.commands.clearsettings.clear_settings',
        mock.Mock()
    )
    @mock.patch(
        'werckercli.commands.project.project_list',
        mock.Mock()
    )
    def test_implemented_base_commands(self):
        my_cli = cli
        with mock.patch('werckercli.commands.target.add', mock.Mock()):
            with mock.patch(
                'werckercli.authentication.get_access_token',
                mock.Mock()
            ):

                # create
                my_cli.handle_commands(
                    {
                        'apps': False,
                        'status': False,
                        'create': True,
                        'deploy': False,
                        'builds': False,
                        'logout': False,
                        'login': False,
                        'targets': False,
                    }
                )

                # app create
                my_cli.handle_commands(
                    {
                        'apps': True,
                        'status': False,
                        'create': True,
                        'builds': False,
                        'deploy': False,
                        'logout': False,
                        'login': False,
                        'targets': False,
                    }
                )

                # logout
                my_cli.handle_commands(
                    {
                        'apps': False,
                        'status': False,
                        'create': False,
                        'builds': False,
                        'deploy': False,
                        'logout': True,
                        'login': False,
                        'targets': False,
                    }
                )

                # login
                my_cli.handle_commands(
                    {
                        'apps': False,
                        'status': False,
                        'add': False,
                        'builds': False,
                        'create': False,
                        'deploy': False,
                        'logout': False,
                        'login': True,
                        'targets': False,
                    }
                )

                # deploy add
                my_cli.handle_commands(
                    {
                        'add': True,
                        'apps': False,
                        'status': False,
                        'builds': False,
                        'create': False,
                        'deploy': False,
                        'logout': False,
                        'login': False,
                        'targets': True,
                    }
                )
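
One wrinkle in the style above: when mock.patch is handed the replacement directly (the mock.Mock() second argument), no mock is injected into the test function, so keeping your own reference to the replacement is the only way to assert on calls afterwards. A minimal sketch:

import os

import mock

fake_getcwd = mock.Mock(return_value='/pretend')


@mock.patch('os.getcwd', fake_getcwd)
def check():    # note: no extra argument is injected
    return os.getcwd()


assert check() == '/pretend'
fake_getcwd.assert_called_once_with()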

Example 16

Project: elastalert
Source File: test_rule.py
View license
    def run_elastalert(self, rule, conf, args):
        """ Creates an ElastAlert instance and run's over for a specific rule using either real or mock data. """

        # Load and instantiate rule
        load_modules(rule)
        conf['rules'] = [rule]

        # If using mock data, make sure it's sorted and find appropriate time range
        timestamp_field = rule.get('timestamp_field', '@timestamp')
        if args.json:
            if not self.data:
                return None
            try:
                self.data.sort(key=lambda x: x[timestamp_field])
                starttime = ts_to_dt(self.data[0][timestamp_field])
                endtime = self.data[-1][timestamp_field]
                endtime = ts_to_dt(endtime) + datetime.timedelta(seconds=1)
            except KeyError as e:
                print("All documents must have a timestamp and _id: %s" % (e), file=sys.stderr)
                return None

            # Create mock _id for documents if it's missing
            used_ids = []

            def get_id():
                _id = ''.join([random.choice(string.letters) for i in range(16)])
                if _id in used_ids:
                    return get_id()
                used_ids.append(_id)
                return _id

            for doc in self.data:
                doc.update({'_id': doc.get('_id', get_id())})
        else:
            endtime = ts_now()
            starttime = endtime - datetime.timedelta(days=args.days)

        # Set run_every to cover the entire time range unless use_count_query or use_terms_query is set
        # This is to prevent query segmenting which unnecessarily slows down tests
        if not rule.get('use_terms_query') and not rule.get('use_count_query'):
            conf['run_every'] = endtime - starttime

        # Instantiate ElastAlert to use mock config and special rule
        with mock.patch('elastalert.elastalert.get_rule_hashes'):
            with mock.patch('elastalert.elastalert.load_rules') as load_conf:
                load_conf.return_value = conf
                if args.alert:
                    client = ElastAlerter(['--verbose'])
                else:
                    client = ElastAlerter(['--debug'])

        # Replace get_hits_* functions to use mock data
        if args.json:
            self.mock_elastalert(client)

        # Mock writeback for both real data and json data
        client.writeback_es = None
        with mock.patch.object(client, 'writeback') as mock_writeback:
            client.run_rule(rule, endtime, starttime)

            if mock_writeback.call_count:
                print("\nWould have written the following documents to writeback index (default is elastalert_status):\n")
                for call in mock_writeback.call_args_list:
                    print("%s - %s\n" % (call[0][0], call[0][1]))

Example 17

Project: mycroft
Source File: test_worker.py
View license
    @pytest.mark.parametrize("action_req", [
        ({'cancel_requested': True, 'pause_requested': False, 'delete_requested': False}),
        ({'cancel_requested': False, 'pause_requested': True, 'delete_requested': False}),
        ({'cancel_requested': True, 'pause_requested': True, 'delete_requested': False}),
    ])
    def test__process_msg(self, get_worker, action_req):
        worker = get_worker
        with mock.patch(
            'mycroft.backend.worker.base_worker.PoolExtended',
            autospec=True,
        ) as mock_pool:

            scheduled_runs = set()

            def invoke_callback(wait_sec):
                # wait releases lock and so do we
                worker._cond.release()

                # mock object stores all calls to apply_async from start
                # of test.  We only want to track calls made since the latest
                # invocation of _process_msg.  We do this by tracking calls
                # via scheduled_runs set variable.
                all_apply_async_calls = [c for c in mock_pool.mock_calls
                                         if c[0] == '().apply_async']
                assert len(all_apply_async_calls) > 0
                for c in reversed(all_apply_async_calls):
                    kwargs = c[2]
                    if str(kwargs) in scheduled_runs:
                        break
                    scheduled_runs.add(str(kwargs))

                    date = datetime_to_date_string(kwargs['args'][0].start_date)
                    kwargs['callback']([
                        dict(SUCCESS_RECORD.items() + {'date': date}.items())
                    ])

                # acquire lock when resume from wait
                worker._cond.acquire()

            with mock.patch.object(worker._cond, 'wait',
                                   side_effect=invoke_callback, autospec=True):
                msg_dict = SAMPLE_JSON_SQS_MSG.get_body()
                start = msg_dict['script_start_date_arg']
                end = msg_dict['script_end_date_arg']
                step = msg_dict['step']
                results_expected = date_string_total_items(start, end, step=step)
                results, _ = worker._process_msg(SAMPLE_JSON_SQS_MSG)
                assert len(results) == results_expected

                # test cancel
                class TestJob(WorkerJob):
                    def update_action_requests(self):
                        self.actions = action_req

                with mock.patch.object(worker, 'create_worker_job',
                                       return_value=TestJob(worker, {}, worker._num_processes),
                                       autospec=True):
                    result, action_results = worker._process_msg(SAMPLE_JSON_SQS_MSG)
                    # we use 1 for True due to backend storage issues
                    assert action_results['cancel_requested'] == action_req['cancel_requested']
                    assert action_results['pause_requested'] == action_req['pause_requested']
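
The wait mock above is driven by side_effect: when side_effect is a callable, it runs on every call with the call's own arguments, and whatever it returns becomes the mock's return value (unless it returns mock.DEFAULT). A minimal sketch with a made-up fake_wait helper:

import mock

calls = []


def fake_wait(seconds):
    calls.append(seconds)
    if seconds > 10:
        raise ValueError('too long')
    return 'waited %s' % seconds


waiter = mock.Mock(side_effect=fake_wait)
assert waiter(5) == 'waited 5'
try:
    waiter(60)    # the side_effect raises; the call is still recorded
except ValueError:
    pass
assert calls == [5, 60]
assert waiter.call_count == 2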

Example 18

Project: paasta
Source File: test_monitoring_tools.py
View license
    def test_send_event(self):
        fake_service = 'fake_service'
        fake_monitoring_overrides = {}
        fake_check_name = 'fake_check_name'
        fake_status = '42'
        fake_output = 'The http port is not open'
        fake_team = 'fake_team'
        fake_tip = 'fake_tip'
        fake_notification_email = '[email protected]'
        fake_irc = '#fake'
        fake_soa_dir = '/fake/soa/dir'
        self.fake_cluster = 'fake_cluster'
        fake_sensu_host = 'fake_sensu_host'
        fake_sensu_port = 12345
        expected_runbook = 'http://y/paasta-troubleshooting'
        expected_check_name = fake_check_name
        expected_kwargs = {
            'tip': fake_tip,
            'notification_email': fake_notification_email,
            'irc_channels': fake_irc,
            'project': None,
            'ticket': False,
            'page': True,
            'alert_after': '5m',
            'check_every': '1m',
            'realert_every': -1,
            'source': 'paasta-fake_cluster',
            'ttl': None,
        }
        with contextlib.nested(
            mock.patch(
                "paasta_tools.monitoring_tools.get_team",
                return_value=fake_team,
                autospec=True,
            ),
            mock.patch(
                "paasta_tools.monitoring_tools.get_tip",
                return_value=fake_tip,
                autospec=True,
            ),
            mock.patch(
                "paasta_tools.monitoring_tools.get_notification_email",
                return_value=fake_notification_email,
                autospec=True,
            ),
            mock.patch(
                "paasta_tools.monitoring_tools.get_irc_channels",
                return_value=fake_irc,
                autospec=True,
            ),
            mock.patch(
                "paasta_tools.monitoring_tools.get_ticket",
                return_value=False,
                autospec=True,
            ),
            mock.patch(
                "paasta_tools.monitoring_tools.get_project",
                return_value=None,
                autospec=True,
            ),
            mock.patch(
                "paasta_tools.monitoring_tools.get_page",
                return_value=True,
                autospec=True,
            ),
            mock.patch("pysensu_yelp.send_event", autospec=True),
            mock.patch('paasta_tools.monitoring_tools.load_system_paasta_config', autospec=True),
        ) as (
            get_team_patch,
            get_tip_patch,
            get_notification_email_patch,
            get_irc_patch,
            get_ticket_patch,
            get_project_patch,
            get_page_patch,
            pysensu_yelp_send_event_patch,
            load_system_paasta_config_patch,
        ):
            load_system_paasta_config_patch.return_value.get_cluster = mock.Mock(return_value=self.fake_cluster)
            load_system_paasta_config_patch.return_value.get_sensu_host = mock.Mock(return_value=fake_sensu_host)
            load_system_paasta_config_patch.return_value.get_sensu_port = mock.Mock(return_value=fake_sensu_port)

            monitoring_tools.send_event(
                fake_service,
                fake_check_name,
                fake_monitoring_overrides,
                fake_status,
                fake_output,
                fake_soa_dir
            )

            get_team_patch.assert_called_once_with(
                fake_monitoring_overrides,
                fake_service,
                fake_soa_dir,
            )
            get_tip_patch.assert_called_once_with(
                fake_monitoring_overrides,
                fake_service,
                fake_soa_dir
            )
            get_notification_email_patch.assert_called_once_with(
                fake_monitoring_overrides,
                fake_service,
                fake_soa_dir
            )
            get_irc_patch.assert_called_once_with(
                fake_monitoring_overrides,
                fake_service,
                fake_soa_dir
            )
            get_page_patch.assert_called_once_with(
                fake_monitoring_overrides,
                fake_service,
                fake_soa_dir
            )
            pysensu_yelp_send_event_patch.assert_called_once_with(
                expected_check_name,
                expected_runbook,
                fake_status,
                fake_output,
                fake_team,
                sensu_host=fake_sensu_host,
                sensu_port=fake_sensu_port,
                **expected_kwargs
            )
            load_system_paasta_config_patch.return_value.get_cluster.assert_called_once_with()
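
contextlib.nested, used above, is Python 2 only and was removed in Python 3; the same shape is written today with contextlib.ExitStack (or a single multi-context with statement). A minimal sketch:

import contextlib
import os

import mock

targets = ['os.getcwd', 'os.listdir', 'os.path.exists']

with contextlib.ExitStack() as stack:
    # enter_context() starts each patch and registers its cleanup.
    getcwd_mock, listdir_mock, exists_mock = [
        stack.enter_context(mock.patch(t)) for t in targets
    ]
    exists_mock.return_value = True
    assert os.path.exists('/anything') is True
# all three patches are unwound here, newest first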

Example 19

Project: py_zipkin
Source File: logging_helper_test.py
View license
@mock.patch('py_zipkin.logging_helper.time.time', autospec=True)
@mock.patch('py_zipkin.logging_helper.log_span', autospec=True)
@mock.patch('py_zipkin.logging_helper.annotation_list_builder',
            autospec=True)
@mock.patch('py_zipkin.logging_helper.binary_annotation_list_builder',
            autospec=True)
@mock.patch('py_zipkin.logging_helper.copy_endpoint_with_new_service_name',
            autospec=True)
def test_zipkin_logging_context_log_spans(
    copy_endpoint_mock, bin_ann_list_builder, ann_list_builder,
    log_span_mock, time_mock
):
    # This lengthy function tests that the logging context properly
    # logs both client and server spans, while attaching extra annotations
    # logged throughout the context of the trace.
    trace_id = '000000000000000f'
    parent_span_id = '0000000000000001'
    server_span_id = '0000000000000002'
    client_span_id = '0000000000000003'
    client_span_name = 'breadcrumbs'
    client_svc_name = 'svc'
    attr = ZipkinAttrs(
        trace_id=trace_id,
        span_id=server_span_id,
        parent_span_id=parent_span_id,
        flags=None,
        is_sampled=True,
    )
    handler = logging_helper.ZipkinLoggerHandler(attr)
    extra_server_annotations = {
        'parent_span_id': None,
        'annotations': {'foo': 1},
        'binary_annotations': {'what': 'whoa'},
    }
    extra_client_annotations = {
        'parent_span_id': client_span_id,
        'annotations': {'ann1': 1},
        'binary_annotations': {'bann1': 'aww'},
    }
    handler.extra_annotations = [
        extra_server_annotations,
        extra_client_annotations,
    ]
    handler.client_spans = [{
        'span_id': client_span_id,
        'parent_span_id': None,
        'span_name': client_span_name,
        'service_name': client_svc_name,
        'annotations': {'ann2': 2},
        'binary_annotations': {'bann2': 'yiss'},
    }]

    # Each of the thrift annotation helpers just reflects its first arg
    # so the annotation dicts can be checked.
    ann_list_builder.side_effect = lambda x, y: x
    bin_ann_list_builder.side_effect = lambda x, y: x

    transport_handler = mock.Mock()

    context = logging_helper.ZipkinLoggingContext(
        attr, 'thrift_endpoint', handler, 'GET /foo', transport_handler)

    context.start_timestamp = 24
    context.response_status_code = 200

    context.binary_annotations_dict = {'k': 'v'}
    time_mock.return_value = 42

    expected_server_annotations = {'foo': 1, 'sr': 24, 'ss': 42}
    expected_server_bin_annotations = {'k': 'v', 'what': 'whoa'}

    expected_client_annotations = {'ann1': 1, 'ann2': 2}
    expected_client_bin_annotations = {'bann1': 'aww', 'bann2': 'yiss'}

    context.log_spans()
    client_log_call, server_log_call = log_span_mock.call_args_list
    assert server_log_call[1] == {
        'span_id': server_span_id,
        'parent_span_id': parent_span_id,
        'trace_id': trace_id,
        'span_name': 'GET /foo',
        'annotations': expected_server_annotations,
        'binary_annotations': expected_server_bin_annotations,
        'transport_handler': transport_handler,
    }
    assert client_log_call[1] == {
        'span_id': client_span_id,
        'parent_span_id': server_span_id,
        'trace_id': trace_id,
        'span_name': client_span_name,
        'annotations': expected_client_annotations,
        'binary_annotations': expected_client_bin_annotations,
        'transport_handler': transport_handler,
    }
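
Every patch above passes autospec=True, which gives the mock the real object's signature so a drifted call fails loudly instead of being silently recorded. mock.create_autospec applies the same check directly; a minimal sketch (the send function is made up):

import mock


def send(payload, timeout=5):
    raise NotImplementedError('real network call')


fake_send = mock.create_autospec(send, return_value='ok')
assert fake_send('data', timeout=1) == 'ok'
fake_send.assert_called_once_with('data', timeout=1)

try:
    fake_send('data', 'extra', 'args')    # does not match send()'s signature
except TypeError:
    print('autospec rejected the call')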

Example 20

Project: pushmanager
Source File: test_core_git.py
View license
    def _pickme_conflict_pickme_integration(self, request_state):
        test_settings = copy.deepcopy(Settings)
        repo_path = tempfile.mkdtemp(prefix="pushmanager")
        self.temp_git_dirs.append(repo_path)
        test_settings['git']['local_repo_path'] = repo_path

        # Create a repo with two conflicting branches
        GitCommand('init', test_settings['git']['local_repo_path'], cwd=repo_path).run()
        # Prevent Git complaints about names
        GitCommand('config', 'user.email', '[email protected]', cwd=repo_path).run()
        GitCommand('config', 'user.name', 'pushmanager tester', cwd=repo_path).run()
        with open(os.path.join(repo_path, "code.py"), 'w') as f:
            f.write('#!/usr/bin/env python\n\nprint("Hello World!")\nPrint("Goodbye!")\n')
        GitCommand('add', repo_path, cwd=repo_path).run()
        GitCommand('commit', '-a', '-m', 'Master Commit', cwd=repo_path).run()

        GitCommand('checkout', '-b', 'change_german', cwd=repo_path).run()
        with open(os.path.join(repo_path, "code.py"), 'w') as f:
            f.write('#!/usr/bin/env python\n\nprint("Hallo Welt!")\nPrint("Goodbye!")\n')
        GitCommand('commit', '-a', '-m', 'verpflichten', cwd=repo_path).run()
        GitCommand('checkout', 'master', cwd=repo_path).run()
        german_req = {
            'id': 1,
            'state': request_state,
            'user': 'test',
            'tags': 'git-ok,no-conflicts',
            'title': 'German',
            'repo': '.',
            'branch': 'change_german'
        }

        GitCommand('checkout', '-b', 'change_welsh', cwd=repo_path).run()
        with open(os.path.join(repo_path, "code.py"), 'w') as f:
            f.write('#!/usr/bin/env python\n\nprint("Helo Byd!")\nPrint("Goodbye!")\n')
        GitCommand('commit', '-a', '-m', 'ymrwymo', cwd=repo_path).run()
        GitCommand('checkout', 'master', cwd=repo_path).run()
        welsh_req = {
            'id': 2,
            'state': request_state,
            'user': 'test',
            'tags': 'git-ok,no-conflicts',
            'title': 'Welsh',
            'repo': '.',
            'branch': 'change_welsh'
        }

        # Create a test branch for merging
        GitCommand('checkout', '-b', 'test_pcp', cwd=repo_path).run()

        # Merge on the first pickme
        with mock.patch('pushmanager.core.git.GitQueue.create_or_update_local_repo') as update_repo:
            pushmanager.core.git.GitQueue.git_merge_pickme(0, german_req, repo_path)
            update_repo.assert_called_with(0, '.', 'change_german', checkout=False)

        with nested(
                mock.patch('pushmanager.core.git.GitQueue._get_push_for_request'),
                mock.patch('pushmanager.core.git.GitQueue._get_request_ids_in_push'),
                mock.patch('pushmanager.core.git.GitQueue._get_request'),
                mock.patch('pushmanager.core.git.GitQueue._get_branch_sha_from_repo'),
                mock.patch('pushmanager.core.git.GitQueue._sha_exists_in_master'),
                mock.patch('pushmanager.core.git.GitQueue.create_or_update_local_repo'),
                mock.patch('pushmanager.core.git.GitQueue._update_request'),
                mock.patch.dict(Settings, test_settings, clear=True)
        ) as (p_for_r, r_in_p, get_req, get_sha, sha_exists, _, update_req, _):
            p_for_r.return_value = {'push': 1}
            r_in_p.return_value = [1, 2]
            get_req.return_value = welsh_req
            get_sha.return_value = "0"*40
            sha_exists.return_value = False
            update_req.return_value = german_req
            conflict, _ = pushmanager.core.git.GitQueue._test_pickme_conflict_pickme(
                0,
                german_req,
                "test_pcp",
                repo_path,
                pushmanager_url,
                False
            )
            return (conflict, update_req.call_args)

Example 21

Project: zerodb
Source File: test_db.py
View license
def test_auto_reindex(db):
    with transaction.manager:
        page = Page(title="hello", text="autoreindex0, test whether to work")
        db.add(page)
    assert len(db[Page].query(Contains("text", "autoreindex0"))) == 1

    with transaction.manager:
        page.text = "autoreindex1, test whether to work"
    assert len(db[Page].query(Contains("text", "autoreindex0"))) == 0
    assert len(db[Page].query(Contains("text", "autoreindex1"))) == 1

    with transaction.manager:
        page2 = Page(title="hello", text="autoreindex2, test whether to work")
        db.add(page2)

    with transaction.manager:
        page.text = "autoreindex3, test whether to work"
        page2.text = "autoreindex3, test whether to work"
    assert len(db[Page].query(Contains("text", "autoreindex1") | Contains("text", "autoreindex2"))) == 0
    assert len(db[Page].query(Contains("text", "autoreindex3"))) == 2

    with mock.patch("zerodb.db.DbModel.reindex_one") as reindex_mock:
        with transaction.manager:
            page.text = "autoreindex3, test whether to work1"
            page.text = "autoreindex3, test whether to work2"
            page.text = "autoreindex3, test whether to work3"
        assert reindex_mock.call_count == 1

    db.enableAutoReindex(False)
    with transaction.manager:
        page.text = "autoreindex4, test whether to work"
    assert len(db[Page].query(Contains("text", "autoreindex3"))) == 2
    assert len(db[Page].query(Contains("text", "autoreindex4"))) == 0

    db.enableAutoReindex(True)
    with transaction.manager:    # should not throw ModelException
        page3 = Page(title="helloworld", text="autoreindex5, test whether to work")
        page3.title = "helloworld1"
    assert len(db[Page].query(Eq("title", "helloworld"))) == 0
    assert len(db[Page].query(Eq("title", "helloworld1"))) == 0

    with mock.patch("zerodb.db.DbModel.reindex_one") as reindex_mock:
        with transaction.manager:  # should not reindex
            page3 = Page(title="helloworld", text="autoreindex5, test whether to work")
            page3.title = "helloworld1"
            db.add(page3)
        assert reindex_mock.call_count == 0

    with transaction.manager:  # should reindex
        page3 = Page(title="helloworld", text="autoreindex6, test whether to work")
        db.add(page3)
        page3.title = "helloworld1"
        page3.text = "autoreindex7, test whether to work"
    assert len(db[Page].query(Eq("title", "helloworld"))) == 0
    assert len(db[Page].query(Eq("title", "helloworld1"))) == 2
    assert len(db[Page].query(Contains("text", "autoreindex6"))) == 0
    assert len(db[Page].query(Contains("text", "autoreindex7"))) == 1

    with mock.patch("zerodb.db.DbModel.reindex_one") as reindex_mock:
        with transaction.manager:  # should reindex
            page3 = Page(title="helloworld", text="autoreindex6, test whether to work")
            db.add(page3)
            page3.title = "helloworld1"
            page3.text = "autoreindex7, test whether to work"
        assert reindex_mock.call_count == 1

Example 22

Project: 8-bits
Source File: test_util.py
View license
    def test_defaultFetcher(self):
        """util._defaultFetcher"""
        if mock:

            class Response(object):
                """urllib2.Reponse mock"""
                def __init__(self, url,
                             contenttype, content,
                             exception=None, args=None):
                    self.url = url

                    mt, params = cgi.parse_header(contenttype)
                    self.mimetype = mt
                    self.charset = params.get('charset', None)

                    self.text = content

                    self.exception = exception
                    self.args = args

                def geturl(self):
                    return self.url

                def info(self):
                    mimetype, charset = self.mimetype, self.charset
                    class Info(object):
                        
                        # py2x
                        def gettype(self):
                            return mimetype
                        def getparam(self, name=None):
                            return charset
                        
                        # py 3x
                        get_content_type = gettype
                        get_content_charset = getparam # here always charset!  
                        
                    return Info()

                def read(self):
                    # returns fake text or raises fake exception
                    if not self.exception:
                        return self.text
                    else:
                        raise self.exception(*self.args)

            def urlopen(url,
                        contenttype=None, content=None,
                        exception=None, args=None):
                # return a mock which returns a parameterized Response
                def x(*ignored):
                    if exception:
                        raise exception(*args)
                    else:
                        return Response(url,
                                        contenttype, content,
                                        exception=exception, args=args)
                return x

            urlopenpatch = 'urllib2.urlopen' if basetest.PY2x else 'urllib.request.urlopen' 

            # positive tests
            tests = {
                # content-type, contentstr: encoding, contentstr
                ('text/css', u'€'.encode('utf-8')):
                        (None, u'€'.encode('utf-8')),
                ('text/css;charset=utf-8', u'€'.encode('utf-8')):
                        ('utf-8', u'€'.encode('utf-8')),
                ('text/css;charset=ascii', 'a'):
                        ('ascii', 'a')
            }
            url = 'http://example.com/test.css'
            for (contenttype, content), exp in tests.items():
                @mock.patch(urlopenpatch, new=urlopen(url, contenttype, content))
                def do(url):
                    return _defaultFetcher(url)
                
                self.assertEqual(exp, do(url))

            # wrong mimetype
            @mock.patch(urlopenpatch, new=urlopen(url, 'text/html', 'a'))
            def do(url):
                return _defaultFetcher(url)
            
            self.assertRaises(ValueError, do, url)
            
            # calling url results in fake exception
                            
            # py2 ~= py3 raises error earlier than urlopen!
            tests = {
                '1': (ValueError, ['invalid value for url']),
                #_readUrl('mailto:a.css')
                'mailto:e4': (urllib2.URLError, ['urlerror']),
                # cannot resolve x, IOError
                'http://x': (urllib2.URLError, ['ioerror']),
            }
            for url, (exception, args) in tests.items():
                @mock.patch(urlopenpatch, new=urlopen(url, exception=exception, args=args))
                def do(url):
                    return _defaultFetcher(url)
                
                self.assertRaises(exception, do, url)

            # py2 != py3 raises error earlier than urlopen!
            urlrequestpatch = 'urllib2.urlopen' if basetest.PY2x else 'urllib.request.Request' 
            tests = {
                #_readUrl('http://cthedot.de/__UNKNOWN__.css')
                'e2': (urllib2.HTTPError, ['u', 500, 'server error', {}, None]),
                'e3': (urllib2.HTTPError, ['u', 404, 'not found', {}, None]),
            }
            for url, (exception, args) in tests.items():
                @mock.patch(urlrequestpatch, new=urlopen(url, exception=exception, args=args))
                def do(url):
                    return _defaultFetcher(url)
                
                self.assertRaises(exception, do, url)

        else:
            self.assertEqual(False, u'Mock needed for this test')

Example 23

Project: osf.io
Source File: utils.py
View license
@contextlib.contextmanager
def mock_archive(project, schema=None, auth=None, data=None, parent=None,
                 embargo=False, embargo_end_date=None,
                 retraction=False, justification=None, autoapprove_retraction=False,
                 autocomplete=True, autoapprove=False):
    """ A context manager for registrations. When you want to call Node#register_node in
    a test but do not want to deal with any of the side effects of the archiver, this
    helper allows for creating a registration in a safe fashion.

    :param bool embargo: embargo the registration (rather than RegistrationApproval)
    :param bool autocomplete: automatically finish archival?
    :param bool autoapprove: automatically approve registration approval?
    :param bool retraction: retract the registration?
    :param str justification: a justification for the retraction
    :param bool autoapprove_retraction: automatically approve retraction?

    Example use:

    project = ProjectFactory()
    with mock_archive(project) as registration:
        assert_true(registration.is_registration)
        assert_true(registration.archiving)
        assert_true(registration.is_pending_registration)

    with mock_archive(project, autocomplete=True) as registration:
        assert_true(registration.is_registration)
        assert_false(registration.archiving)
        assert_true(registration.is_pending_registration)

    with mock_archive(project, autocomplete=True, autoapprove=True) as registration:
        assert_true(registration.is_registration)
        assert_false(registration.archiving)
        assert_false(registration.is_pending_registration)
    """
    schema = schema or DEFAULT_METASCHEMA
    auth = auth or Auth(project.creator)
    data = data or ''

    with mock.patch('framework.celery_tasks.handlers.enqueue_task'):
        registration = project.register_node(
            schema=schema,
            auth=auth,
            data=data,
            parent=parent,
        )
    if embargo:
        embargo_end_date = embargo_end_date or (
            datetime.datetime.now() + datetime.timedelta(days=20)
        )
        registration.root.embargo_registration(
            project.creator,
            embargo_end_date
        )
    else:
        registration.root.require_approval(project.creator)
    if autocomplete:
        root_job = registration.root.archive_job
        root_job.status = ARCHIVER_SUCCESS
        root_job.sent = False
        root_job.done = True
        root_job.save()
        sanction = registration.root.sanction
        with contextlib.nested(
            mock.patch.object(root_job, 'archive_tree_finished', mock.Mock(return_value=True)),
            mock.patch('website.archiver.tasks.archive_success.delay', mock.Mock())
        ):
            archiver_listeners.archive_callback(registration)
    if autoapprove:
        sanction = registration.root.sanction
        sanction.state = Sanction.APPROVED
        sanction._on_complete(project.creator)
        sanction.save()

    if retraction:
        justification = justification or "Because reasons"
        retraction = registration.retract_registration(project.creator, justification=justification)
        if autoapprove_retraction:
            retraction.state = Sanction.APPROVED
            retraction._on_complete(project.creator)
        retraction.save()
        registration.save()
    yield registration

Example 24

Project: sheer
Source File: test_indexing.py
View license
    @mock.patch('sheer.indexer.bulk')
    @mock.patch('sheer.indexer.Elasticsearch')
    @mock.patch('sheer.indexer.ContentProcessor')
    @mock.patch('sheer.indexer.read_json_file')
    @mock.patch('os.path.exists')
    def test_indexing_failure_valueerr(self, mock_exists, mock_read_json_file,
                              mock_ContentProcessor, mock_Elasticsearch,
                              mock_bulk):
        """
        `sheer index`

        Test the failure of indexing by Sheer via a ValueError, and make sure it
        fails gracefully. This simulates the unavailability and timeout of the
        upstream source of information.
        """
        # We want to capture stderr
        sys.stderr = StringIO()

        # Add a mock error processor to the mock_processor json. This will let us
        # have three processors total. Because we're mocking ContentProcessor()
        # below we don't have to worry about the actual contents of these
        # dictionaries.
        valueerr_mock_processor = {'valueerrs': {
            'url': 'http://test/api/get_posts/',
            'processor': 'post_processor',
            'mappings': '_settings/posts_mappings.json',
        }}

        # Mock file existing/opening/reading
        # os.path.exists is only called directly for settings.json and
        # mappings.json, which are not necessary for our tests.
        mock_exists.return_value = False
        mock_read_json_file.side_effect = [valueerr_mock_processor, {}]

        # A context processor that will raise a ValueError to simulate bad json
        # being provided by the upstream source.
        mock_bulk.side_effect = ValueError("No JSON object could be decoded")
        mock_valueerr_processor = mock.Mock(spec=ContentProcessor)
        mock_valueerr_processor.name = 'valueerrs'
        mock_valueerr_processor.processor_name = 'posts_processor'
        mock_valueerr_processor.mapping.return_value = {}

        # Make sure ContentProcessor returns the err processor
        mock_ContentProcessor.side_effect = [mock_valueerr_processor]

        # Here we assume:
        #   * Index doesn't exist -> should be created
        #   * Mappings don't exist for processor -> should be created
        #   * Documents don't exist for processor -> should be created
        #       * An exception is raised when trying to fetch documents from the
        #         context processor.
        mock_es = mock_Elasticsearch.return_value
        mock_es.indices.exists.return_value = False
        mock_es.indices.get_mapping.return_value = None

        test_args = AttrDict(processors=[], reindex=False)
        try:
            index_location(test_args, self.config)
        except SystemExit, s:
            assert s.code == \
                'Indexing the following processor(s) failed: valueerrs'

        # Ensure that we got the right error message.
        assert 'error reading documents' in sys.stderr.getvalue()
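
The except SystemExit, s: syntax above is Python 2 only; on Python 3 the same check is usually written with assertRaises capturing the exception object. A minimal sketch (bail is a made-up stand-in for index_location):

import sys
import unittest


def bail():
    sys.exit('Indexing the following processor(s) failed: valueerrs')


class ExitTest(unittest.TestCase):
    def test_exit_code(self):
        with self.assertRaises(SystemExit) as cm:
            bail()
        self.assertEqual(
            cm.exception.code,
            'Indexing the following processor(s) failed: valueerrs')


if __name__ == '__main__':
    unittest.main()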

Example 25

Project: stor
Source File: test.py
View license
    def setup_s3_mocks(self):
        """Sets all of the relevant mocks for S3 communication.

        If you are testing outside of this library, you should either mock
        S3 client methods or you should focus on manipulating return value
        of mock_s3.

        Tests of methods that directly make API calls via _s3_client_call should
        mock the return values of the API calls on mock_s3. Tests of methods that
        do not directly make the API calls should mock any S3Path methods being called.

        The following variables are set up when calling this:

        - mock_s3_client: A mock of the Client instance returned by boto3.client

        - mock_s3: A mock of the Client instance returned by _get_s3_client in S3Path.

        - mock_get_s3_client: A mock of the _get_s3_client method in S3Path.

        - mock_get_s3_iterator: A mock of the _get_s3_iterator method in S3Path.

        - mock_s3_iterator: A mock of the iterable object returned by _get_s3_iterator in S3Path.

        - mock_s3_transfer: A mock of the Transfer instance returned by S3Transfer

        - mock_get_s3_transfer: A mock of the boto3.s3.transfer.S3Transfer object
        """
        # Ensure that the S3 session will never be instantiated in tests
        s3_session_patcher = mock.patch('boto3.session.Session', autospec=True)
        self.addCleanup(s3_session_patcher.stop)
        self.mock_s3_session = s3_session_patcher.start()

        # This is the mock returned by _get_s3_client.
        # User can mock s3 methods on this mock.
        self.mock_s3 = mock.Mock()
        self._get_s3_client_patcher = mock.patch('stor.s3._get_s3_client',
                                                 autospec=True)
        self.addCleanup(self.disable_get_s3_client_mock)
        self.mock_get_s3_client = self._get_s3_client_patcher.start()
        self.mock_get_s3_client.return_value = self.mock_s3

        # This is the mock returned by _get_s3_iterator.
        # User should modify the __iter__.return_value property to specify return values.
        self.mock_s3_iterator = mock.MagicMock()
        self._get_s3_iterator_patcher = mock.patch.object(S3Path, '_get_s3_iterator',
                                                          autospec=True)
        self.addCleanup(self.disable_get_s3_iterator_mock)
        self.mock_get_s3_iterator = self._get_s3_iterator_patcher.start()
        self.mock_get_s3_iterator.return_value = self.mock_s3_iterator

        # Ensure that an S3Transfer object will never be instantiated in tests.
        # User can mock methods associated with S3Transfer on this mock.
        self.mock_s3_transfer = mock.Mock()
        s3_transfer_patcher = mock.patch('stor.s3.S3Transfer', autospec=True)
        self.addCleanup(s3_transfer_patcher.stop)
        self.mock_get_s3_transfer = s3_transfer_patcher.start()
        self.mock_get_s3_transfer.return_value = self.mock_s3_transfer

        # Mock the TransferConfig object
        s3_transfer_config_patcher = mock.patch('stor.s3.TransferConfig',
                                                autospec=True)
        self.addCleanup(s3_transfer_config_patcher.stop)
        self.mock_get_s3_transfer_config = s3_transfer_config_patcher.start()
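
The patcher.start()/addCleanup(patcher.stop) pattern used throughout this setup keeps patches alive for a whole TestCase without nested with blocks, and addCleanup guarantees each patch is undone even if setUp fails after starting it. A minimal sketch:

import os
import unittest

import mock


class S3LikeTest(unittest.TestCase):
    def setUp(self):
        patcher = mock.patch('os.path.getsize', return_value=10000)
        self.addCleanup(patcher.stop)    # runs even if setUp fails later
        self.mock_getsize = patcher.start()

    def test_size(self):
        self.assertEqual(os.path.getsize('/any/file'), 10000)
        self.mock_getsize.assert_called_once_with('/any/file')


if __name__ == '__main__':
    unittest.main()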

Example 26

View license
@mock.patch('library.cl_img_install.switch_slot')
@mock.patch('library.cl_img_install.get_slot_info')
@mock.patch('library.cl_img_install.AnsibleModule')
def test_check_sw_version(mock_module, mock_get_slot_info, mock_switch_slot):
    instance = mock_module.return_value
    # switch_slot = yes, version found in alternate slot and is primary
    instance.params.get.return_value = True
    slot_values = {
        '1': {'version': '2.0.10', 'primary': True},
        '2': {'version': '2.0.3', 'active': True}
    }
    mock_get_slot_info.return_value = slot_values
    instance.sw_version = '2.0.10'
    check_sw_version(instance)
    _msg = 'Version 2.0.10 is installed in the alternate slot. ' +\
        'Next reboot, switch will load 2.0.10.'
    instance.exit_json.assert_called_with(msg=_msg, changed=False)

    # switch_slot = no, version found in alternate slot
    instance.params.get.return_value = False
    instance.sw_version = '2.0.10'
    check_sw_version(instance)
    _msg = 'Version 2.0.10 is installed in the alternate slot. ' +\
        'switch_slot set to "no". No further action to take'
    instance.exit_json.assert_called_with(msg=_msg, changed=False)

    # switch_slot = yes, version found in alternate slot and not primary
    instance.params.get.return_value = True
    slot_values = {
        '1': {'version': '2.0.10'},
        '2': {'version': '2.0.3', 'primary': True, 'active': True}
    }
    mock_get_slot_info.return_value = slot_values
    instance.sw_version = '2.0.10'
    check_sw_version(instance)
    _msg = 'Version 2.0.10 is installed in the alternate slot. ' +\
        'cl-img-select has made the alternate slot the primary slot. ' +\
        'Next reboot, switch will load 2.0.10.'
    instance.exit_json.assert_called_with(msg=_msg, changed=True)

    # switch_slot = no, version found in alternate slot
    instance.params.get.return_value = False
    instance.sw_version = '2.0.10'
    check_sw_version(instance)
    _msg = "Version 2.0.10 is installed in the alternate slot. Next reboot will not load 2.0.10. switch_slot keyword set to 'no'."
    instance.exit_json.assert_called_with(msg=_msg, changed=False)

    # switch_slot = yes, code in active slot
    slot_values = {
        '1': {'version': '2.0.10'},
        '2': {'version': '2.0.3',  'primary': True, 'active': True}
    }
    mock_get_slot_info.return_value = slot_values
    instance.params.get.return_value = True
    instance.sw_version = '2.0.3'
    check_sw_version(instance)
    _msg = 'Version 2.0.3 is installed in the active slot'
    instance.exit_json.assert_called_with(msg=_msg, changed=False)

    # switch_slot = no, code in active slot
    instance.params.get.return_value = False
    instance.sw_version = '2.0.3'
    check_sw_version(instance)
    _msg = 'Version 2.0.3 is installed in the active slot'
    instance.exit_json.assert_called_with(msg=_msg, changed=False)

    # switch_slot = no, code in alternate slot
    slot_values = {
        '1': {'version': '2.0.10', 'active': True},
        '2': {'version': '2.0.3',  'primary': True}
    }
    mock_get_slot_info.return_value = slot_values
    instance.sw_version = '2.0.3'
    check_sw_version(instance)
    instance.exit_json.assert_called_with(
        msg='Version 2.0.3 is installed in the alternate slot. ' +
        'switch_slot set to "no". No further action to take', changed=False)

    instance.params.get.return_value = True
    check_sw_version(instance)
    instance.exit_json.assert_called_with(
        msg='Version 2.0.3 is installed in the alternate slot. ' +
        'Next reboot, switch will load 2.0.3.', changed=False)
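
check_sw_version is exercised several times above, and each exit_json assertion passes because assert_called_with only inspects the most recent call; earlier calls need assert_any_call or call_args_list. A minimal sketch (notify is a made-up mock):

import mock

notify = mock.Mock()
notify(msg='first', changed=True)
notify(msg='second', changed=False)

# assert_called_with only checks the most recent call...
notify.assert_called_with(msg='second', changed=False)
# ...earlier calls need assert_any_call or call_args_list.
notify.assert_any_call(msg='first', changed=True)
assert notify.call_args_list == [
    mock.call(msg='first', changed=True),
    mock.call(msg='second', changed=False),
]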

Example 27

Project: flask-bitmapist
Source File: test_extension.py
View license
@mock.patch('flask_bitmapist.utils.BitOpAnd')
@mock.patch('flask_bitmapist.utils.BitOpOr')
@mock.patch('flask_bitmapist.utils.BitOpXor')
@mock.patch('flask_bitmapist.utils.chain_events')
@mock.patch('flask_bitmapist.utils.YearEvents')
@mock.patch('flask_bitmapist.utils.MonthEvents')
@mock.patch('flask_bitmapist.utils.WeekEvents')
def test_get_cohort_structure(mock_week_events, mock_month_events,
                              mock_year_events, mock_chain_events,
                              mock_bit_op_xor, mock_bit_op_or, mock_bit_op_and):
    # Generate list of ints to act as user ids;
    # - between calls, should have some duplicate and some distinct
    # - temporarily convert to a set to force unique list items

    def e():
        return list(set([randint(1, 25) for n in range(10)]))

    def ee():
        return [e() for n in range(100)]

    mock_week_events.side_effect = ee()
    mock_month_events.side_effect = ee()
    mock_year_events.side_effect = ee()
    mock_chain_events.side_effect = ee()

    # Simulate BitOpAnd & BitOpOr returns but with lists
    mock_bit_op_and.side_effect = lambda x, y: list(set(x) & set(y))
    mock_bit_op_or.side_effect = lambda x, y: list(set(x) | set(y))
    mock_bit_op_xor.side_effect = lambda x, y: list(set(x) ^ set(y))

    c1, d1, t1 = get_cohort('A', 'B', time_group='weeks', num_rows=4, num_cols=4)
    c2, d2, t2 = get_cohort('A', 'B', time_group='months', num_rows=6, num_cols=5)
    c3, d3, t3 = get_cohort('A', 'B', time_group='years', num_rows=2, num_cols=3)

    # Assert cohort (+ date and total) lengths based on num_rows
    assert len(c1) == 4
    assert len(c1) == len(d1)
    assert len(c1) == len(t1)
    assert len(c2) == 6
    assert len(c2) == len(d2)
    assert len(c2) == len(t2)
    assert len(c3) == 2
    assert len(c3) == len(d3)
    assert len(c3) == len(t3)
    # Assert cohort row lengths based on num_cols
    assert len(c1[0]) == 4
    assert len(c2[0]) == 5
    assert len(c3[0]) == 3

    # Assert date values based on time_group given
    #     - dates are old->new, so use num_rows-1 to adjust index for timedelta

    def _week(x):
        return (x.year, x.month, x.day, x.isocalendar()[1])

    def _month(x):
        return (x.year, x.month)

    def _year(x):
        return (x.year)

    # 1 - weeks
    for idx, d in enumerate(d1):
        assert _week(d) == _week(now - timedelta(weeks=3-idx))
    # 2 - months
    for idx, d in enumerate(d2):
        this_month = now.replace(day=1)  # work with first day of month
        months_ago = (5 - idx) * 365 / 12  # no 'months' arg for timedelta
        assert _month(d) == _month(this_month - timedelta(months_ago))
    # 3 - years
    for idx, d in enumerate(d3):
        this_year = now.replace(month=1, day=1)  # work with first day of year
        years_ago = (1 - idx) * 365  # no 'years' arg for timedelta
        assert _year(d) == _year(this_year - timedelta(years_ago))
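
Two side_effect forms appear above: the ee() lists make each patched event class return a fresh value per call, while the lambdas compute a result from the call's arguments. A standalone illustration of both behaviours (sorted() is used here only so the result does not depend on set ordering):

from unittest import mock

m = mock.Mock(side_effect=[[1, 2], [3]])  # iterable: one item per call
assert m() == [1, 2]
assert m() == [3]

# Callable: invoked with the call's arguments; its return value is used.
bit_and = mock.Mock(side_effect=lambda x, y: sorted(set(x) & set(y)))
assert bit_and([1, 2, 3], [2, 3, 4]) == [2, 3]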

Example 28

Project: dd-agent
Source File: test_service_discovery.py
View license
    def test_fill_tpl(self):
        """Test _fill_tpl with mocked docker client"""

        valid_configs = [
            # ((inspect, instance_tpl, variables, tags), (expected_instance_tpl, expected_var_values))
            (({}, {'host': 'localhost'}, [], None), ({'host': 'localhost'}, {})),
            (
                ({'NetworkSettings': {'IPAddress': ''}}, {'host': 'localhost'}, [], None),
                ({'host': 'localhost'}, {})
            ),
            (
                ({'NetworkSettings': {'Networks': {}}}, {'host': 'localhost'}, [], None),
                ({'host': 'localhost'}, {})
            ),
            (
                ({'NetworkSettings': {'Networks': {'bridge': {}}}}, {'host': 'localhost'}, [], None),
                ({'host': 'localhost'}, {})
            ),
            (
                ({'NetworkSettings': {'IPAddress': '127.0.0.1'}},
                 {'host': '%%host%%', 'port': 1337}, ['host'], ['foo', 'bar:baz']),
                ({'host': '%%host%%', 'port': 1337, 'tags': ['foo', 'bar:baz']}, {'host': '127.0.0.1'}),
            ),
            (
                ({'NetworkSettings': {'IPAddress': '127.0.0.1', 'Networks': {}}},
                 {'host': '%%host%%', 'port': 1337}, ['host'], ['foo', 'bar:baz']),
                ({'host': '%%host%%', 'port': 1337, 'tags': ['foo', 'bar:baz']}, {'host': '127.0.0.1'}),
            ),
            (
                ({'NetworkSettings': {
                    'IPAddress': '127.0.0.1',
                    'Networks': {'bridge': {'IPAddress': '172.17.0.2'}}}
                  },
                 {'host': '%%host%%', 'port': 1337}, ['host'], ['foo', 'bar:baz']),
                ({'host': '%%host%%', 'port': 1337, 'tags': ['foo', 'bar:baz']}, {'host': '172.17.0.2'}),
            ),
            (
                ({'NetworkSettings': {
                    'IPAddress': '',
                    'Networks': {
                        'bridge': {'IPAddress': '172.17.0.2'},
                        'foo': {'IPAddress': '192.168.0.2'}
                    }}
                  },
                 {'host': '%%host_bridge%%', 'port': 1337}, ['host_bridge'], ['foo', 'bar:baz']),
                ({'host': '%%host_bridge%%', 'port': 1337, 'tags': ['foo', 'bar:baz']},
                 {'host_bridge': '172.17.0.2'}),
            ),
            (
                ({'NetworkSettings': {
                    'IPAddress': '',
                    'Networks': {
                        'bridge': {'IPAddress': '172.17.0.2'},
                        'foo': {'IPAddress': '192.168.0.2'}
                    }}
                  },
                 {'host': '%%host_foo%%', 'port': 1337}, ['host_foo'], ['foo', 'bar:baz']),
                ({'host': '%%host_foo%%', 'port': 1337, 'tags': ['foo', 'bar:baz']},
                 {'host_foo': '192.168.0.2'}),
            ),
            (
                ({'NetworkSettings': {'IPAddress': '127.0.0.1', 'Ports': {'42/tcp': None, '22/tcp': None}}},
                 {'host': '%%host%%', 'port': '%%port_1%%', 'tags': ['env:test']},
                 ['host', 'port_1'], ['foo', 'bar:baz']),
                ({'host': '%%host%%', 'port': '%%port_1%%', 'tags': ['env:test', 'foo', 'bar:baz']},
                 {'host': '127.0.0.1', 'port_1': '42'})
            )
        ]

        # should not fail but return something specific
        edge_cases = [
            # ((inspect, instance_tpl, variables, tags), (expected_instance_tpl, expected_var_values))

            # specify bridge but there is also a default IPAddress (networks should be preferred)
            (
                ({'NetworkSettings': {
                    'IPAddress': '127.0.0.1',
                    'Networks': {'bridge': {'IPAddress': '172.17.0.2'}}}},
                 {'host': '%%host_bridge%%', 'port': 1337}, ['host_bridge'], ['foo', 'bar:baz']),
                ({'host': '%%host_bridge%%', 'port': 1337, 'tags': ['foo', 'bar:baz']},
                 {'host_bridge': '172.17.0.2'})
            ),
            # specify index but there is a default IPAddress (there's a specifier, even if it's wrong, walking networks should be preferred)
            (
                ({'NetworkSettings': {
                    'IPAddress': '127.0.0.1',
                    'Networks': {'bridge': {'IPAddress': '172.17.0.2'}}}},
                 {'host': '%%host_0%%', 'port': 1337}, ['host_0'], ['foo', 'bar:baz']),
                ({'host': '%%host_0%%', 'port': 1337, 'tags': ['foo', 'bar:baz']}, {'host_0': '172.17.0.2'}),
            ),
            # missing key for host, bridge network should be preferred
            (
                ({'NetworkSettings': {'Networks': {
                    'bridge': {'IPAddress': '127.0.0.1'},
                    'foo': {'IPAddress': '172.17.0.2'}}}},
                 {'host': '%%host_bar%%', 'port': 1337}, ['host_bar'], []),
                ({'host': '%%host_bar%%', 'port': 1337}, {'host_bar': '127.0.0.1'}),
            ),
            # missing index for port
            (
                ({'NetworkSettings': {'IPAddress': '127.0.0.1', 'Ports': {'42/tcp': None, '22/tcp': None}}},
                 {'host': '%%host%%', 'port': '%%port_2%%', 'tags': ['env:test']},
                 ['host', 'port_2'], ['foo', 'bar:baz']),
                ({'host': '%%host%%', 'port': '%%port_2%%', 'tags': ['env:test', 'foo', 'bar:baz']},
                 {'host': '127.0.0.1', 'port_2': '42'})
            )
        ]

        # should raise
        invalid_config = [
            # ((inspect, instance_tpl, variables, tags), expected_exception)

            # template variable but no IPAddress available
            (
                ({'NetworkSettings': {'Networks': {}}},
                 {'host': '%%host%%', 'port': 1337}, ['host'], ['foo', 'bar:baz']),
                Exception,
            ),
            # index but no IPAddress available
            (
                ({'NetworkSettings': {'Networks': {}}},
                 {'host': '%%host_0%%', 'port': 1337}, ['host_0'], ['foo', 'bar:baz']),
                Exception,
            ),
            # key but no IPAddress available
            (
                ({'NetworkSettings': {'Networks': {}}},
                 {'host': '%%host_foo%%', 'port': 1337}, ['host_foo'], ['foo', 'bar:baz']),
                Exception,
            ),

            # template variable but no port available
            (
                ({'NetworkSettings': {'Networks': {}}},
                 {'host': 'localhost', 'port': '%%port%%'}, ['port'], []),
                Exception,
            ),
            # index but no port available
            (
                ({'NetworkSettings': {'Networks': {}}},
                 {'host': 'localhost', 'port_0': '%%port%%'}, ['port_0'], []),
                Exception,
            ),
            # key but no port available
            (
                ({'NetworkSettings': {'Networks': {}}},
                 {'host': 'localhost', 'port': '%%port_foo%%'}, ['port_foo'], []),
                Exception,
            )
        ]

        with mock.patch('utils.dockerutil.DockerUtil.client', return_value=None):
            with mock.patch.object(EtcdStore, 'get_client', return_value=None):
                with mock.patch.object(ConsulStore, 'get_client', return_value=None):
                    for ac in self.agentConfigs:
                        sd_backend = get_sd_backend(agentConfig=ac)
                        try:
                            for co in valid_configs + edge_cases:
                                inspect, tpl, variables, tags = co[0]
                                instance_tpl, var_values = sd_backend._fill_tpl(inspect, tpl, variables, tags)
                                for key in instance_tpl.keys():
                                    if isinstance(instance_tpl[key], list):
                                        self.assertEquals(len(instance_tpl[key]), len(co[1][0].get(key)))
                                        for elem in instance_tpl[key]:
                                            self.assertTrue(elem in co[1][0].get(key))
                                    else:
                                        self.assertEquals(instance_tpl[key], co[1][0].get(key))
                                self.assertEquals(var_values, co[1][1])

                            for co in invalid_config:
                                inspect, tpl, variables, tags = co[0]
                                self.assertRaises(co[1], sd_backend._fill_tpl,
                                                  inspect, tpl, variables, tags)

                            clear_singletons(ac)
                        except Exception:
                            clear_singletons(ac)
                            raise
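
Note the assertRaises form used for invalid_config: the callable and its arguments must be passed separately so that assertRaises itself performs the call inside a try block; calling the function inline raises before assertRaises can catch anything. A short self-contained reminder:

import unittest

class AssertRaisesDemo(unittest.TestCase):
    def test_forms(self):
        def boom(x):
            raise ValueError(x)
        # Pass the callable plus its arguments; assertRaises does the call.
        self.assertRaises(ValueError, boom, 42)
        # The context-manager form is equivalent and often more readable.
        with self.assertRaises(ValueError):
            boom(42)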

Example 29

Project: aenea
Source File: test_server_x11.py
View license
    @mock.patch('server_x11.write_command')
    @mock.patch('server_x11.flush_xdotool')
    @mock.patch('server_x11.run_command')
    def test_multiple_actions(self, run_command, flush, write_command):
        calls = []

        def mock_flush(actions):
            '''Mock records args by reference, so snapshot before del actions[:].'''
            if actions:
                calls.append(actions[:])
            del actions[:]

        flush.side_effect = mock_flush
        self.port = PORT + 7
        self.server = server_x11.setup_server(HOST, self.port)

        test_thread = threading.Thread(target=self.multiple_actions)
        test_thread.start()
        self.server.serve_forever()
        test_thread.join()

        # No easy way to test interleaving, so we rely on shape of flushes
        # to check proper happens-before.
        self.assertEqual(
            write_command.mock_calls,
            [mock.call(
                'Hello world!',
                arguments='type --file - --delay 0'
                )] * 2
            )

        step1 = [
            'key a',
            'keydown Shift_L',
            'key a',
            'keyup Shift_L',
            'keydown Shift_L',
            'key b',
            'key b',
            'key b',
            'keyup Shift_L'
            ]

        step2 = [
            'click  --repeat 2 1',
            'click  --repeat 2 4',
            'click   3',
            'click --delay 70  --repeat 5 3',
            'click   2',
            'sleep 0.500000'
            ]

        self.assertEqual(calls, [step1, step2, step1 + step2])
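
The mock_flush helper exists because a Mock records references to its arguments, not copies: once the server does del actions[:], every previously recorded call would show an empty list. A standalone demonstration of the pitfall and of the snapshot fix used above:

from unittest import mock

flush = mock.Mock()
actions = ['key a']
flush(actions)
del actions[:]                            # caller clears the shared list
assert flush.call_args == mock.call([])   # the recorded args mutated too

calls = []
flush2 = mock.Mock(side_effect=lambda a: calls.append(a[:]))  # snapshot
actions = ['key a']
flush2(actions)
del actions[:]
assert calls == [['key a']]               # the copy survives the del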

Example 30

Project: DIRAC
Source File: Test_XROOTStorage.py
View license
  @mock.patch('os.path.exists', new=MagicMock( return_value = True ))
  @mock.patch('DIRAC.Resources.Storage.XROOTStorage.getSize', new=MagicMock( return_value = 1 ))
  def test_putFile( self ):
    """ Test the output of putFile"""

    global mocked_xrootclient

    resource = XROOTStorage( 'storageName', self.parameterDict )

    statusMock = xrootStatusMock()
    statusMock.makeOk()

    mocked_xrootclient.copy.return_value = statusMock, None

    statusMkDirMock = xrootStatusMock()
    statusMkDirMock.makeOk()

    mocked_xrootclient.mkdir.return_value = statusMkDirMock, None

    statusRmMock = xrootStatusMock()
    statusRmMock.makeOk()

    mocked_xrootclient.rm.return_value = statusRmMock, None

    statusStatMock = xrootStatusMock()
    statusStatMock.makeOk()


    statInfoMock = xrootStatInfoMock()
    statInfoMock.makeFile()
    statInfoMock.size = 1

    updateStatMockReferences(statusStatMock, statInfoMock)

    # This test should be completely okay
    copymock = mock.Mock()
    copymock.run.return_value = (statusMock, None)
    mocked_xrootd.client.CopyProcess = mock.Mock(return_value = copymock)
    res = resource.putFile( {"remoteA" : "localA"} )
    self.assertEqual( True, res['OK'] )
    self.assertEqual( {"remoteA" : 1}, res['Value']['Successful'] )
    self.assertEqual( {}, res['Value']['Failed'] )


    # Here the sizes should not match
    statInfoMock.size = 1000
    res = resource.putFile( {"remoteA" : "localA"} )
    self.assertEqual( True, res['OK'] )
    self.assertEqual( {}, res['Value']['Successful'] )
    self.assertEqual( "remoteA", res['Value']['Failed'].keys()[0] )
    statInfoMock.size = 1


    # Here we should not be able to put the file to storage
    statusMock.makeError()
    res = resource.putFile( {"remoteA" : "localA"} )
    self.assertEqual( True, res['OK'] )
    self.assertEqual( {}, res['Value']['Successful'] )
    self.assertEqual( "remoteA", res['Value']['Failed'].keys()[0] )

    # Fatal error when putting the file to storage
    statusMock.makeFatal()
    res = resource.putFile( {"remoteA" : "localA"} )
    self.assertEqual( True, res['OK'] )
    self.assertEqual( {}, res['Value']['Successful'] )
    self.assertEqual( "remoteA", res['Value']['Failed'].keys()[0] )

    # Bad input
    res = resource.putFile( "remoteA" )
    self.assertEqual( False, res['OK'] )


    # Error, but not 3011, when checking existence of the file, and then successful anyway
    statusMock.makeOk()

    with mock.patch.object(XROOTStorage, '_XROOTStorage__singleExists', return_value=S_OK(S_ERROR("error checking existence "))):
      res = resource.putFile( {"remoteA" : "localA"} )
      self.assertEqual( True, res['OK'] )
      self.assertEqual(  {'remoteA': 1}, res['Value']['Successful'] )
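
The decorators above pass new=..., which is why test_putFile receives no extra mock arguments: when new is given, mock.patch installs that exact object and injects nothing into the decorated function. Compare the two forms in this minimal sketch:

from unittest import mock
import os.path

@mock.patch('os.path.exists', new=mock.MagicMock(return_value=True))
def with_new():                     # no injected argument
    return os.path.exists('/nope')

@mock.patch('os.path.exists', return_value=True)
def without_new(mock_exists):       # the patcher injects the mock
    return os.path.exists('/nope'), mock_exists.call_count

assert with_new() is True
assert without_new() == (True, 1)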

Example 31

Project: edx-platform
Source File: testutil.py
View license
@contextmanager
def simulate_running_pipeline(pipeline_target, backend, email=None, fullname=None, username=None):
    """Simulate that a pipeline is currently running.

    You can use this context manager to test packages that rely on third party auth.

    This uses `mock.patch` to override some calls in `third_party_auth.pipeline`,
    so you will need to provide the "target" module *as it is imported*
    in the software under test.  For example, if `foo/bar.py` does this:

    >>> from third_party_auth import pipeline

    then you will need to do something like this:

    >>> with simulate_running_pipeline("foo.bar.pipeline", "google-oauth2"):
    ...     bar.do_something_with_the_pipeline()

    If, on the other hand, `foo/bar.py` had done this:

    >>> import third_party_auth

    then you would use the target "foo.bar.third_party_auth.pipeline" instead.

    Arguments:

        pipeline_target (string): The path to `third_party_auth.pipeline` as it is imported
            in the software under test.

        backend (string): The name of the backend currently running, for example "google-oauth2".
            Note that this is NOT the same as the name of the *provider*.  See the Python
            social auth documentation for the names of the backends.

    Keyword Arguments:
        email (string): If provided, simulate that the current provider has
            included the user's email address (useful for filling in the registration form).

        fullname (string): If provided, simulate that the current provider has
            included the user's full name (useful for filling in the registration form).

        username (string): If provided, simulate that the pipeline has provided
            this suggested username.  This is something that the `third_party_auth`
            app generates itself and should be available by the time the user
            is authenticating with a third-party provider.

    Returns:
        None

    """
    pipeline_data = {
        "backend": backend,
        "kwargs": {
            "details": {}
        }
    }
    if email is not None:
        pipeline_data["kwargs"]["details"]["email"] = email
    if fullname is not None:
        pipeline_data["kwargs"]["details"]["fullname"] = fullname
    if username is not None:
        pipeline_data["kwargs"]["username"] = username

    pipeline_get = mock.patch("{pipeline}.get".format(pipeline=pipeline_target), spec=True)
    pipeline_running = mock.patch("{pipeline}.running".format(pipeline=pipeline_target), spec=True)

    mock_get = pipeline_get.start()
    mock_running = pipeline_running.start()

    mock_get.return_value = pipeline_data
    mock_running.return_value = True

    try:
        yield

    finally:
        pipeline_get.stop()
        pipeline_running.stop()
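
The start()/stop() pattern above is what makes the context manager composable: each patcher is activated explicitly and restored in the finally block even if the body raises, while spec=True constrains each mock to the attributes of the object it replaces. A compact sketch of the same pattern on a standard-library target:

import json
from unittest import mock

patcher = mock.patch('json.dumps', spec=True)
mock_dumps = patcher.start()
try:
    mock_dumps.return_value = '{}'
    assert json.dumps({'a': 1}) == '{}'   # the mock answers
finally:
    patcher.stop()                        # always restore the original
assert json.dumps({'a': 1}) == '{"a": 1}'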

Example 32

Project: barman
Source File: test_recovery_executor.py
View license
    @mock.patch('barman.recovery_executor.RsyncCopyController')
    @mock.patch('barman.recovery_executor.RsyncPgData')
    @mock.patch('barman.recovery_executor.UnixRemoteCommand')
    def test_recovery(self, remote_cmd_mock, rsync_pg_mock,
                      copy_controller_mock, tmpdir):
        """
        Test the execution of a recovery
        """
        # Prepare basic directory/files structure
        dest = tmpdir.mkdir('destination')
        base = tmpdir.mkdir('base')
        wals = tmpdir.mkdir('wals')
        backup_info = testing_helpers.build_test_backup_info(tablespaces=[])
        backup_info.config.basebackups_directory = base.strpath
        backup_info.config.wals_directory = wals.strpath
        backup_info.version = 90400
        datadir = base.mkdir(backup_info.backup_id).mkdir('data')
        backup_info.pgdata = datadir.strpath
        postgresql_conf_local = datadir.join('postgresql.conf')
        postgresql_auto_local = datadir.join('postgresql.auto.conf')
        postgresql_conf_local.write('archive_command = something\n'
                                    'data_directory = something')
        postgresql_auto_local.write('archive_command = something\n'
                                    'data_directory = something')
        shutil.copy2(postgresql_conf_local.strpath, dest.strpath)
        shutil.copy2(postgresql_auto_local.strpath, dest.strpath)
        # Avoid triggering warning for missing config files
        datadir.ensure('pg_hba.conf')
        datadir.ensure('pg_ident.conf')
        # Build an executor
        server = testing_helpers.build_real_server(
            global_conf={
                "barman_lock_directory": tmpdir.mkdir('lock').strpath
            },
            main_conf={
                "wals_directory": wals.strpath
            })
        executor = RecoveryExecutor(server.backup_manager)
        # test local recovery
        rec_info = executor.recover(backup_info, dest.strpath, None, None,
                                    None, None, None, True, None)
        # remove keys that are not useful from the result
        del rec_info['cmd']
        sys_tempdir = rec_info['tempdir']
        assert rec_info == {
            'rsync': None,
            'tempdir': sys_tempdir,
            'wal_dest': dest.join('pg_xlog').strpath,
            'recovery_dest': 'local',
            'destination_path': dest.strpath,
            'temporary_configuration_files': [
                dest.join('postgresql.conf').strpath,
                dest.join('postgresql.auto.conf').strpath],
            'results': {
                'delete_barman_xlog': False,
                'get_wal': False,
                'changes': [
                    Assertion._make([
                        'postgresql.conf',
                        0,
                        'archive_command',
                        'false']),
                    Assertion._make([
                        'postgresql.auto.conf',
                        0,
                        'archive_command',
                        'false'])],
                'missing_files': [],
                'warnings': [
                    Assertion._make([
                        'postgresql.conf',
                        2,
                        'data_directory',
                        'something']),
                    Assertion._make([
                        'postgresql.auto.conf',
                        2,
                        'data_directory',
                        'something'])]},
            'target_epoch': None,
            'configuration_files': [
                'postgresql.conf',
                'postgresql.auto.conf'],
            'target_datetime': None,
            'safe_horizon': None,
            'is_pitr': False,
            'get_wal': False,
        }
        # test remote recovery
        rec_info = executor.recover(backup_info, dest.strpath, {}, None, None,
                                    None, None, True, "[email protected]")
        # remove keys that are not useful from the result
        del rec_info['cmd']
        del rec_info['rsync']
        sys_tempdir = rec_info['tempdir']
        assert rec_info == {
            'tempdir': sys_tempdir,
            'wal_dest': dest.join('pg_xlog').strpath,
            'recovery_dest': 'remote',
            'destination_path': dest.strpath,
            'temporary_configuration_files': [
                os.path.join(sys_tempdir, 'postgresql.conf'),
                os.path.join(sys_tempdir, 'postgresql.auto.conf')],
            'results': {
                'delete_barman_xlog': False,
                'get_wal': False,
                'changes': [
                    Assertion._make([
                        'postgresql.conf',
                        0,
                        'archive_command',
                        'false']),
                    Assertion._make([
                        'postgresql.auto.conf',
                        0,
                        'archive_command',
                        'false'])],
                'missing_files': [],
                'warnings': [
                    Assertion._make([
                        'postgresql.conf',
                        2,
                        'data_directory',
                        'something']),
                    Assertion._make([
                        'postgresql.auto.conf',
                        2,
                        'data_directory',
                        'something'])]},
            'target_epoch': None,
            'configuration_files': [
                'postgresql.conf',
                'postgresql.auto.conf'],
            'target_datetime': None,
            'safe_horizon': None,
            'is_pitr': False,
            'get_wal': False,
        }
        # test failed rsync
        rsync_pg_mock.side_effect = CommandFailedException()
        with pytest.raises(CommandFailedException):
            executor.recover(backup_info, dest.strpath, {}, None, None, None,
                             None, True, "[email protected]")

Example 33

Project: fbpush
Source File: test_push.py
View license
    @mock.patch('fbpush.push.logger.info')
    @mock.patch('fbpush.push.logger.debug')
    @mock.patch('fbpush.main.logger.info')
    @mock.patch('fbpush.main.reactor.callLater')
    def test_success_real_commit(self, mock_reactor, mock_main_log,
                                 mock_debug, mock_info):
        """ This checks the successful checkRealCommi call
            both success and failure
        """

        jnx = self.mock_JunoscriptRpc
        jnx.configlet_names = 'abc'
        jnx.configlet_hash = mock.MagicMock()
        jnx.configlet_hash.hexdigest = mock.MagicMock()

        jnx.configlet_hash.hexdigest.return_value = 'hex1'

        self.mock_JunoscriptRpc.updateStatus = mock.MagicMock()
        self.mock_JunoscriptRpc.commitForReal()
        msg = 'Executing commit on device %s'
        kwargs = {
            'log': "MD5(%s)=%s" % (','.join(jnx.configlet_names),
                                   jnx.configlet_hash.hexdigest()),
            'full': True
        }

        # Now call the send_xml_call method, which contains checks for
        # sending XML requests to the network devices.
        self.send_xml_call(mock_info, '.', 'EXECUTING REAL COMMIT', True,
                           'commit_configuration', msg, self.device.name,
                           **kwargs)
        rcommit_response = self.read_xml_file('real_commit.xml')
        self.mock_JunoscriptRpc.updateStatus = mock.MagicMock()

        # Setting the state to the one where the fsm would have been
        # in the normal workflow of the code. In this case the method
        # testRealCommit would be called only after 'commitForReal'
        # method which sets the state to ST_COMMIT_FOR_REAL_TESTING.
        # This prev_state is common for both success and failure.
        jnx = self.mock_JunoscriptRpc

        jnx.fsm.state = jnx.fsm.ST_COMMIT_FOR_REAL_TESTING
        prev_state = jnx.fsm.state

        # Testing the code when the xml_reply is successful
        EV_TEST_SUCCESS = self.mock_JunoscriptRpc.fsm.EV_TEST_SUCCESS

        self.mock_JunoscriptRpc.testRealCommit(rcommit_response)
        self.success_reply_xml('!', 'COMMITTED SUCCESSFULLY!', False,
                               jnx, EV_TEST_SUCCESS, prev_state)

Example 34

Project: bodhi
Source File: test_masher.py
View license
    @mock.patch(**mock_taskotron_results)
    @mock.patch('bodhi.server.consumers.masher.MasherThread.update_comps')
    @mock.patch('bodhi.server.consumers.masher.MashThread.run')
    @mock.patch('bodhi.server.consumers.masher.MasherThread.wait_for_mash')
    @mock.patch('bodhi.server.consumers.masher.MasherThread.sanity_check_repo')
    @mock.patch('bodhi.server.consumers.masher.MasherThread.stage_repo')
    @mock.patch('bodhi.server.consumers.masher.MasherThread.generate_updateinfo')
    @mock.patch('bodhi.server.consumers.masher.MasherThread.wait_for_sync')
    @mock.patch('bodhi.server.notifications.publish')
    def test_security_update_priority_testing(self, publish, *args):
        with self.db_factory() as db:
            up = db.query(Update).one()
            up.type = UpdateType.security
            up.request = UpdateRequest.testing
            user = db.query(User).first()

            # Create a security update for a different release
            release = Release(
                name=u'F18', long_name=u'Fedora 18',
                id_prefix=u'FEDORA', version=u'18',
                dist_tag=u'f18', stable_tag=u'f18-updates',
                testing_tag=u'f18-updates-testing',
                candidate_tag=u'f18-updates-candidate',
                pending_signing_tag=u'f18-updates-testing-signing',
                pending_testing_tag=u'f18-updates-testing-pending',
                pending_stable_tag=u'f18-updates-pending',
                override_tag=u'f18-override',
                branch=u'f18')
            db.add(release)
            build = Build(nvr=u'bodhi-2.0-1.fc18', release=release,
                          package=up.builds[0].package)
            db.add(build)
            update = Update(
                title=u'bodhi-2.0-1.fc18',
                builds=[build], user=user,
                status=UpdateStatus.testing,
                request=UpdateRequest.stable,
                notes=u'Useful details!', release=release)
            update.type = UpdateType.enhancement
            db.add(update)

            # Wipe out the tag cache so it picks up our new release
            Release._tag_cache = None

        self.msg['body']['msg']['updates'] += ['bodhi-2.0-1.fc18']

        self.masher.consume(self.msg)

        # Ensure that F17 updates-testing runs before F18
        calls = publish.mock_calls
        self.assertEquals(calls[1], mock.call(
            msg={'repo': u'f17-updates-testing',
                 'updates': [u'bodhi-2.0-1.fc17'],
                 'agent': 'lmacken'},
            force=True,
            topic='mashtask.mashing'))
        self.assertEquals(calls[3], mock.call(
            msg={'success': True,
                 'repo': 'f17-updates-testing',
                 'agent': 'lmacken'},
            force=True,
            topic='mashtask.complete'))
        self.assertEquals(calls[4], mock.call(
            msg={'repo': u'f18-updates',
                 'updates': [u'bodhi-2.0-1.fc18'],
                 'agent': 'lmacken'},
            force=True,
            topic='mashtask.mashing'))
        self.assertEquals(calls[-1], mock.call(
            msg={'success': True, 'repo': 'f18-updates', 'agent': 'lmacken'},
            force=True,
            topic='mashtask.complete'))

Example 35

Project: bodhi
Source File: test_masher.py
View license
    @mock.patch(**mock_taskotron_results)
    @mock.patch('bodhi.server.consumers.masher.MasherThread.update_comps')
    @mock.patch('bodhi.server.consumers.masher.MashThread.run')
    @mock.patch('bodhi.server.consumers.masher.MasherThread.wait_for_mash')
    @mock.patch('bodhi.server.consumers.masher.MasherThread.sanity_check_repo')
    @mock.patch('bodhi.server.consumers.masher.MasherThread.stage_repo')
    @mock.patch('bodhi.server.consumers.masher.MasherThread.generate_updateinfo')
    @mock.patch('bodhi.server.consumers.masher.MasherThread.wait_for_sync')
    @mock.patch('bodhi.server.notifications.publish')
    def test_security_updates_parallel(self, publish, *args):
        with self.db_factory() as db:
            up = db.query(Update).one()
            up.type = UpdateType.security
            up.status = UpdateStatus.testing
            up.request = UpdateRequest.stable
            user = db.query(User).first()

            # Create a security update for a different release
            release = Release(
                name=u'F18', long_name=u'Fedora 18',
                id_prefix=u'FEDORA', version=u'18',
                dist_tag=u'f18', stable_tag=u'f18-updates',
                testing_tag=u'f18-updates-testing',
                candidate_tag=u'f18-updates-candidate',
                pending_signing_tag=u'f18-updates-testing-signing',
                pending_testing_tag=u'f18-updates-testing-pending',
                pending_stable_tag=u'f18-updates-pending',
                override_tag=u'f18-override',
                branch=u'f18')
            db.add(release)
            build = Build(nvr=u'bodhi-2.0-1.fc18', release=release,
                          package=up.builds[0].package)
            db.add(build)
            update = Update(
                title=u'bodhi-2.0-1.fc18',
                builds=[build], user=user,
                status=UpdateStatus.testing,
                request=UpdateRequest.stable,
                notes=u'Useful details!', release=release)
            update.type = UpdateType.security
            db.add(update)

            # Wipe out the tag cache so it picks up our new release
            Release._tag_cache = None

        self.msg['body']['msg']['updates'] += ['bodhi-2.0-1.fc18']

        self.masher.consume(self.msg)

        # Ensure that F18 and F17 run in parallel
        calls = publish.mock_calls
        if calls[1] == mock.call(
            msg={'repo': u'f18-updates',
                 'updates': [u'bodhi-2.0-1.fc18'],
                 'agent': 'lmacken'},
            force=True,
            topic='mashtask.mashing'):
            self.assertEquals(calls[2], mock.call(
                msg={'repo': u'f17-updates',
                     'updates': [u'bodhi-2.0-1.fc17'],
                     'agent': 'lmacken'},
                force=True,
                topic='mashtask.mashing'))
        elif calls[1] == mock.call(
            msg={'repo': u'f17-updates',
                 'updates': [u'bodhi-2.0-1.fc17'],
                 'agent': 'lmacken'},
            force=True,
            topic='mashtask.mashing'):
            self.assertEquals(calls[2], mock.call(
                msg={'repo': u'f18-updates',
                     'updates': [u'bodhi-2.0-1.fc18'],
                     'agent': 'lmacken'},
                force=True,
                topic='mashtask.mashing'))

Example 36

Project: cot
Source File: test_vmdktool.py
View license
    @mock.patch('platform.system', return_value='Linux')
    @mock.patch('os.path.isdir', return_value=False)
    @mock.patch('os.path.exists', return_value=False)
    @mock.patch('os.makedirs', side_effect=OSError)
    @mock.patch('distutils.spawn.find_executable')
    @mock.patch('COT.helpers.helper.check_output', return_value="")
    @mock.patch('subprocess.check_call')
    def test_install_helper_apt_get(self,
                                    mock_check_call,
                                    mock_check_output,
                                    *_):
        """Test installation via 'apt-get'."""
        self.enable_apt_install()
        helpers['dpkg']._installed = True
        helpers['make']._installed = False
        self.helper.install()
        self.assertSubprocessCalls(
            mock_check_output,
            [
                ['dpkg', '-s', 'make'],
                ['dpkg', '-s', 'zlib1g-dev'],
            ])
        self.assertSubprocessCalls(
            mock_check_call,
            [
                ['apt-get', '-q', 'update'],
                ['apt-get', '-q', 'install', 'make'],
                ['apt-get', '-q', 'install', 'zlib1g-dev'],
                ['make', 'CFLAGS="-D_GNU_SOURCE -g -O -pipe"'],
                ['sudo', 'mkdir', '-p', '--mode=755', '/usr/local/man/man8'],
                ['sudo', 'mkdir', '-p', '--mode=755', '/usr/local/bin'],
                ['make', 'install', 'PREFIX=/usr/local'],
            ])
        self.assertAptUpdated()

        # Make sure we don't 'apt-get update/install' again unnecessarily
        mock_check_call.reset_mock()
        mock_check_output.reset_mock()
        mock_check_output.return_value = 'install ok installed'
        # Fake out the cached state: pretend the helper itself is not installed
        self.helper._installed = False
        helpers['make']._installed = True

        os.environ['PREFIX'] = '/opt/local'
        os.environ['DESTDIR'] = '/home/cot'

        self.helper.install()
        self.assertSubprocessCalls(
            mock_check_output,
            [
                ['dpkg', '-s', 'zlib1g-dev'],
            ])
        self.assertSubprocessCalls(
            mock_check_call,
            [
                ['make', 'CFLAGS="-D_GNU_SOURCE -g -O -pipe"'],
                ['sudo', 'mkdir', '-p', '--mode=755',
                 '/home/cot/opt/local/man/man8'],
                ['sudo', 'mkdir', '-p', '--mode=755',
                 '/home/cot/opt/local/bin'],
                ['make', 'install', 'PREFIX=/opt/local', 'DESTDIR=/home/cot'],
            ])

Example 37

Project: gsutil
Source File: test_metrics.py
View license
  def testConfigValueValidation(self):
    """Tests the validation of potentially PII config values."""
    string_and_bool_categories = ['check_hashes', 'content_language',
                                  'disable_analytics_prompt',
                                  'https_validate_certificates',
                                  'json_api_version',
                                  'parallel_composite_upload_component_size',
                                  'parallel_composite_upload_threshold',
                                  'prefer_api',
                                  'sliced_object_download_component_size',
                                  'sliced_object_download_threshold',
                                  'tab_completion_time_logs', 'token_cache',
                                  'use_magicfile']
    int_categories = ['debug', 'default_api_version', 'http_socket_timeout',
                      'max_retry_delay', 'num_retries',
                      'oauth2_refresh_retries', 'parallel_process_count',
                      'parallel_thread_count', 'resumable_threshold',
                      'rsync_buffer_lines',
                      'sliced_object_download_max_components',
                      'software_update_check_period', 'tab_completion_timeout',
                      'task_estimation_threshold']
    all_categories = sorted(string_and_bool_categories + int_categories)

    # Test general invalid values.
    with mock.patch('boto.config.get_value', return_value=None):
      self.assertEqual('', self.collector._ValidateAndGetConfigValues())

    with mock.patch('boto.config.get_value', return_value='invalid string'):
      self.assertEqual(','.join([
          category + ':INVALID' for category in all_categories
      ]), self.collector._ValidateAndGetConfigValues())

    # Test that non-ASCII characters are invalid.
    with mock.patch('boto.config.get_value', return_value='£'):
      self.assertEqual(','.join([
          category + ':INVALID' for category in all_categories
      ]), self.collector._ValidateAndGetConfigValues())

    # Mock valid return values for specific string validations.
    def MockValidStrings(section, category):
      if section == 'GSUtil':
        if category == 'check_hashes':
          return 'if_fast_else_skip'
        if category == 'content_language':
          return 'chi'
        if category == 'json_api_version':
          return 'v3'
        if category == 'prefer_api':
          return 'xml'
        if category in ('disable_analytics_prompt', 'use_magicfile',
                        'tab_completion_time_logs'):
          return 'True'
      if section == 'OAuth2' and category == 'token_cache':
        return 'file_system'
      if section == 'Boto' and category == 'https_validate_certificates':
        return 'True'
      return ''
    with mock.patch('boto.config.get_value', side_effect=MockValidStrings):
      self.assertEqual(
          'check_hashes:if_fast_else_skip,content_language:chi,'
          'disable_analytics_prompt:True,https_validate_certificates:True,'
          'json_api_version:v3,prefer_api:xml,tab_completion_time_logs:True,'
          'token_cache:file_system,use_magicfile:True',
          self.collector._ValidateAndGetConfigValues())

    # Test that "small" and "large" integers are appropriately validated.
    def MockValidSmallInts(_, category):
      if category in int_categories:
        return '1999'
      return ''
    with mock.patch('boto.config.get_value', side_effect=MockValidSmallInts):
      self.assertEqual(
          'debug:1999,default_api_version:1999,http_socket_timeout:1999,'
          'max_retry_delay:1999,num_retries:1999,oauth2_refresh_retries:1999,'
          'parallel_process_count:1999,parallel_thread_count:1999,'
          'resumable_threshold:1999,rsync_buffer_lines:1999,'
          'sliced_object_download_max_components:1999,'
          'software_update_check_period:1999,tab_completion_timeout:1999,'
          'task_estimation_threshold:1999',
          self.collector._ValidateAndGetConfigValues())

    def MockValidLargeInts(_, category):
      if category in int_categories:
        return '2001'
      return ''
    with mock.patch('boto.config.get_value', side_effect=MockValidLargeInts):
      self.assertEqual(
          'debug:INVALID,default_api_version:INVALID,'
          'http_socket_timeout:INVALID,max_retry_delay:INVALID,'
          'num_retries:INVALID,oauth2_refresh_retries:INVALID,'
          'parallel_process_count:INVALID,parallel_thread_count:INVALID,'
          'resumable_threshold:2001,rsync_buffer_lines:2001,'
          'sliced_object_download_max_components:INVALID,'
          'software_update_check_period:INVALID,'
          'tab_completion_timeout:INVALID,task_estimation_threshold:2001',
          self.collector._ValidateAndGetConfigValues())

    # Test that a non-integer return value is invalid.
    def MockNonIntegerValue(_, category):
      if category in int_categories:
        return '10.28'
      return ''
    with mock.patch('boto.config.get_value', side_effect=MockNonIntegerValue):
      self.assertEqual(
          ','.join([category + ':INVALID' for category in int_categories]),
          self.collector._ValidateAndGetConfigValues())

    # Test data size validation.
    def MockDataSizeValue(_, category):
      if category in ('parallel_composite_upload_component_size',
                      'parallel_composite_upload_threshold',
                      'sliced_object_download_component_size',
                      'sliced_object_download_threshold'):
        return '10MiB'
      return ''
    with mock.patch('boto.config.get_value', side_effect=MockDataSizeValue):
      self.assertEqual('parallel_composite_upload_component_size:10485760,'
                       'parallel_composite_upload_threshold:10485760,'
                       'sliced_object_download_component_size:10485760,'
                       'sliced_object_download_threshold:10485760',
                       self.collector._ValidateAndGetConfigValues())

Example 38

Project: 2015.hackatbrown.org
Source File: test_util.py
View license
    def test_defaultFetcher(self):
        """util._defaultFetcher"""
        if mock:

            class Response(object):
                """urllib2.Reponse mock"""
                def __init__(self, url,
                             contenttype, content,
                             exception=None, args=None):
                    self.url = url

                    mt, params = cgi.parse_header(contenttype)
                    self.mimetype = mt
                    self.charset = params.get('charset', None)

                    self.text = content

                    self.exception = exception
                    self.args = args

                def geturl(self):
                    return self.url

                def info(self):
                    mimetype, charset = self.mimetype, self.charset
                    class Info(object):
                        
                        # py2x
                        def gettype(self):
                            return mimetype
                        def getparam(self, name=None):
                            return charset
                        
                        # py 3x
                        get_content_type = gettype
                        get_content_charset = getparam # here always charset!  
                        
                    return Info()

                def read(self):
                    # returns fake text or raises fake exception
                    if not self.exception:
                        return self.text
                    else:
                        raise self.exception(*self.args)

            def urlopen(url,
                        contenttype=None, content=None,
                        exception=None, args=None):
                # return a mock which returns a parameterized Response
                def x(*ignored):
                    if exception:
                        raise exception(*args)
                    else:
                        return Response(url,
                                        contenttype, content,
                                        exception=exception, args=args)
                return x

            urlopenpatch = 'urllib2.urlopen' if basetest.PY2x else 'urllib.request.urlopen' 

            # positive tests
            tests = {
                # content-type, contentstr: encoding, contentstr
                ('text/css', u'€'.encode('utf-8')):
                        (None, u'€'.encode('utf-8')),
                ('text/css;charset=utf-8', u'€'.encode('utf-8')):
                        ('utf-8', u'€'.encode('utf-8')),
                ('text/css;charset=ascii', 'a'):
                        ('ascii', 'a')
            }
            url = 'http://example.com/test.css'
            for (contenttype, content), exp in tests.items():
                @mock.patch(urlopenpatch, new=urlopen(url, contenttype, content))
                def do(url):
                    return _defaultFetcher(url)
                
                self.assertEqual(exp, do(url))

            # wrong mimetype
            @mock.patch(urlopenpatch, new=urlopen(url, 'text/html', 'a'))
            def do(url):
                return _defaultFetcher(url)
            
            self.assertRaises(ValueError, do, url)
            
            # calling url results in fake exception
                            
            # py2 behaves like py3 here: the error is raised via urlopen
            tests = {
                '1': (ValueError, ['invalid value for url']),
                #_readUrl('mailto:a.css')
                'mailto:e4': (urllib2.URLError, ['urlerror']),
                # cannot resolve x, IOError
                'http://x': (urllib2.URLError, ['ioerror']),
            }
            for url, (exception, args) in tests.items():
                @mock.patch(urlopenpatch, new=urlopen(url, exception=exception, args=args))
                def do(url):
                    return _defaultFetcher(url)
                
                self.assertRaises(exception, do, url)

            # py2 differs from py3: py3 raises the error earlier, in Request, not urlopen
            urlrequestpatch = 'urllib2.urlopen' if basetest.PY2x else 'urllib.request.Request' 
            tests = {
                #_readUrl('http://cthedot.de/__UNKNOWN__.css')
                'e2': (urllib2.HTTPError, ['u', 500, 'server error', {}, None]),
                'e3': (urllib2.HTTPError, ['u', 404, 'not found', {}, None]),
            }
            for url, (exception, args) in tests.items():
                @mock.patch(urlrequestpatch, new=urlopen(url, exception=exception, args=args))
                def do(url):
                    return _defaultFetcher(url)
                
                self.assertRaises(exception, do, url)

        else:
            self.assertEqual(False, u'Mock needed for this test')
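
The test picks its patch target per interpreter because the name lives in urllib2 on Python 2 and urllib.request on Python 3, and mock.patch must name the object where it is looked up. The same version-conditional targeting in isolation:

import sys
from unittest import mock

urlopen_target = ('urllib2.urlopen' if sys.version_info[0] == 2
                  else 'urllib.request.urlopen')

with mock.patch(urlopen_target) as mock_urlopen:
    mock_urlopen.return_value.read.return_value = b'body { }'
    if sys.version_info[0] == 2:
        import urllib2 as request
    else:
        from urllib import request
    assert request.urlopen('http://x/test.css').read() == b'body { }'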

Example 39

Project: udocker
Source File: func_tests.py
View license
    @mock.patch('udocker.LocalRepository')
    @mock.patch('udocker.UdockerTools')
    @mock.patch('udocker.Msg')
    def test_do_images(self, mock_msg, mock_utools, mock_localrepo):
        """Test udocker images command"""
        udocker.msg = mock_msg
        udocker.conf = udocker.Config()
        mock_localrepo.return_value.cd_imagerepo.return_value = \
            "/home/user/.udocker/repos/X/latest"
        mock_localrepo.return_value.get_imagerepos.return_value = [
            ('iscampos/openqcd', 'latest'), ('busybox', 'latest')]
        t_argv = ['./udocker.py', "images"]
        with mock.patch.object(sys, 'argv', t_argv):
            # Unprotected
            mock_localrepo.return_value.isprotected_imagerepo\
                .return_value = False
            main = udocker.Main()
            main.execute()
            msg_out = ("busybox:latest"
                       "                                               .")
            find_str(self, msg_out, mock_msg.out.call_args)
            # Protected
            mock_localrepo.return_value.isprotected_imagerepo\
                .return_value = True
            main.execute()
            msg_out = ("busybox:latest"
                       "                                               P")
            find_str(self, msg_out, mock_msg.out.call_args)
        t_argv = ['./udocker.py', "images", "-l"]
        with mock.patch.object(sys, 'argv', t_argv):
            main = udocker.Main()
            main.execute()
            msg_out = "  /home/user/.udocker/repos/X/latest"
            find_str(self, msg_out, mock_msg.out.call_args)
            #
            mock_localrepo.return_value.get_imagerepos.return_value = [
                ('busybox', 'latest')]
            mock_localrepo.return_value.get_layers.return_value = [
                ('/home/jorge/.udocker/repos/busybox/latest/' +
                 'sha256:385e281300cc6d88bdd155e0931fbdfbb1801c2b' +
                 '0265340a40481ee2b733ae66', 675992),
                ('/home/jorge/.udocker/repos/busybox/latest/' +
                 '56ed16bd6310cca65920c653a9bb22de6b235990dcaa174' +
                 '2ff839867aed730e5.layer', 675992),
                ('/home/jorge/.udocker/repos/busybox/latest/' +
                 '56ed16bd6310cca65920c653a9bb22de6b235990dcaa174' +
                 '2ff839867aed730e5.json', 1034),
                ('/home/jorge/.udocker/repos/busybox/latest/' +
                 'bc744c4ab376115cc45c610d53f529dd2d4249ae6b35e5d' +
                 '6e7a96e58863545aa.json', 1155),
                ('/home/jorge/.udocker/repos/busybox/latest/' +
                 'bc744c4ab376115cc45c610d53f529dd2d4249ae6b35e5d' +
                 '6e7a96e58863545aa.layer', 32),
                ('/home/jorge/.udocker/repos/busybox/latest/' +
                 'sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633c' +
                 'b16422d00e8a7c22955b46d4', 32)]
            main.execute()
            msg_out = '    /home/jorge/.udocker/repos/busybox/latest/' +\
                'sha256:a3ed95caeb02ffe68cdd9fd84406680ae93d633cb16' +\
                '422d00e8a7c22955b46d4 ('
            find_str(self, msg_out, mock_msg.out.call_args)

Example 40

Project: nox
Source File: test_main.py
View license
def test_main():
    # No args
    sys.argv = [sys.executable]
    with mock.patch('nox.main.run') as run_mock:
        nox.main.main()
        assert run_mock.called
        config = run_mock.call_args[0][0]
        assert config.noxfile == 'nox.py'
        assert config.envdir.endswith('.nox')
        assert config.sessions is None
        assert config.reuse_existing_virtualenvs is False
        assert config.stop_on_first_error is False
        assert config.posargs == []

    # Long-form args
    sys.argv = [
        sys.executable,
        '--noxfile', 'noxfile.py',
        '--envdir', '.other',
        '--sessions', '1', '2',
        '--reuse-existing-virtualenvs',
        '--stop-on-first-error']
    with mock.patch('nox.main.run') as run_mock:
        nox.main.main()
        assert run_mock.called
        config = run_mock.call_args[0][0]
        assert config.noxfile == 'noxfile.py'
        assert config.envdir.endswith('.other')
        assert config.sessions == ['1', '2']
        assert config.reuse_existing_virtualenvs is True
        assert config.stop_on_first_error is True
        assert config.posargs == []

    # Short-form args
    sys.argv = [
        sys.executable,
        '-f', 'noxfile.py',
        '-s', '1', '2',
        '-r']
    with mock.patch('nox.main.run') as run_mock:
        nox.main.main()
        assert run_mock.called
        config = run_mock.call_args[0][0]
        assert config.noxfile == 'noxfile.py'
        assert config.sessions == ['1', '2']
        assert config.reuse_existing_virtualenvs is True

    sys.argv = [
        sys.executable,
        '-e', '1', '2']
    with mock.patch('nox.main.run') as run_mock:
        nox.main.main()
        assert run_mock.called
        config = run_mock.call_args[0][0]
        assert config.sessions == ['1', '2']

    # Posargs
    sys.argv = [
        sys.executable,
        '1', '2', '3']
    with mock.patch('nox.main.run') as run_mock:
        nox.main.main()
        assert run_mock.called
        config = run_mock.call_args[0][0]
        assert config.posargs == ['1', '2', '3']

    sys.argv = [
        sys.executable,
        '--', '1', '2', '3']
    with mock.patch('nox.main.run') as run_mock:
        nox.main.main()
        assert run_mock.called
        config = run_mock.call_args[0][0]
        assert config.posargs == ['1', '2', '3']

    sys.argv = [
        sys.executable,
        '--', '1', '2', '3', '-f', '--baz']
    with mock.patch('nox.main.run') as run_mock:
        nox.main.main()
        assert run_mock.called
        config = run_mock.call_args[0][0]
        assert config.posargs == ['1', '2', '3', '-f', '--baz']
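
run_mock.call_args[0][0] above digs the config object out of the recorded call: call_args[0] is the tuple of positional arguments and call_args[1] the keyword-argument dict. A quick standalone illustration:

from unittest import mock

run = mock.Mock()
run('config-object', verbose=True)

args, kwargs = run.call_args
assert args == ('config-object',)
assert kwargs == {'verbose': True}
assert run.call_args[0][0] == 'config-object'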

Example 42

Project: QNET
Source File: test_qsd_codegen.py
View license
@mock.patch('qnet.misc.qsd_codegen.compilation_worker',
            return_value='/home/qnet/bin/qsd_test')
def test_compilation_worker(mock_compilation_worker, Sec6_codegen, traj1,
        traj2_10):
    codegen = Sec6_codegen
    codegen.compile(qsd_lib='~/local/lib/libqsd.a',
                qsd_headers='~/local/header', executable='qsd_test',
                path='~/bin', compiler='$CC', compile_options='-g -O0',
                keep_cc=False)
    comp_kwargs = {'executable': 'qsd_test', 'path':'~/bin',
                   'cc_code':str(codegen), 'keep_cc': False,
                   'cmd': ['$CC', '-g', '-O0', '-I~/local/header',
                           '-o', 'qsd_test', 'qsd_test.cc', '-L~/local/lib',
                           '-lqsd']}
    mock_compilation_worker.assert_called_once_with(comp_kwargs)
    operators = OrderedDict([('X1', 'X1.out'), ('X2', 'X2.out'),
                             ('A2', 'A2.out')])
    run_kwargs = {'executable': 'qsd_test', 'workdir': None,
                  'operators': operators, 'keep': False,
                  'seed': TRAJ1_SEED, 'path': '~/bin'}
    qsd_run_worker = 'qnet.misc.qsd_codegen.qsd_run_worker'
    traj1_ID = traj1.ID
    traj2_10_ID = traj2_10.ID

    traj_IDs = []  # all the IDs we generate by calls to run()

    # The first call to run()
    assert codegen.traj_data is None
    with mock.patch(qsd_run_worker, return_value=traj1) as mock_runner:
        traj_first = codegen.run(seed=TRAJ1_SEED)
        traj_IDs.append(traj_first.ID)
    mock_runner.assert_called_once_with(run_kwargs)
    assert codegen.traj_data == traj_first

    # assert that same seed raises early exception
    with mock.patch(qsd_run_worker, return_value=traj2_10) as mock_runner:
        with pytest.raises(ValueError) as exc_info:
            traj = codegen.run(seed=TRAJ1_SEED)
        assert "already in record" in str(exc_info.value)

    # The second call to run()
    with mock.patch(qsd_run_worker, return_value=traj2_10) as mock_runner:
        traj_second = codegen.run(seed=TRAJ2_SEED)
        traj_IDs.append(traj_second.ID)
    run_kwargs['seed'] = TRAJ2_SEED
    mock_runner.assert_called_once_with(run_kwargs)
    assert codegen.traj_data == traj_first + traj_second
    for col, arr in codegen.traj_data.table.items():
        delta = arr - ((traj_first.table[col]+9*traj_second.table[col])/10.0)
        assert np.max(np.abs(delta)) < 1.0e-12

    # Repeated calls to run with auto-seeding
    for call in range(5):
        def side_effect(kwargs):
            return fake_traj(traj1, traj1.new_id(), kwargs['seed'])
        with mock.patch(qsd_run_worker, side_effect=side_effect) \
                as mock_runner:
            traj = codegen.run()
            traj_IDs.append(traj.ID)
    assert len(codegen.traj_data.record) == 7

    # Check that the bug is fixed where codegen.traj_data held a reference
    # to traj1 instead of a copy, so the second call to run() mutated traj1
    assert traj1.ID == traj1_ID
    assert traj2_10.ID == traj2_10_ID
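
Example 42 alternates between the two ways mock.patch can shape a result: return_value for a fixed answer (the compilation_worker patch) and side_effect for a per-call answer (the auto-seeding loop). A minimal sketch of both forms, using stdlib targets purely for illustration:

import os
from unittest import mock

# An iterable side_effect yields one return value per call
with mock.patch('os.getcwd', side_effect=['/a', '/b']):
    assert os.getcwd() == '/a'
    assert os.getcwd() == '/b'

# A callable side_effect receives the call's arguments, and whatever it
# returns becomes the mock's return value
with mock.patch('os.path.join', side_effect=lambda *parts: '!'.join(parts)):
    assert os.path.join('x', 'y') == 'x!y'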

Example 43

Project: rtv
Source File: test_terminal.py
View license
def test_open_link_subprocess(terminal):

    url = 'http://www.test.com'
    terminal.config['enable_media'] = True

    with mock.patch('time.sleep'),                            \
            mock.patch('os.system'),                          \
            mock.patch('subprocess.Popen') as Popen,          \
            mock.patch('six.moves.input') as six_input,       \
            mock.patch.object(terminal, 'get_mailcap_entry'):

        six_input.return_value = 'y'

        def reset_mock():
            six_input.reset_mock()
            os.system.reset_mock()
            terminal.stdscr.subwin.addstr.reset_mock()
            Popen.return_value.communicate.return_value = '', 'stderr message'
            Popen.return_value.poll.return_value = 0
            Popen.return_value.wait.return_value = 0

        def get_error():
            # Check if an error message was printed to the terminal
            status = 'Program exited with status'.encode('utf-8')
            return any(status in args[0][2] for args in
                       terminal.stdscr.subwin.addstr.call_args_list)

        # Non-blocking success
        reset_mock()
        entry = ('echo ""', 'echo %s')
        terminal.get_mailcap_entry.return_value = entry
        terminal.open_link(url)
        assert not six_input.called
        assert not get_error()

        # Non-blocking failure
        reset_mock()
        Popen.return_value.poll.return_value = 127
        Popen.return_value.wait.return_value = 127
        entry = ('fake .', 'fake %s')
        terminal.get_mailcap_entry.return_value = entry
        terminal.open_link(url)
        assert not six_input.called
        assert get_error()

        # needsterminal success
        reset_mock()
        entry = ('echo ""', 'echo %s; needsterminal')
        terminal.get_mailcap_entry.return_value = entry
        terminal.open_link(url)
        assert not six_input.called
        assert not get_error()

        # needsterminal failure
        reset_mock()
        Popen.return_value.poll.return_value = 127
        Popen.return_value.wait.return_value = 127
        entry = ('fake .', 'fake %s; needsterminal')
        terminal.get_mailcap_entry.return_value = entry
        terminal.open_link(url)
        assert not six_input.called
        assert get_error()

        # copiousoutput success
        reset_mock()
        entry = ('echo ""', 'echo %s; needsterminal; copiousoutput')
        terminal.get_mailcap_entry.return_value = entry
        terminal.open_link(url)
        assert six_input.called
        assert not get_error()

        # copiousoutput failure
        reset_mock()
        Popen.return_value.poll.return_value = 127
        Popen.return_value.wait.return_value = 127
        entry = ('fake .', 'fake %s; needsterminal; copiousoutput')
        terminal.get_mailcap_entry.return_value = entry
        terminal.open_link(url)
        assert six_input.called
        assert get_error()
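
Example 43 stacks five patches in a single with-statement, mixing dotted-string targets (mock.patch) with an attribute on a live object (mock.patch.object). A reduced sketch of the same shape; FakeTerminal is a hypothetical stand-in for the rtv terminal fixture:

import os
import subprocess
from unittest import mock

class FakeTerminal(object):
    def get_mailcap_entry(self, url):
        raise RuntimeError('should have been patched away')

term = FakeTerminal()
with mock.patch('subprocess.Popen') as popen, \
        mock.patch('os.system'), \
        mock.patch.object(term, 'get_mailcap_entry') as entry:
    entry.return_value = ('echo ""', 'echo %s')
    assert term.get_mailcap_entry('http://x') == ('echo ""', 'echo %s')
    assert subprocess.Popen is popen  # module attribute now points at the mock
    os.system('true')                 # patched: no real process is spawned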

Example 45

Project: treadmill
Source File: run_test.py
View license
    @mock.patch('pwd.getpwnam', mock.Mock(
        return_value=namedtuple(
            'pwnam',
            ['pw_uid', 'pw_dir', 'pw_shell']
        )(3, '/', '/bin/sh')))
    @mock.patch('treadmill.fs.mkdir_safe', mock.Mock())
    @mock.patch('treadmill.fs.mount_bind', mock.Mock())
    @mock.patch('treadmill.supervisor.create_service', mock.Mock())
    @mock.patch('treadmill.utils.create_script', mock.Mock())
    @mock.patch('treadmill.utils.touch', mock.Mock())
    @mock.patch('treadmill.utils.rootdir',
                mock.Mock(return_value='/test_treadmill'))
    def test__create_supervision_tree(self):
        """Test creation of the supervision tree."""
        treadmill.subproc.EXECUTABLES = {
            'chroot': '/bin/ls',
            'pid1': '/bin/ls',
        }
        # Accesses the protected function _create_supervision_tree
        # pylint: disable=W0212
        app = utils.to_obj(
            {
                'proid': 'myproid',
                'name': 'myproid.test#0',
                'uniqueid': 'ID1234',
                'environment': 'prod',
                'services': [
                    {
                        'name': 'command1',
                        'command': '/path/to/command',
                        'restart': {
                            'limit': 3,
                            'interval': 60,
                        },
                    }, {
                        'name': 'command2',
                        'command': '/path/to/other/command',
                        'restart': {
                            'limit': 3,
                            'interval': 60,
                        },
                    }
                ],
                'system_services': [
                    {
                        'name': 'command3',
                        'command': '/path/to/sbin/command',
                        'restart': {
                            'limit': 5,
                            'interval': 60,
                        },
                    }, {
                        'name': 'command4',
                        'command': '/path/to/other/sbin/command',
                        'restart': {
                            'limit': 5,
                            'interval': 60,
                        },
                    }
                ],
                'vring': {
                    'cells': ['a', 'b']
                },
            }
        )
        base_dir = '/some/dir'
        events_dir = '/some/dir/appevents'

        treadmill.appmgr.run._create_supervision_tree(
            base_dir,
            events_dir,
            app,
        )

        treadmill.fs.mkdir_safe.assert_has_calls([
            mock.call('/some/dir/root/services'),
            mock.call('/some/dir/services'),
            mock.call('/some/dir/services/command1/log'),
            mock.call('/some/dir/services/command2/log'),
            mock.call('/some/dir/services/command3/log'),
            mock.call('/some/dir/services/command4/log'),
            mock.call('/some/dir/sys/vring.a'),
            mock.call('/some/dir/sys/vring.a/log'),
            mock.call('/some/dir/sys/vring.b'),
            mock.call('/some/dir/sys/vring.b/log'),
            mock.call('/some/dir/sys/monitor'),
            mock.call('/some/dir/sys/monitor/log'),
            mock.call('/some/dir/sys/register'),
            mock.call('/some/dir/sys/register/log'),
            mock.call('/some/dir/sys/start_container'),
            mock.call('/some/dir/sys/start_container/log'),
        ])
        treadmill.fs.mount_bind.assert_called_with(
            '/some/dir/root', '/services', '/some/dir/services',
        )

        pwd.getpwnam.assert_has_calls(
            [
                mock.call('myproid'),
                mock.call('root')
            ],
            any_order=True
        )

        treadmill.supervisor.create_service.assert_has_calls([
            # user services
            mock.call('/some/dir/services',
                      'myproid',
                      mock.ANY, mock.ANY,
                      'command1',
                      '/path/to/command',
                      as_root=True,
                      down=True,
                      envdir='/environ',
                      env='prod'),
            mock.call('/some/dir/services',
                      'myproid',
                      mock.ANY, mock.ANY,
                      'command2',
                      '/path/to/other/command',
                      as_root=True,
                      down=True,
                      envdir='/environ',
                      env='prod'),
            # system services
            mock.call('/some/dir/services',
                      'root',
                      mock.ANY, mock.ANY,
                      'command3',
                      '/path/to/sbin/command',
                      as_root=True,
                      down=False,
                      envdir='/environ',
                      env='prod'),
            mock.call('/some/dir/services',
                      'root',
                      mock.ANY, mock.ANY,
                      'command4',
                      '/path/to/other/sbin/command',
                      as_root=True,
                      down=False,
                      envdir='/environ',
                      env='prod')
        ])

        treadmill.utils.create_script.assert_has_calls([
            mock.call('/some/dir/services/command1/log/run', 'logger.run'),
            mock.call('/some/dir/services/command2/log/run', 'logger.run'),
            mock.call('/some/dir/services/command3/log/run', 'logger.run'),
            mock.call('/some/dir/services/command4/log/run', 'logger.run'),
            mock.call('/some/dir/sys/vring.a/run',
                      'supervisor.run_sys',
                      cmd=mock.ANY),
            mock.call('/some/dir/sys/vring.a/log/run',
                      'logger.run'),
            mock.call('/some/dir/sys/vring.b/run',
                      'supervisor.run_sys',
                      cmd=mock.ANY),
            mock.call('/some/dir/sys/vring.b/log/run',
                      'logger.run'),
            mock.call('/some/dir/sys/monitor/run',
                      'supervisor.run_sys',
                      cmd=mock.ANY),
            mock.call('/some/dir/sys/monitor/log/run',
                      'logger.run'),
            mock.call('/some/dir/sys/register/run',
                      'supervisor.run_sys',
                      cmd=mock.ANY),
            mock.call('/some/dir/sys/register/log/run',
                      'logger.run'),
            mock.call(
                '/some/dir/sys/start_container/run',
                'supervisor.run_sys',
                cmd=('/bin/ls /some/dir/root /bin/ls '
                     '-m -p -i s6-svscan /services')
            ),
            mock.call('/some/dir/sys/start_container/log/run',
                      'logger.run'),
        ])
        treadmill.utils.touch.assert_has_calls([
            mock.call('/some/dir/sys/start_container/down'),
        ])
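
Unlike the earlier examples, this test passes a ready-made mock.Mock() as the second argument to each decorator, so nothing extra is injected into the test's signature; the mocks are reached through the patched modules instead (pwd.getpwnam, treadmill.fs.mkdir_safe, and so on). A minimal sketch of that two-argument form; os.getcwd and '/test_dir' are arbitrary stand-ins:

import os
from unittest import mock

# With an explicit replacement ('new'), patch does NOT pass the mock
# into the decorated function; access it via the patched module instead
@mock.patch('os.getcwd', mock.Mock(return_value='/test_dir'))
def test_patch_with_explicit_new():
    assert os.getcwd() == '/test_dir'
    os.getcwd.assert_called_once_with()

test_patch_with_explicit_new()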

Example 47

Project: airmozilla
Source File: test_videoinfo.py
View license
    @mock.patch('airmozilla.manage.vidly.logging')
    @mock.patch('airmozilla.manage.vidly.urllib2')
    @mock.patch('requests.head')
    @mock.patch('requests.get')
    @mock.patch('subprocess.Popen')
    def test_fetch_duration_save_locally(
        self, mock_popen, rget, rhead, p_urllib2, p_logging
    ):

        def mocked_urlopen(request):
            return StringIO("""
            <?xml version="1.0"?>
            <Response>
              <Message>OK</Message>
              <MessageCode>7.4</MessageCode>
              <Success>
                <MediaShortLink>xxx999</MediaShortLink>
                <Token>MXCsxINnVtycv6j02ZVIlS4FcWP</Token>
              </Success>
            </Response>
            """)

        p_urllib2.urlopen = mocked_urlopen

        def mocked_head(url, **options):
            if 'file.mpg' in url:
                return Response(
                    '',
                    200
                )
            return Response(
                '',
                302,
                headers={
                    'Location': 'https://otherplace.com/file.mpg'
                }
            )

        rhead.side_effect = mocked_head

        def mocked_get(url, **options):
            return Response(
                '0' * 100000,
                200,
                headers={
                    'Content-Length': 100000
                }
            )

        rget.side_effect = mocked_get

        ffmpeged_urls = []

        def mocked_popen(command, **kwargs):

            url = command[2]
            ffmpeged_urls.append(url)

            class Inner:
                def communicate(self):

                    out = ''
                    if 'abc123' in url:
                        err = "bla bla"
                    elif 'xyz123' in url:
                        err = """
            Duration: 00:19:17.47, start: 0.000000, bitrate: 1076 kb/s
                        """
                    else:
                        raise NotImplementedError(url)
                    return out, err

            return Inner()

        mock_popen.side_effect = mocked_popen

        event = Event.objects.get(title='Test event')
        template = Template.objects.create(
            name='Vid.ly Something',
            content="{{ tag }}"
        )
        event.template = template
        event.template_environment = {'tag': 'abc123'}
        event.save()
        assert event.duration is None

        videoinfo.fetch_durations(save_locally=True)
        event = Event.objects.get(id=event.id)
        assert event.duration is None

        ffmpeged_url, = ffmpeged_urls
        ok_(ffmpeged_url.endswith('abc123.mp4'))

        # need to change to a different tag
        # and make sure it has a VidlySubmission
        VidlySubmission.objects.create(
            event=event,
            url='https://s3.com/asomething.mov',
            tag='xyz123',
            hd=True,
        )
        event.template_environment = {'tag': 'xyz123'}
        event.save()
        videoinfo.fetch_durations(save_locally=True)
        event = Event.objects.get(id=event.id)
        eq_(event.duration, 1157)

        ffmpeged_url, ffmpeged_url2 = ffmpeged_urls
        ok_(ffmpeged_url.endswith('abc123.mp4'))
        ok_(ffmpeged_url2.endswith('xyz123.mp4'))
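
The mocked_popen helper above demonstrates a recurring trick: the side_effect function records each command line it receives and returns a hand-rolled object whose communicate() yields canned ffmpeg output. A stripped-down sketch of the same idea; the file name and duration string are invented:

import subprocess
from unittest import mock

seen = []

def fake_popen(command, **kwargs):
    # Record the command and return a stand-in process object
    seen.append(command)
    proc = mock.Mock()
    proc.communicate.return_value = ('', 'Duration: 00:19:17.47, ...')
    return proc

with mock.patch('subprocess.Popen', side_effect=fake_popen):
    p = subprocess.Popen(['ffmpeg', '-i', 'clip.mp4'], stderr=subprocess.PIPE)
    out, err = p.communicate()

assert seen == [['ffmpeg', '-i', 'clip.mp4']]
assert 'Duration' in err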

Example 48

Project: airmozilla
Source File: test_videoinfo.py
View license
    @mock.patch('airmozilla.manage.vidly.logging')
    @mock.patch('airmozilla.manage.vidly.urllib2')
    @mock.patch('requests.head')
    @mock.patch('requests.get')
    @mock.patch('subprocess.Popen')
    def test_fetch_duration_save_locally_some(
        self, mock_popen, rget, rhead, p_urllib2, p_logging
    ):
        """This time we're going to have two events to ponder.
        One is public and one is staff only.
        When `save_locally_some` is passed, it should run
        `ffmpeg -i http://url...` on the public one and
        `wget https://...; ffmpeg -i /local/file.mpg` on the private one.
        """

        def mocked_urlopen(request):
            return StringIO("""
            <?xml version="1.0"?>
            <Response>
              <Message>OK</Message>
              <MessageCode>7.4</MessageCode>
              <Success>
                <MediaShortLink>xxx999</MediaShortLink>
                <Token>MXCsxINnVtycv6j02ZVIlS4FcWP</Token>
              </Success>
            </Response>
            """)

        p_urllib2.urlopen = mocked_urlopen

        def mocked_head(url, **options):
            # print "HEAD URL", url
            if 'file.mp4' in url:
                return Response(
                    '',
                    200
                )
            return Response(
                '',
                302,
                headers={
                    'Location': 'https://otherplace.com/file.mp4'
                }
            )

        rhead.side_effect = mocked_head

        def mocked_get(url, **options):
            # print "GET URL", url
            return Response(
                '0' * 100000,
                200,
                headers={
                    'Content-Length': 100000
                }
            )

        rget.side_effect = mocked_get

        ffmpeged_urls = []

        def mocked_popen(command, **kwargs):

            url = command[2]
            ffmpeged_urls.append(url)

            class Inner:
                def communicate(self):
                    out = ''
                    if 'otherplace.com/file.mp4' in url:
                        err = """
            Duration: 01:05:00.47, start: 0.000000, bitrate: 1076 kb/s
                        """
                    elif 'xyz123' in url:
                        err = """
            Duration: 00:19:17.47, start: 0.000000, bitrate: 1076 kb/s
                        """
                    else:
                        raise NotImplementedError(url)
                    return out, err

            return Inner()

        mock_popen.side_effect = mocked_popen

        event = Event.objects.get(title='Test event')
        template = Template.objects.create(
            name='Vid.ly Something',
            content="{{ tag }}"
        )
        event.template = template
        event.template_environment = {'tag': 'abc123'}
        assert event.privacy == Event.PRIVACY_PUBLIC
        event.save()

        event2 = Event.objects.create(
            slug='slug2',
            title=event.title,
            start_time=event.start_time,
            placeholder_img=event.placeholder_img,
            privacy=Event.PRIVACY_COMPANY,
            template=template,
            template_environment={'tag': 'xyz123'},
        )

        videoinfo.fetch_durations(save_locally_some=True)
        event = Event.objects.get(id=event.id)
        eq_(event.duration, 3900)

        event2 = Event.objects.get(id=event2.id)
        eq_(event2.duration, 1157)

        ffmpeged_urls.sort()
        ffmpeged_url1, ffmpeged_url2 = ffmpeged_urls
        ok_(ffmpeged_url1.endswith('xyz123.mp4'))
        ok_(ffmpeged_url1.startswith('/'))
        ok_(ffmpeged_url2.endswith('file.mp4'))
        ok_(ffmpeged_url2.startswith('http://'))
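
One detail worth calling out in these airmozilla tests: stacked @mock.patch decorators inject their mocks bottom-up, which is why subprocess.Popen, the decorator closest to the function, arrives as the first parameter (mock_popen), while airmozilla.manage.vidly.logging, the outermost, arrives last (p_logging). A two-decorator sketch of the ordering:

import os
from unittest import mock

@mock.patch('os.getcwd')   # outermost: injected second
@mock.patch('os.listdir')  # closest to the function: injected first
def test_decorator_order(mock_listdir, mock_getcwd):
    os.listdir('.')
    mock_listdir.assert_called_once_with('.')
    assert not mock_getcwd.called

test_decorator_order()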
