mock.MagicMock

Here are examples of the Python API mock.MagicMock, taken from open source projects. Each example shows a real-world usage pattern of MagicMock in a test suite.

187 Examples
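
Before diving into the project code, here is a minimal standalone sketch of the core MagicMock behaviour these examples rely on (shown with the standard library's unittest.mock, which provides the same class as the mock backport):

from unittest.mock import MagicMock

m = MagicMock()
m.helper.compute.return_value = 42        # attributes are created on first access
assert m.helper.compute("arg") == 42
m.helper.compute.assert_called_once_with("arg")

m.__len__.return_value = 3                # MagicMock also preconfigures magic methods
assert len(m) == 3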

Example 101

Project: ggrc-core Source File: test_query_helper.py
  def test_expression_keys(self):
    """ test expression keys function

    Make sure it works with:
      empty query
      simple query
      complex query
      invalid complex query
    """
    # pylint: disable=protected-access
    # needed for testing protected function inside the query helper
    query = mock.MagicMock()
    helper = query_helper.QueryHelper(query)

    expressions = [
        (set(), {}),
        (set(["key_1"]), {
            "left": "key_1",
            "op": {"name": "="},
            "right": "",
        }),
        (set(["key_1", "key_2"]), {
            "left": {
                "left": "key_2",
                "op": {"name": "="},
                "right": "",
            },
            "op": {"name": "AND"},
            "right": {
                "left": "key_1",
                "op": {"name": "!="},
                "right": "",
            },
        }),
        (set(), {
            "left": {
                "left": "5",
                "op": {"name": "="},
                "right": "",
            },
            "op": {"name": "="},
            "right": {
                "left": "key_1",
                "op": {"name": "!="},
                "right": "",
            },
        }),
    ]

    for expected_result, expression in expressions:
      self.assertEqual(expected_result, helper._expression_keys(expression))

Example 102

Project: asiaq Source File: test_disco_vpc_peerings.py
    @patch('disco_aws_automation.disco_vpc.DiscoMetaNetwork')
    @patch('disco_aws_automation.disco_vpc_peerings.read_config')
    @patch('boto3.client')
    def test_update_peerings_with_existing_ones(
            self, boto3_client_mock, config_mock, meta_network_mock):
        """ Verify new peering connections are created properly while there are existing ones """

        config_mock.return_value = get_mock_config({
            'peerings': {
                'connection_1': 'mock-vpc-1:sandbox/intranet mock-vpc-2:sandbox/intranet',
                'connection_2': 'mock-vpc-1:sandbox/intranet mock-vpc-3:sandbox/intranet'
            }
        })

        # pylint: disable=C0103
        def _describe_vpc_peering_connections_mock(Filters):
            count = 0
            for peering_filter in Filters:
                if (peering_filter['Name'] == 'accepter-vpc-info.vpc-id' and
                        peering_filter['Values'][0] == 'mock_vpc_1_id') or \
                    (peering_filter['Name'] == 'requester-vpc-info.vpc-id' and
                     peering_filter['Values'][0] == 'mock_vpc_2_id'):
                    count += 1

            if count == 2 or \
                    (len(Filters) == 1 and
                     Filters[0]['Name'] == 'accepter-vpc-info.vpc-id' and
                     Filters[0]['Values'][0] == 'mock_vpc_1_id'):
                return {'VpcPeeringConnections': [
                    {'Status': {'Code': 'active'},
                     'VpcPeeringConnectionId': 'mock_vpc_peering_id_existing',
                     'AccepterVpcInfo': {'VpcId': 'mock_vpc_1_id'},
                     'RequesterVpcInfo': {'VpcId': 'mock_vpc_2_id'}}]}
            else:
                return {'VpcPeeringConnections': []}

        network_1_mock = MagicMock()
        network_1_mock.network_cidr = '10.0.23.23/23'
        network_1_mock.name = 'intranet'
        network_2_mock = MagicMock()
        network_2_mock.network_cidr = '10.0.123.123/23'
        network_2_mock.name = 'intranet'
        network_3_mock = MagicMock()
        network_3_mock.network_cidr = '10.2.123.123/23'
        network_3_mock.name = 'intranet'

        def _mock_meta_network(network, vpc):
            if vpc.vpc['VpcId'] == 'mock_vpc_1_id':
                return network_1_mock
            elif vpc.vpc['VpcId'] == 'mock_vpc_2_id':
                return network_2_mock
            elif vpc.vpc['VpcId'] == 'mock_vpc_3_id':
                return network_3_mock
            return None

        client_mock = MagicMock()
        client_mock.describe_vpc_peering_connections.side_effect = _describe_vpc_peering_connections_mock
        client_mock.create_vpc_peering_connection.return_value = {
            'VpcPeeringConnection': {'VpcPeeringConnectionId': 'mock_vpc_peering_id_new'}}
        client_mock.describe_vpcs.side_effect = _describe_vpcs_mock
        self.mock_vpc.boto3_ec2.describe_vpcs.side_effect = _describe_vpcs_mock
        self.mock_vpc.boto3_ec2.describe_route_tables.return_value = {
            'RouteTables': [{'Tags': [{'Key': 'Name', 'Value': 'mock-vpc-1_intranet'}],
                             'Routes': [{'VpcPeeringConnectionId': 'mock_vpc_peering_id_existing',
                                         'DestinationCidrBlock': network_2_mock.network_cidr}]},
                            {'Tags': [{'Key': 'Name', 'Value': 'mock-vpc-2_intranet'}],
                             'Routes': [{'VpcPeeringConnectionId': 'mock_vpc_peering_id_existing',
                                         'DestinationCidrBlock': network_1_mock.network_cidr}]}]}
        boto3_client_mock.return_value = client_mock

        meta_network_mock.side_effect = _mock_meta_network
        # End setting up test

        # Calling method under test
        self.disco_vpc_peerings.update_peering_connections()

        # Asserting correct behavior
        client_mock.create_vpc_peering_connection.assert_called_once_with(
            PeerVpcId='mock_vpc_3_id', VpcId='mock_vpc_1_id')
        client_mock.accept_vpc_peering_connection.assert_called_once_with(
            VpcPeeringConnectionId='mock_vpc_peering_id_new')
        expected_calls_network_1 = [call('mock_vpc_peering_id_new',
                                         str(network_3_mock.network_cidr)),
                                    call('mock_vpc_peering_id_existing',
                                         str(network_2_mock.network_cidr))]
        network_1_mock.create_peering_route.assert_has_calls(
            expected_calls_network_1)

        network_2_mock.create_peering_route.assert_called_once_with(
            'mock_vpc_peering_id_existing', str(network_1_mock.network_cidr))

        network_3_mock.create_peering_route.assert_called_once_with(
            'mock_vpc_peering_id_new', str(network_1_mock.network_cidr))
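
Two MagicMock features carry this asiaq test: assigning a plain function to side_effect, so the return value is computed from the call's arguments, and assert_has_calls, which checks an ordered sequence of recorded calls. A minimal standalone sketch of both (the client and filter names below are illustrative, not asiaq's):

from unittest.mock import MagicMock, call

client = MagicMock()

def fake_describe_vpcs(Filters):
    # side_effect is called with the same arguments as the mock itself
    if Filters and Filters[0]["Name"] == "vpc-id":
        return {"Vpcs": [{"VpcId": "vpc-123"}]}
    return {"Vpcs": []}

client.describe_vpcs.side_effect = fake_describe_vpcs

assert client.describe_vpcs(Filters=[{"Name": "vpc-id"}])["Vpcs"]
assert client.describe_vpcs(Filters=[])["Vpcs"] == []
client.describe_vpcs.assert_has_calls(
    [call(Filters=[{"Name": "vpc-id"}]), call(Filters=[])])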

Example 103

Project: dd-agent Source File: test_process.py
    def test_relocated_procfs(self):
        from utils.platform import Platform
        import tempfile
        import shutil
        import uuid

        already_linux = Platform.is_linux()
        unique_process_name = str(uuid.uuid4())
        my_procfs = tempfile.mkdtemp()

        def _fake_procfs(arg, root=my_procfs):
            for key, val in arg.iteritems():
                path = os.path.join(root, key)
                if isinstance(val, dict):
                    os.mkdir(path)
                    _fake_procfs(val, path)
                else:
                    with open(path, "w") as f:
                        f.write(str(val))
        _fake_procfs({
            '1': {
                'status': (
                    "Name:\t%s\nThreads:\t1\n"
                ) % unique_process_name,
                'stat': ('1 (%s) S 0 1 1 ' + ' 0' * 46) % unique_process_name,
                'cmdline': unique_process_name,

            },
            'stat': (
                "cpu  13034 0 18596 380856797 2013 2 2962 0 0 0\n"
                "btime 1448632481\n"
            ),
        })

        config = {
            'init_config': {
                'procfs_path': my_procfs
            },
            'instances': [{
                'name': 'moved_procfs',
                'search_string': [unique_process_name],
                'exact_match': False,
                'ignored_denied_access': True,
                'thresholds': {'warning': [1, 10], 'critical': [1, 100]},
            }]
        }

        version = int(psutil.__version__.replace(".", ""))
        try:
            def import_mock(name, i_globals={}, i_locals={}, fromlist=[], level=-1, orig_import=__import__):
                # _psutil_linux and _psutil_posix are the C bindings; use a mock for those
                if name in ('_psutil_linux', '_psutil_posix') or level >= 1 and ('_psutil_linux' in fromlist or '_psutil_posix' in fromlist):
                    m = MagicMock()
                    # the import system will ask us for our own name
                    m._psutil_linux = m
                    m._psutil_posix = m
                    # there's a version safety check in psutil/__init__.py; this skips it
                    m.version = version
                    return m
                return orig_import(name, i_globals, i_locals, fromlist, level)

            # contextlib.nested is deprecated in favor of `with MGR1, MGR2, ...`, but we
            # have too many mocks to fit on one line, and backslash line continuation is
            # not flake8 compliant even when semantically required (as here). patch() is
            # unlikely to raise errors that would be suppressed, so the main downside of
            # contextlib.nested is avoided.
            with contextlib.nested(patch('sys.platform', 'linux'),
                                   patch('socket.AF_PACKET', create=True),
                                   patch('__builtin__.__import__', side_effect=import_mock)):
                if not already_linux:
                    # Reloading psutil fails on linux, but we only need to do so if we didn't start out on a linux platform
                    reload(psutil)
                assert Platform.is_linux()

                self.run_check(config, mocks={'get_pagefault_stats': noop_get_pagefault_stats})
        finally:
            shutil.rmtree(my_procfs)
            if not already_linux:
                # restore the original psutil that doesn't have our mocks
                reload(psutil)
            else:
                psutil.PROCFS_PATH = '/proc'

        expected_tags = self.generate_expected_tags(config['instances'][0])
        self.assertServiceCheckOK('process.up', count=1, tags=['process:moved_procfs'])

        self.assertMetric('system.processes.number', at_least=1, tags=expected_tags)
        self.assertMetric('system.processes.threads', at_least=1, tags=expected_tags)
        self.assertMetric('system.processes.run_time.avg', at_least=1, tags=expected_tags)
        self.assertMetric('system.processes.run_time.max', at_least=1, tags=expected_tags)
        self.assertMetric('system.processes.run_time.min', at_least=1, tags=expected_tags)

        self.coverage_report()
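
The import_mock trick above works because a single MagicMock can impersonate an entire extension module: attribute access never fails, and setting m._psutil_linux = m lets one object satisfy both a plain import and a from-import. A reduced sketch of that idea (the proc_cpu_times call is illustrative, not dd-agent's code):

from unittest.mock import MagicMock

fake_module = MagicMock()
fake_module._psutil_linux = fake_module   # the same object answers submodule lookups
fake_module.version = 123                 # satisfies the version safety check mentioned above

# Code using the fake "module" can call anything on it without an ImportError:
fake_module.proc_cpu_times(1)
fake_module.proc_cpu_times.assert_called_once_with(1)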

Example 104

Project: evelink Source File: test_eve_who.py
    def test_member_list(self):
        mock_fetch = mock.MagicMock()
        mock_fetch.return_value = """
                {"info":{
                    "corporation_id":"869043665",
                    "name":"Woopatang",
                    "member_count":"3"
                    },
                "characters":[
                    {
                    "character_id":"403163173",
                    "corporation_id":"869043665",
                    "alliance_id":"99001433",
                    "name":"Aeryn Tiberius"
                    },
                    {
                    "character_id":"149932493",
                    "corporation_id":"869043665",
                    "alliance_id":"99001433",
                    "name":"Agamemon"
                    },
                    {
                    "character_id":"90464284",
                    "corporation_id":"869043665",
                    "alliance_id":"99001433",
                    "name":"Aidera Boirelle"
                    }
                ]}
            """.strip()

        evewho = evelink_evewho.EVEWho(url_fetch_func=mock_fetch)
        results = evewho._member_list(869043665, 'corplist')

        self.assertEqual(results, [
            {
                'alli_id': 99001433,
                'char_id': 403163173,
                'name': 'Aeryn Tiberius',
                'corp_id': 869043665
            },
            {
                'alli_id': 99001433,
                'char_id': 149932493,
                'name': 'Agamemon',
                'corp_id': 869043665
            },
            {
                'alli_id': 99001433,
                'char_id': 90464284,
                'name': 'Aidera Boirelle',
                'corp_id': 869043665
            }
        ])

        fetch_query_dict = parse_qs(
                urlparse(mock_fetch.mock_calls[0][1][0]).query)
        expected_query_dict = parse_qs('type=corplist&id=869043665&page=0')

        self.assertEqual(fetch_query_dict, expected_query_dict)
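
Pulling the URL out of mock_calls[0][1][0] works because each recorded call is a (name, args, kwargs) triple; call_args exposes the same data for the most recent call. A minimal sketch:

from unittest.mock import MagicMock

fetch = MagicMock(return_value="{}")
fetch("https://example.com/api?id=1")

name, args, kwargs = fetch.mock_calls[0]   # ('', ('https://example.com/api?id=1',), {})
assert args[0] == "https://example.com/api?id=1"
assert fetch.call_args[0][0] == args[0]    # call_args holds the most recent call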

Example 105

Project: edx-analytics-pipeline Source File: test_total_enrollments.py
    def run_task(self, registrations, enrollments, date, weeks, offset=None, history=None, blacklist=None):
        """
        Run task with fake targets.

        Returns:
            the task output as a pandas dataframe.
        """

        parsed_date = datetime.datetime.strptime(date, '%Y-%m-%d').date()

        # Make offsets None if it was not specified.
        task = WeeklyAllUsersAndEnrollments(
            name='fake_name',
            n_reduce_tasks="fake_n_reduce_tasks",
            offsets='fake_offsets' if offset else None,
            history='fake_history' if history else None,
            destination='fake_destination',
            date=parsed_date,
            weeks=weeks,
            credentials=None,
            blacklist=blacklist
        )

        # Mock the input and output targets

        def reformat(string):
            """Reformat string to make it like a TSV."""
            return textwrap.dedent(string).strip().replace(' ', '\t')

        if enrollments is None:
            enrollments = """
                course_1 2013-03-01 1
                course_1 2013-03-30 2
                course_2 2013-03-07 1
                course_2 2013-03-08 1
                course_2 2013-03-10 1
                course_2 2013-03-13 1
                course_3 2013-03-15 1
                course_3 2013-03-18 1
                course_3 2013-03-19 1
                """

        input_targets = {
            'enrollments': FakeTarget(value=reformat(enrollments)),
            'registrations': FakeTarget(value=reformat(registrations))
        }

        # Mock offsets only if specified.
        if offset:
            input_targets.update({'offsets': FakeTarget(value=reformat(offset))})

        # Mock history only if specified.
        if history:
            input_targets.update({'history': FakeTarget(value=reformat(history))})

        # Mock blacklist only if specified.
        if blacklist:
            input_targets.update({'blacklist': FakeTarget(value=reformat(blacklist))})

        task.input = MagicMock(return_value=input_targets)

        output_target = FakeTarget()
        task.output = MagicMock(return_value=output_target)

        # Run the task and parse the output into a pandas dataframe

        task.run()

        data = output_target.buffer.read()
        result = pandas.read_csv(StringIO(data),
                                 na_values=['-'],
                                 index_col=self.row_label('header'))

        return result

Example 106

Project: cgstudiomap Source File: testmock.py
    def test_mock_calls(self):
        mock = MagicMock()

        # need to do this because MagicMock.mock_calls used to just return
        # a MagicMock which also returned a MagicMock when __eq__ was called
        self.assertIs(mock.mock_calls == [], True)

        mock = MagicMock()
        mock()
        expected = [('', (), {})]
        self.assertEqual(mock.mock_calls, expected)

        mock.foo()
        expected.append(call.foo())
        self.assertEqual(mock.mock_calls, expected)
        # intermediate mock_calls work too
        self.assertEqual(mock.foo.mock_calls, [('', (), {})])

        mock = MagicMock()
        mock().foo(1, 2, 3, a=4, b=5)
        expected = [
            ('', (), {}), ('().foo', (1, 2, 3), dict(a=4, b=5))
        ]
        self.assertEqual(mock.mock_calls, expected)
        self.assertEqual(mock.return_value.foo.mock_calls,
                         [('', (1, 2, 3), dict(a=4, b=5))])
        self.assertEqual(mock.return_value.mock_calls,
                         [('foo', (1, 2, 3), dict(a=4, b=5))])

        mock = MagicMock()
        mock().foo.bar().baz()
        expected = [
            ('', (), {}), ('().foo.bar', (), {}),
            ('().foo.bar().baz', (), {})
        ]
        self.assertEqual(mock.mock_calls, expected)
        self.assertEqual(mock().mock_calls,
                         call.foo.bar().baz().call_list())

        for kwargs in dict(), dict(name='bar'):
            mock = MagicMock(**kwargs)
            int(mock.foo)
            expected = [('foo.__int__', (), {})]
            self.assertEqual(mock.mock_calls, expected)

            mock = MagicMock(**kwargs)
            mock.a()()
            expected = [('a', (), {}), ('a()', (), {})]
            self.assertEqual(mock.mock_calls, expected)
            self.assertEqual(mock.a().mock_calls, [call()])

            mock = MagicMock(**kwargs)
            mock(1)(2)(3)
            self.assertEqual(mock.mock_calls, call(1)(2)(3).call_list())
            self.assertEqual(mock().mock_calls, call(2)(3).call_list())
            self.assertEqual(mock()().mock_calls, call(3).call_list())

            mock = MagicMock(**kwargs)
            mock(1)(2)(3).a.b.c(4)
            self.assertEqual(mock.mock_calls,
                             call(1)(2)(3).a.b.c(4).call_list())
            self.assertEqual(mock().mock_calls,
                             call(2)(3).a.b.c(4).call_list())
            self.assertEqual(mock()().mock_calls,
                             call(3).a.b.c(4).call_list())

            mock = MagicMock(**kwargs)
            int(mock().foo.bar().baz())
            last_call = ('().foo.bar().baz().__int__', (), {})
            self.assertEqual(mock.mock_calls[-1], last_call)
            self.assertEqual(mock().mock_calls,
                             call.foo.bar().baz().__int__().call_list())
            self.assertEqual(mock().foo.bar().mock_calls,
                             call.baz().__int__().call_list())
            self.assertEqual(mock().foo.bar().baz.mock_calls,
                             call().__int__().call_list())
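
The call(1)(2)(3).call_list() idiom used throughout this test expands a chained call expression into the flat list of calls the mock records, which is the easiest way to build an expected mock_calls value. A short sketch:

from unittest.mock import MagicMock, call

m = MagicMock()
m(1)(2)

expected = call(1)(2).call_list()   # expands to [call(1), call()(2)]
assert m.mock_calls == expected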

Example 107

Project: golem Source File: test_taskcomputer.py
    def test_computation(self):
        task_server = MagicMock()
        task_server.get_task_computer_root.return_value = self.path
        task_server.config_desc = config_desc()
        tc = TaskComputer("ABC", task_server, use_docker_machine_manager=False)

        ctd = ComputeTaskDef()
        ctd.task_id = "xyz"
        ctd.subtask_id = "xxyyzz"
        ctd.return_address = "10.10.10.10"
        ctd.return_port = 10203
        ctd.key_id = "key"
        ctd.task_owner = "owner"
        ctd.src_code = "cnt=0\nfor i in range(10000):\n\tcnt += 1\noutput={'data': cnt, 'result_type': 0}"
        ctd.extra_data = {}
        ctd.short_description = "add cnt"
        ctd.deadline = timeout_to_deadline(10)
        self.assertEqual(len(tc.assigned_subtasks), 0)
        tc.task_given(ctd)
        self.assertEqual(tc.assigned_subtasks["xxyyzz"], ctd)
        self.assertLessEqual(tc.assigned_subtasks["xxyyzz"].deadline, timeout_to_deadline(10))
        self.assertEqual(tc.task_to_subtask_mapping["xyz"], "xxyyzz")
        tc.task_server.request_resource.assert_called_with("xyz",  tc.resource_manager.get_resource_header("xyz"),
                                                           "10.10.10.10", 10203, "key", "owner")
        assert tc.task_resource_collected("xyz")
        tc.task_server.unpack_delta.assert_called_with(tc.dir_manager.get_task_resource_dir("xyz"), None, "xyz")
        assert len(tc.current_computations) == 0
        assert tc.assigned_subtasks.get("xxyyzz") is None
        task_server.send_task_failed.assert_called_with("xxyyzz", "xyz", "Host direct task not supported",
                                                        "10.10.10.10", 10203, "key", "owner", "ABC")

        tc.support_direct_computation = True
        tc.task_given(ctd)
        assert tc.task_resource_collected("xyz")
        assert not tc.waiting_for_task
        assert len(tc.current_computations) == 1
        self.__wait_for_tasks(tc)

        prev_task_failed_count = task_server.send_task_failed.call_count
        self.assertFalse(tc.counting_task)
        self.assertEqual(len(tc.current_computations), 0)
        self.assertIsNone(tc.assigned_subtasks.get("xxyyzz"))
        assert task_server.send_task_failed.call_count == prev_task_failed_count
        self.assertTrue(task_server.send_results.called)
        args = task_server.send_results.call_args[0]
        self.assertEqual(args[0], "xxyyzz")
        self.assertEqual(args[1], "xyz")
        self.assertEqual(args[2]["data"], 10000)
        self.assertGreater(args[3], 0)
        self.assertLess(args[3], 10)
        self.assertEqual(args[4], "10.10.10.10")
        self.assertEqual(args[5], 10203)
        self.assertEqual(args[6], "key")
        self.assertEqual(args[7], "owner")
        self.assertEqual(args[8], "ABC")

        ctd.subtask_id = "aabbcc"
        ctd.src_code = "raise Exception('some exception')"
        ctd.deadline = timeout_to_deadline(5)
        tc.task_given(ctd)
        self.assertEqual(tc.assigned_subtasks["aabbcc"], ctd)
        self.assertLessEqual(tc.assigned_subtasks["aabbcc"].deadline, timeout_to_deadline(5))
        self.assertEqual(tc.task_to_subtask_mapping["xyz"], "aabbcc")
        tc.task_server.request_resource.assert_called_with("xyz",  tc.resource_manager.get_resource_header("xyz"),
                                                           "10.10.10.10", 10203, "key", "owner")
        self.assertTrue(tc.task_resource_collected("xyz"))
        self.__wait_for_tasks(tc)

        self.assertFalse(tc.counting_task)
        self.assertEqual(len(tc.current_computations), 0)
        self.assertIsNone(tc.assigned_subtasks.get("aabbcc"))
        task_server.send_task_failed.assert_called_with("aabbcc", "xyz", 'some exception', "10.10.10.10",
                                                        10203, "key", "owner", "ABC")

        ctd.subtask_id = "aabbcc2"
        ctd.src_code = "print 'Hello world'"
        ctd.timeout = timeout_to_deadline(5)
        tc.task_given(ctd)
        self.assertTrue(tc.task_resource_collected("xyz"))
        self.__wait_for_tasks(tc)

        task_server.send_task_failed.assert_called_with("aabbcc2", "xyz", "Wrong result format", "10.10.10.10", 10203,
                                                        "key", "owner", "ABC")

        ctd.subtask_id = "xxyyzz2"
        ctd.timeout = timeout_to_deadline(1)
        tc.task_given(ctd)
        self.assertTrue(tc.task_resource_collected("xyz"))
        tt = tc.current_computations[0]
        tc.task_computed(tc.current_computations[0])
        self.assertEqual(len(tc.current_computations), 0)
        task_server.send_task_failed.assert_called_with("xxyyzz2", "xyz", "Wrong result format", "10.10.10.10", 10203,
                                                        "key", "owner", "ABC")
        tt.end_comp()
        time.sleep(0.5)
        if tt.is_alive():
            tt.join(timeout=5)

Example 108

Project: OpenPoGoBot Source File: google_path_finder_test.py
    @staticmethod
    def test_path():

        client = Mock()
        client.directions = MagicMock(return_value=[
            {
                "legs": [
                    {
                        "steps": [
                            {
                                "end_location": {
                                    "lat": 51.5043872,
                                    "lng": -0.0741802
                                }
                            },
                            {
                                "end_location": {
                                    "lat": 51.5050996,
                                    "lng": -0.0747055
                                }
                            }
                        ]
                    },
                    {
                        "steps": [
                            {
                                "end_location": {
                                    "lat": 51.5060607,
                                    "lng": -0.0746535
                                }
                            }
                        ]
                    }
                ]
            }
        ])

        path_finder = GooglePathFinder(create_core_test_config(), client)

        path = path_finder.path(51.5043872, -0.0741802, 51.5060435, -0.073983)

        assert len(path) == 4

        lat, lng = path[0]
        assert lat == 51.5043872
        assert lng == -0.0741802

        lat, lng = path[1]
        assert lat == 51.5050996
        assert lng == -0.0747055

        lat, lng = path[2]
        assert lat == 51.5060607
        assert lng == -0.0746535

        lat, lng = path[3]
        assert lat == 51.5060435
        assert lng == -0.073983

Example 109

Project: srst2 Source File: test_srst2.py
	@patch('srst2.open', create=True)
	@patch('srst2.logging')
	@patch('srst2.check_command_versions')
	@patch('srst2.run_command')
	@patch('srst2.os.path')
	@patch('srst2.os.environ')
	def test_get_pileup_with_overides(self, env_mock, path_mock, run_mock,
									  version_mock, logging_mock, open_mock):
		fake_env_variables = {'SRST2_SAMTOOLS': '/usr/bin/samtools',
							  'SRST2_BOWTIE2': '/usr/bin/bowtie2',
							  'SRST2_BOWTIE2_BUILD': '/usr/bin/bowtie2-build'}
		env_mock.get.side_effect = fake_env_variables.get
		path_mock.isfile.side_effect = lambda f: 'missing' not in f
		arg_mock = MagicMock()
		arg_mock.mapq = 30
		arg_mock.baseq = 40
		arg_mock.samtools_args = []
		arg_mock.keep_interim_alignment = True # They're not actually created
		fake_file = MagicMock()
		fake_open_context = MagicMock(**{'__enter__.return_value': fake_file})
		open_mock.return_value = fake_open_context
		srst2.get_pileup(arg_mock, 'mapping_file', 'raw_bowtie_sam',
						 'bowtie_sam_mod', 'fasta', 'pileup')
		
		expected_samtools_view_command = [
			'/usr/bin/samtools',
			'view',
			'-b',
			'-o', 'mapping_file.unsorted.bam',
			'-q', '30',
			'-S', 'bowtie_sam_mod'
		]
		run_mock.assert_any_call(expected_samtools_view_command)
		
		expected_samtools_sort_command = [
			'/usr/bin/samtools',
			'sort',
			'mapping_file.unsorted.bam',
			'mapping_file.sorted'
		]
		run_mock.assert_any_call(expected_samtools_sort_command)

		expected_mpileup_command = [
			'/usr/bin/samtools',
			'mpileup',
			'-L', '1000',
			'-f', 'fasta',
			'-Q', '40',
			'-q', '30',
			'-B', 'mapping_file.sorted.bam'
		]
		run_mock.assert_any_call(expected_mpileup_command,
									stdout=fake_file)
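
This srst2 test shows the **{'__enter__.return_value': fake_file} configuration trick: dotted keys reach into child mocks, which is how a MagicMock is made usable as a with-statement context manager. A minimal standalone sketch (file names illustrative):

from unittest.mock import MagicMock

fake_file = MagicMock()
fake_cm = MagicMock(**{"__enter__.return_value": fake_file})
fake_open = MagicMock(return_value=fake_cm)

with fake_open("pileup", "w") as f:          # __enter__ hands back fake_file
    f.write("13 records")

fake_file.write.assert_called_once_with("13 records")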

Example 110

Project: td-client-python Source File: schedule_api_test.py
def test_history_success():
    td = api.API("APIKEY")
    body = b"""
        {
            "history": [
                {
                    "query": "SELECT COUNT(1) FROM nasdaq;",
                    "type": "presto",
                    "priority": 0,
                    "retry_limit": 0,
                    "duration": 1,
                    "status": "success",
                    "cpu_time": null,
                    "result_size": 30,
                    "job_id": "12345",
                    "created_at": "2016-04-13 05:24:59 UTC",
                    "updated_at": "2016-04-13 05:25:02 UTC",
                    "start_at": "2016-04-13 05:25:00 UTC",
                    "end_at": "2016-04-13 05:25:01 UTC",
                    "num_records": 1,
                    "database": "sample_datasets",
                    "user_name": "Ryuta Kamizono",
                    "result": "",
                    "url": "https://console.treasuredata.com/jobs/12345",
                    "hive_result_schema": "[[\\"_col0\\", \\"bigint\\"]]",
                    "organization": null,
                    "scheduled_at": ""
                },
                {
                    "query": "SELECT COUNT(1) FROM nasdaq;",
                    "type": "presto",
                    "priority": 0,
                    "retry_limit": 0,
                    "duration": 1,
                    "status": "success",
                    "cpu_time": null,
                    "result_size": 30,
                    "job_id": "67890",
                    "created_at": "2016-04-13 05:24:59 UTC",
                    "updated_at": "2016-04-13 05:25:02 UTC",
                    "start_at": "2016-04-13 05:25:00 UTC",
                    "end_at": "2016-04-13 05:25:01 UTC",
                    "num_records": 1,
                    "database": "sample_datasets",
                    "user_name": "Ryuta Kamizono",
                    "result": "",
                    "url": "https://console.treasuredata.com/jobs/67890",
                    "hive_result_schema": "[[\\"_col0\\", \\"bigint\\"]]",
                    "organization": null,
                    "scheduled_at": ""
                }
            ],
            "count": 2,
            "from": 0,
            "to": 20
        }
    """
    td.get = mock.MagicMock(return_value=make_response(200, body))
    history = td.history("foo", 0, 3)
    td.get.assert_called_with("/v3/schedule/history/foo", {"from": "0", "to": "3"})

Example 111

Project: barman Source File: test_wal_archiver.py
    @patch('os.unlink')
    @patch('barman.wal_archiver.FileWalArchiver.get_next_batch')
    @patch('barman.wal_archiver.FileWalArchiver.archive_wal')
    @patch('shutil.move')
    @patch('datetime.datetime')
    def test_archive(self, datetime_mock, move_mock, archive_wal_mock,
                     get_next_batch_mock, unlink_mock, capsys, caplog):
        """
        Test FileWalArchiver.archive method
        """
        fxlogdb_mock = MagicMock()
        backup_manager = MagicMock()
        archiver = FileWalArchiver(backup_manager)
        archiver.config.name = "test_server"

        wal_info = WalFileInfo(name="test_wal_file")
        wal_info.orig_filename = "test_wal_file"

        batch = WalArchiverQueue([wal_info])
        assert batch.size == 1
        assert batch.run_size == 1
        get_next_batch_mock.return_value = batch
        archive_wal_mock.side_effect = DuplicateWalFile

        archiver.archive(fxlogdb_mock)

        out, err = capsys.readouterr()
        assert ("\tError: %s is already present in server %s. "
                "File moved to errors directory." %
                (wal_info.name, archiver.config.name)) in out

        assert ("\tError: %s is already present in server %s. "
                "File moved to errors directory." %
                (wal_info.name, archiver.config.name)) in caplog.text

        archive_wal_mock.side_effect = MatchingDuplicateWalFile
        archiver.archive(fxlogdb_mock)
        unlink_mock.assert_called_with(wal_info.orig_filename)

        # Test batch errors
        caplog_reset(caplog)
        datetime_mock.utcnow.strftime.return_value = 'test_time'
        batch.errors = ['testfile_1', 'testfile_2']
        archive_wal_mock.side_effect = DuplicateWalFile
        archiver.archive(fxlogdb_mock)
        out, err = capsys.readouterr()

        assert ("Some unknown objects have been found while "
                "processing xlog segments for %s. "
                "Objects moved to errors directory:" %
                archiver.config.name) in out

        assert ("Archiver is about to move %s unexpected file(s) to errors "
                "directory for %s from %s" %
                (len(batch.errors),
                 archiver.config.name,
                 archiver.name)) in caplog.text

        assert ("Moving unexpected file for %s from %s: %s" %
                (archiver.config.name,
                 archiver.name, 'testfile_1')) in caplog.text

        assert ("Moving unexpected file for %s from %s: %s" %
                (archiver.config.name,
                 archiver.name, 'testfile_2')) in caplog.text

        move_mock.assert_any_call(
            'testfile_1',
            os.path.join(archiver.config.errors_directory,
                         "%s.%s.unknown" % ('testfile_1', 'test_time')))

        move_mock.assert_any_call(
            'testfile_2',
            os.path.join(archiver.config.errors_directory,
                         "%s.%s.unknown" % ('testfile_2', 'test_time')))

Example 112

Project: edx-platform Source File: base.py
    def test_full_pipeline_succeeds_registering_new_account(self):
        # First, create, the request and strategy that store pipeline state.
        # Mock out wire traffic.
        request, strategy = self.get_request_and_strategy(
            auth_entry=pipeline.AUTH_ENTRY_REGISTER, redirect_uri='social:complete')
        strategy.request.backend.auth_complete = mock.MagicMock(return_value=self.fake_auth_complete(strategy))

        # Begin! Grab the registration page and check the login control on it.
        self.assert_register_response_before_pipeline_looks_correct(self.client.get('/register'))

        # The pipeline starts by a user GETting /auth/login/<provider>.
        # Synthesize that request and check that it redirects to the correct
        # provider page.
        self.assert_redirect_to_provider_looks_correct(self.client.get(
            pipeline.get_login_url(self.provider.provider_id, pipeline.AUTH_ENTRY_LOGIN)))

        # Next, the provider makes a request against /auth/complete/<provider>.
        # pylint: disable=protected-access
        self.assert_redirect_to_register_looks_correct(actions.do_complete(request.backend, social_views._do_login))

        # At this point we know the pipeline has resumed correctly. Next we
        # fire off the view that displays the registration form.
        with self._patch_edxmako_current_request(request):
            self.assert_register_response_in_pipeline_looks_correct(
                student_views.register_user(strategy.request), pipeline.get(request)['kwargs'])

        # Next, we invoke the view that handles the POST. Not all providers
        # supply email. Manually add it as the user would have to; this
        # also serves as a test of overriding provider values. Always provide a
        # password for us to check that we override it properly.
        overridden_password = strategy.request.POST.get('password')
        email = '[email protected]'

        if not strategy.request.POST.get('email'):
            strategy.request.POST = self.get_registration_post_vars({'email': email})

        # The user must not exist yet...
        with self.assertRaises(auth_models.User.DoesNotExist):
            self.get_user_by_email(strategy, email)

        # ...but when we invoke create_account the existing edX view will make
        # it, but not social auths. The pipeline creates those later.
        with self._patch_edxmako_current_request(strategy.request):
            self.assert_json_success_response_looks_correct(student_views.create_account(strategy.request))
        # We've overridden the user's password, so authenticate() with the old
        # value won't work:
        created_user = self.get_user_by_email(strategy, email)
        self.assert_password_overridden_by_pipeline(overridden_password, created_user.username)

        # At this point the user object exists, but there is no associated
        # social auth.
        self.assert_social_auth_does_not_exist_for_user(created_user, strategy)

        # We should be redirected back to the complete page, setting
        # the "logged in" cookie for the marketing site.
        self.assert_logged_in_cookie_redirect(actions.do_complete(
            request.backend, social_views._do_login, request.user, None,  # pylint: disable=protected-access
            redirect_field_name=auth.REDIRECT_FIELD_NAME
        ))

        # Set the cookie and try again
        self.set_logged_in_cookies(request)
        self.assert_redirect_to_dashboard_looks_correct(
            actions.do_complete(strategy.request.backend, social_views._do_login, user=created_user))
        # Now the user has been redirected to the dashboard. Their third party account should now be linked.
        self.assert_social_auth_exists_for_user(created_user, strategy)
        self.assert_account_settings_context_looks_correct(account_settings_context(request), created_user, linked=True)

Example 113

Project: zerocloud Source File: test_queue.py
def do_setup(the_object_server):
    utils.HASH_PATH_SUFFIX = 'endcap'
    global _testdir, _test_servers, _test_sockets, \
        _orig_container_listing_limit, _test_coros, _orig_SysLogHandler, \
        _orig_POLICIES, _test_POLICIES
    _orig_POLICIES = storage_policy._POLICIES
    _orig_SysLogHandler = utils.SysLogHandler
    utils.SysLogHandler = mock.MagicMock()
    # Since we're starting up a lot here, we're going to test more than
    # just chunked puts; we're also going to test parts of
    # proxy_server.Application we couldn't get to easily otherwise.
    _testdir = \
        os.path.join(mkdtemp(), 'tmp_test_proxy_server_chunked')
    mkdirs(_testdir)
    rmtree(_testdir)
    mkdirs(os.path.join(_testdir, 'sda1'))
    mkdirs(os.path.join(_testdir, 'sda1', 'tmp'))
    mkdirs(os.path.join(_testdir, 'sdb1'))
    mkdirs(os.path.join(_testdir, 'sdb1', 'tmp'))
    conf = {'devices': _testdir, 'swift_dir': _testdir,
            'mount_check': 'false',
            'allowed_headers': 'content-encoding, x-object-manifest, '
                               'content-disposition, foo',
            'disable_fallocate': 'true',
            'allow_versions': 'True',
            'zerovm_maxoutput': 1024 * 1024 * 10}
    prolis = listen(('localhost', 0))
    acc1lis = listen(('localhost', 0))
    acc2lis = listen(('localhost', 0))
    con1lis = listen(('localhost', 0))
    con2lis = listen(('localhost', 0))
    obj1lis = listen(('localhost', 0))
    obj2lis = listen(('localhost', 0))
    _test_sockets = \
        (prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis)
    account_ring_path = os.path.join(_testdir, 'account.ring.gz')
    account_devs = [
        {'port': acc1lis.getsockname()[1]},
        {'port': acc2lis.getsockname()[1]},
    ]
    write_fake_ring(account_ring_path, *account_devs)
    container_ring_path = os.path.join(_testdir, 'container.ring.gz')
    container_devs = [
        {'port': con1lis.getsockname()[1]},
        {'port': con2lis.getsockname()[1]},
    ]
    write_fake_ring(container_ring_path, *container_devs)
    storage_policy._POLICIES = StoragePolicyCollection([
        StoragePolicy(0, 'zero', True),
        StoragePolicy(1, 'one', False),
        StoragePolicy(2, 'two', False)])
    obj_rings = {
        0: ('sda1', 'sdb1'),
        1: ('sdc1', 'sdd1'),
        2: ('sde1', 'sdf1'),
    }
    for policy_index, devices in obj_rings.items():
        policy = POLICIES[policy_index]
        dev1, dev2 = devices
        obj_ring_path = os.path.join(_testdir, policy.ring_name + '.ring.gz')
        obj_devs = [
            {'port': obj1lis.getsockname()[1], 'device': dev1},
            {'port': obj2lis.getsockname()[1], 'device': dev2},
        ]
        write_fake_ring(obj_ring_path, *obj_devs)
    prosrv = proxy_server.Application(conf, FakeMemcacheReturnsNone(),
                                      logger=debug_logger('proxy'))
    for policy in POLICIES:
        # make sure all the rings are loaded
        prosrv.get_object_ring(policy.idx)
    # don't lose this one!
    _test_POLICIES = storage_policy._POLICIES
    acc1srv = account_server.AccountController(
        conf, logger=debug_logger('acct1'))
    acc2srv = account_server.AccountController(
        conf, logger=debug_logger('acct2'))
    con1srv = container_server.ContainerController(
        conf, logger=debug_logger('cont1'))
    con2srv = container_server.ContainerController(
        conf, logger=debug_logger('cont2'))
    obj1srv = the_object_server.ObjectController(
        conf, logger=debug_logger('obj1'))
    obj2srv = the_object_server.ObjectController(
        conf, logger=debug_logger('obj2'))
    queuesrv = queue.QueueMiddleware(prosrv, conf,
                                     logger=prosrv.logger)
    nl = NullLogger()
    logging_prosv = proxy_logging.ProxyLoggingMiddleware(queuesrv, conf,
                                                         logger=prosrv.logger)
    prospa = spawn(wsgi.server, prolis, logging_prosv, nl)
    acc1spa = spawn(wsgi.server, acc1lis, acc1srv, nl)
    acc2spa = spawn(wsgi.server, acc2lis, acc2srv, nl)
    con1spa = spawn(wsgi.server, con1lis, con1srv, nl)
    con2spa = spawn(wsgi.server, con2lis, con2srv, nl)
    obj1spa = spawn(wsgi.server, obj1lis, obj1srv, nl)
    obj2spa = spawn(wsgi.server, obj2lis, obj2srv, nl)
    _test_servers = \
        (queuesrv, acc1srv, acc2srv, con1srv, con2srv, obj1srv, obj2srv)
    _test_coros = \
        (prospa, acc1spa, acc2spa, con1spa, con2spa, obj1spa, obj2spa)
    # Create account
    ts = normalize_timestamp(time())
    partition, nodes = prosrv.account_ring.get_nodes('a')
    for node in nodes:
        conn = swift.proxy.controllers.obj.http_connect(node['ip'],
                                                        node['port'],
                                                        node['device'],
                                                        partition, 'PUT', '/a',
                                                        {'X-Timestamp': ts,
                                                         'x-trans-id': 'test'})
        resp = conn.getresponse()
        assert(resp.status == 201)
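
Note that utils.SysLogHandler = mock.MagicMock() swaps a module attribute directly instead of going through patch(); that suits process-wide setup like this, but unlike patch() it is never undone automatically. A hedged sketch of the trade-off (using a real stdlib attribute purely for illustration):

import logging.handlers
from unittest.mock import MagicMock, patch

# Direct assignment: stays in effect until restored by hand.
original = logging.handlers.SysLogHandler
logging.handlers.SysLogHandler = MagicMock()
logging.handlers.SysLogHandler("dummy-address")   # no syslog socket is opened
logging.handlers.SysLogHandler = original         # manual cleanup

# patch(): scoped and self-restoring.
with patch("logging.handlers.SysLogHandler") as handler_mock:
    logging.handlers.SysLogHandler("dummy-address")
    handler_mock.assert_called_once_with("dummy-address")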

Example 114

Project: pyramid_fullauth Source File: test_social_view.py
@pytest.mark.parametrize('profile, email', [
    (
        {
            'accounts': [{'domain': text_type('facebook.com'), 'userid': text_type('2343')}],
            'displayName': text_type('teddy'),
            'verifiedEmail': text_type('[email protected]'),
            'preferredUsername': text_type('teddy'),
            'emails': [{'value': text_type('[email protected]')}],
            'name': text_type('ted')
        },
        '[email protected]'
    ), (
        {
            'accounts': [{'domain': text_type('facebook.com'), 'userid': text_type('2343')}],
            'displayName': text_type('teddy'),
            'preferredUsername': text_type('teddy'),
            'emails': [{'value': text_type('[email protected]')}],
            'name': text_type('ted')
        },
        '[email protected]'
    ), (
        {
            'accounts': [{'domain': text_type('facebook.com'), 'userid': text_type('2343')}],
            'displayName': text_type('teddy'),
            'preferredUsername': text_type('teddy'),
            'emails': [{}],
            'name': text_type('ted')
        },
        '[email protected]'
    ), (
        {
            'accounts': [{'domain': text_type('facebook.com'), 'userid': text_type('2343')}],
            'displayName': text_type('teddy'),
            'preferredUsername': text_type('teddy'),
            'emails': [],
            'name': text_type('ted')
        },
        '[email protected]'
    ), (
        {
            'accounts': [{'domain': text_type('facebook.com'), 'userid': text_type('2343')}],
            'displayName': text_type('teddy'),
            'preferredUsername': text_type('teddy'),
            'name': text_type('ted')
        },
        '[email protected]'
    ),
])
def test_email_from_context(profile, email):
    """Test email_from_context email getting method."""
    from velruse import AuthenticationComplete
    context = AuthenticationComplete(
        profile,
        {'oauthAccessToken': '7897048593434'},
        text_type('facebook'),
        text_type('facebook')
    )
    view = SocialLoginViews(mock.MagicMock())
    assert view._email_from_context(context) == email

Example 115

Project: hyclops Source File: test_libcloud_vsphere.py
    def _set_libcloud_mock(self):
        MockHostSystem.clear_mock()
        MockVirtualMachine.clear_mock()
        vm1 = MockVirtualMachine({
            "name": "vm1",
            "summary": Mock(**{
                "quickStats.overallCpuUsage": 100,
                "quickStats.guestMemoryUsage": 300,
                "config.numCpu": 1,
                "config.memorySizeMB": 2048,
                "config.vmPathName": "[datastore1] /foo/bar.vmx",
                "config.guestFullName": "CentOS 4/5/6 (64bit)",
                "runtime.maxCpuUsage": 2000}),
            "runtime": Mock(**{
                "powerState": "poweredOn",
                "question": Mock(spec=['id', 'choice', 'text'],
                                 **{'id': 1,
                                    'choice.choiceInfo': [Mock(**{'key': 1, 'label': 'choice1', 'summary': 'choice1'})],
                                    'text': 'question message'})}),
            "config": Mock(**{
                "uuid": "vsphere_uuid",
                "extraConfig": [MagicMock()]}),
            "guest": Mock(**{
                "toolsRunningStatus": "guestToolsRunning",
                "toolsVersionStatus": "toolsVersionCurrent",
                "net": [MagicMock()]})
        })
        vm2 = MockVirtualMachine({
            "name": "vm2",
            "summary": Mock(**{
                "quickStats.overallCpuUsage": 100,
                "quickStats.guestMemoryUsage": 300,
                "config.numCpu": 1,
                "config.memorySizeMB": 2048,
                "config.vmPathName": "[datastore1] /foo/bar.vmx",
                "config.guestFullName": "CentOS 4/5/6 (64bit)",
                "runtime.maxCpuUsage": 2000}),
            "runtime": Mock(**{
                "powerState": "poweredOn",
                "question": Mock(spec=['id', 'choice', 'message'],
                                 **{'id': 1,
                                    'choice.choiceInfo': [Mock(**{'key': 1, 'label': 'choice1', 'summary': 'choice1'})],
                                    'message': [Mock(text="question message"), Mock(text="with line feed")]})}),
            "config": Mock(**{
                "uuid": "vsphere_uuid",
                "extraConfig": [MagicMock()]}),
            "guest": Mock(**{
                "toolsRunningStatus": "guestToolsRunning",
                "toolsVersionStatus": "toolsVersionCurrent",
                "net": [Mock(spec=["ipAddress"], **{"ipAddress": ["127.0.0.1"]})]}),
        })
        vm3 = MockVirtualMachine({
            "name": "vm3",
            "summary": Mock(**{
                "quickStats.overallCpuUsage": 100,
                "quickStats.guestMemoryUsage": 300,
                "config.numCpu": 1,
                "config.memorySizeMB": 2048,
                "config.vmPathName": "[datastore1] /foo/bar.vmx",
                "config.guestFullName": "CentOS 4/5/6 (64bit)",
                "runtime.maxCpuUsage": 2000}),
            "runtime": Mock(spec=["powerState"], **{"powerState": "poweredOn"}),
            "config": Mock(**{
                "uuid": "vsphere_uuid",
                "extraConfig": [MagicMock()]}),
            "guest": Mock(spec=["toolsRunningStatus", "toolsVersionStatus", "ipAddress"], **{
                "toolsRunningStatus": "guestToolsRunning",
                "toolsVersionStatus": "toolsVersionCurrent",
                "ipAddress": "127.0.0.1"}),
        })
        vm4 = MockVirtualMachine({
            "name": "vm4",
            "summary": Mock(**{
                "quickStats.overallCpuUsage": 100,
                "quickStats.guestMemoryUsage": 300,
                "config.numCpu": 1,
                "config.memorySizeMB": 2048,
                "config.vmPathName": "[datastore1] /foo/bar.vmx",
                "config.guestFullName": "CentOS 4/5/6 (64bit)",
                "runtime.maxCpuUsage": 2000}),
            "runtime": Mock(**{
                "powerState": "poweredOn",
                "question": Mock(spec=['id', 'choice', 'text'],
                                 **{'id': 1,
                                    'choice.choiceInfo': [Mock(**{'key': 1, 'label': 'choice1', 'summary': 'choice1'})],
                                    'text': 'question message'})}),
            "config": Mock(**{
                "uuid": "vsphere_uuid",
                "extraConfig": [MagicMock()]}),
            "guest": Mock(**{
                "toolsRunningStatus": "guestToolsRunning",
                "toolsVersionStatus": "toolsVersionCurrent",
                "net": [MagicMock()]}),
        })
        MockVirtualMachine.add_mock_vm(vm1)
        MockVirtualMachine.add_mock_vm(vm2)
        MockVirtualMachine.add_mock_vm(vm3)
        MockVirtualMachine.add_mock_vm(vm4)
        self.vm = vm1
        self.host = MockHostSystem({
            "name": "host name",
            "datastore": [
                MockDatastore({
                    "name": "datastore name",
                    "summary": Mock(**{"freeSpace": 400 * 1024 ** 3,
                                    "capacity": 800 * 1024 ** 3,
                                    "type": "nfs"})
                })
            ],
            "summary": Mock(**{
                'hardware.uuid': "hardware_uuid",
                'hardware.cpuMhz': 2000,
                'hardware.numCpuCores': 8,
                'quickStats.overallCpuUsage': 300,
                'quickStats.overallMemoryUsage': 1000}),
            "hardware": Mock(**{
                'cpuInfo.numCpuThreads': 16,
                'memorySize': 16 * 1024 ** 3}),
            "vm": [vm1, vm2, vm3]  # not include vm4
        })
        MockHostSystem.add_mock_host(self.host)
        self.node = Node(
            id="vsphere_uuid",
            name="vm1",
            state=0,
            public_ips=[],
            private_ips=[],
            driver=self.driver,
            extra={
                'managedObjectReference': self.vm,
                'status': 'running',
                'cpu': 1,
                'cpu_usage': 5.0,
                'memory': 2048 * 1024 ** 2,
                'memory_usage': 300 * 1024 ** 2,
                'toolsRunningStatus': 'guestToolsRunning',
                'toolsVersionStatus': 'toolsVersionCurrent',
                'vmpath': '[datastore1] /foo/bar.vmx',
                'stuck_state': 1,
                'stuck_question_id': 1,
                'stuck_question': "question message",
                'stuck_choices': [{'label': 'choice1', 'key': 1, 'summary': 'choice1'}],
                'platform': "CentOS 4/5/6 (64bit)",
            }
        )
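
This fixture leans on two Mock configuration shortcuts: dotted keys such as 'quickStats.overallCpuUsage' configure nested child mocks in one constructor call, and spec=[...] limits which attributes exist, so probing for an absent one raises AttributeError instead of auto-creating a child. A standalone sketch:

from unittest.mock import Mock

summary = Mock(**{"quickStats.overallCpuUsage": 100, "config.numCpu": 1})
assert summary.quickStats.overallCpuUsage == 100
assert summary.config.numCpu == 1

runtime = Mock(spec=["powerState"], **{"powerState": "poweredOn"})
assert runtime.powerState == "poweredOn"
assert not hasattr(runtime, "question")  # spec=[...] blocks unknown attributes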

Example 116

Project: backdrop Source File: test_dispatch.py
    @patch('backdrop.transformers.dispatch.AdminAPI')
    @patch('backdrop.transformers.dispatch.DataSet')
    @patch('backdrop.transformers.tasks.debug.logging')
    @freeze_time('2014-12-14')
    def test_run_transform_applies_additional_fields(
            self,
            mock_logging_task,
            mock_data_set,
            mock_adminAPI):
        mock_logging_task.return_value = [{'new-data': 'point'}]
        adminAPI_instance = mock_adminAPI.return_value
        adminAPI_instance.get_data_set.return_value = {
            "bearer_token": "foo2",
        }
        data_set_instance = MagicMock()
        data_set_instance.get.return_value = {
            'data': [
                {'data': 'point'},
            ],
        }
        mock_data_set.from_group_and_type.return_value = data_set_instance

        earliest = datetime(2014, 12, 10, 12, 00, 00, tzinfo=pytz.utc)
        latest = datetime(2014, 12, 14, 12, 00, 00, tzinfo=pytz.utc)

        run_transform({
            'data_group': 'group',
            'data_type': 'type',
            'token': 'foo',
        }, {
            'type': {
                'function': 'backdrop.transformers.tasks.debug.logging',
            },
            'query-parameters': {
                'period': 'day',
            },
            'options': {
                'additionalFields': {
                    'foo': 'bar',
                }
            },
            'output': {
                'data-group': 'other-group',
                'data-type': 'other-type',
            },
        }, earliest, latest)

        mock_data_set.from_group_and_type.assert_any_call(
            'http://backdrop/data', 'group', 'type',
        )
        data_set_instance.get.assert_called_with(
            query_parameters={
                'period': 'day',
                'flatten': 'true',
                'start_at': '2014-12-10T00:00:00+00:00',
                'end_at': '2014-12-14T00:00:00+00:00',
                'inclusive': 'true',
            },
        )
        mock_data_set.from_group_and_type.assert_any_call(
            'http://backdrop/data', 'other-group', 'other-type', token='foo2',
        )
        data_set_instance.post.assert_called_with([{
            '_id': 'X2ZvbzpiYXI=',
            'new-data': 'point',
            'foo': 'bar'
        }])
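
The adminAPI_instance = mock_adminAPI.return_value line is the standard pattern for a patched class: the patch replaces the class itself, so whatever the code under test constructs is the mock's return_value, and instance methods are configured there. A reduced sketch (the AdminAPI stub below is illustrative, not backdrop's code):

from unittest.mock import patch

class AdminAPI:
    def get_data_set(self, name):
        raise RuntimeError("would hit the network")

def fetch_token(name):
    return AdminAPI().get_data_set(name)["bearer_token"]

with patch(f"{__name__}.AdminAPI") as mock_cls:
    instance = mock_cls.return_value                  # what AdminAPI() returns while patched
    instance.get_data_set.return_value = {"bearer_token": "foo2"}
    assert fetch_token("group/type") == "foo2"
    instance.get_data_set.assert_called_once_with("group/type")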

Example 117

Project: django-location Source File: test_runmeter.py
    def test_process_new_source(self):
        arbitrary_url = 'http://www.go.com/101'
        arbitrary_route_name = 'Something'
        arbitrary_source = models.LocationSource.objects.create(
            name='Whatnot',
            user=self.user,
            type=self.source_type,
            active=True,
            data={
                'url': arbitrary_url,
                'known_points': {},
            }
        )
        arbitrary_docuement = MagicMock()
        arbitrary_time = datetime.datetime.utcnow().replace(
            tzinfo=utc
        )
        arbitrary_points = [
            {'lat': -122, 'lng': 45, 'key': 'alpha', 'time': 1},
            {'lat': -123, 'lng': 44, 'key': 'beta', 'time': 2}
        ]

        consumer = RunmeterConsumer(arbitrary_source)
        consumer._get_docuement = MagicMock(
            return_value=arbitrary_docuement
        )
        consumer.get_start_time = MagicMock(
            return_value=arbitrary_time
        )
        consumer.get_route_name = MagicMock(
            return_value=arbitrary_route_name
        )
        consumer.get_points = MagicMock(
            return_value=arbitrary_points
        )
        consumer.is_active = MagicMock(
            return_value=False
        )

        consumer.process()

        consumer._get_docuement.assert_called_with(arbitrary_url)
        consumer.get_start_time.assert_called_with(arbitrary_docuement)
        consumer.get_route_name.assert_called_with(arbitrary_docuement)
        consumer.get_points.assert_called_with(
            arbitrary_docuement,
            arbitrary_time
        )

        actual_points = models.LocationSnapshot.objects.order_by('date')
        self.assertEqual(actual_points.count(), 2)

        first_assertions = {
            'date': arbitrary_time + datetime.timedelta(seconds=1),
            'source': arbitrary_source,
            'location': Point(-122, 45)
        }
        for k, v in first_assertions.items():
            self.assertEqual(getattr(actual_points[0], k), v)

        second_assertions = {
            'date': arbitrary_time + datetime.timedelta(seconds=2),
            'source': arbitrary_source,
            'location': Point(-123, 44)
        }
        for k, v in second_assertions.items():
            self.assertEqual(getattr(actual_points[1], k), v)

        self.assertFalse(
            models.LocationSource.objects.get(pk=arbitrary_source.pk).active
        )

Example 118

Project: asiaq Source File: test_disco_elasticache.py
    def setUp(self):
        self.elasticache = DiscoElastiCache(
            vpc=_get_mock_vpc(), aws=_get_mock_aws(), route53=_get_mock_route53())
        self.elasticache.route53 = MagicMock()

        DiscoElastiCache.config = PropertyMock(return_value=get_mock_config({
            'unittest:new-cache': {
                'instance_type': 'cache.m1.small',
                'engine': 'redis',
                'engine_version': '2.8.6',
                'port': '1000',
                'parameter_group': 'default',
                'num_nodes': '5',
                'auto_failover': 'true',
                'maintenance_window': 'sun:10:00-sun:11:00'
            },
            'unittest:old-cache': {
                'instance_type': 'cache.m1.small',
                'engine': 'redis',
                'engine_version': '2.8.6',
                'port': '1000',
                'parameter_group': 'default',
                'num_nodes': '5',
                'auto_failover': 'true'
            }
        }))
        self.elasticache.conn = MagicMock()

        self.replication_groups = [
            {
                'ReplicationGroupId': self.elasticache._get_redis_replication_group_id('old-cache'),
                'Description': 'unittest-old-cache',
                'Status': 'available',
                'NodeGroups': [{
                    'PrimaryEndpoint': {
                        'Address': 'old-cache.example.com'
                    }
                }]
            },
            {
                'ReplicationGroupId': self.elasticache._get_redis_replication_group_id('cache2'),
                'Description': 'unittest-cache2',
                'Status': 'available',
                'NodeGroups': [{
                    'PrimaryEndpoint': {
                        'Address': 'cache2.example.com'
                    }
                }]
            },
            {
                'ReplicationGroupId': self.elasticache._get_redis_replication_group_id('cache'),
                'Description': 'unittest2-cache',
                'Status': 'available'
            }
        ]

        def _create_replication_group(**kwargs):
            self.replication_groups.append({
                'ReplicationGroupId': kwargs['ReplicationGroupId'],
                'NodeGroups': [{
                    'PrimaryEndpoint': {
                        'Address': 'foo.example.com'
                    }
                }]
            })

        # pylint doesn't like Boto3's argument names
        # pylint: disable=C0103
        def _describe_replication_groups(ReplicationGroupId=None):
            if ReplicationGroupId is None:
                return {
                    'ReplicationGroups': self.replication_groups
                }
            else:
                found_groups = [group for group in self.replication_groups
                                if group['ReplicationGroupId'] == ReplicationGroupId]
                return {
                    'ReplicationGroups': found_groups
                }

        # pylint: disable=C0103
        def _describe_cache_subnet_groups(CacheSubnetGroupName=None):
            if CacheSubnetGroupName:
                return {
                    'CacheSubnetGroups': [{
                        'CacheSubnetGroupName': 'unittest-intranet'
                    }]
                }
            elif CacheSubnetGroupName is None:
                return {
                    'CacheSubnetGroups': [{
                        'CacheSubnetGroupName': 'unittest-intranet'
                    }, {
                        'CacheSubnetGroupName': 'unittest-build'
                    }]
                }

        self.elasticache.conn.describe_replication_groups.side_effect = _describe_replication_groups
        self.elasticache.conn.describe_cache_subnet_groups.side_effect = _describe_cache_subnet_groups
        self.elasticache.conn.create_replication_group.side_effect = _create_replication_group
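
A minimal sketch of the side_effect-as-function pattern above: every call on the mock is delegated to a plain function, so the fake API can keep state between calls. The method and key names here are hypothetical.

from mock import MagicMock

groups = []

def _create_group(**kwargs):            # the fake "create" call mutates state
    groups.append(kwargs['GroupId'])

def _describe_groups(GroupId=None):     # the fake "describe" call reads it back
    found = groups if GroupId is None else [g for g in groups if g == GroupId]
    return {'Groups': found}

conn = MagicMock()
conn.create_group.side_effect = _create_group
conn.describe_groups.side_effect = _describe_groups

conn.create_group(GroupId='g1')
assert conn.describe_groups() == {'Groups': ['g1']}
assert conn.describe_groups(GroupId='nope') == {'Groups': []}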

Example 119

Project: sd-agent Source File: test_process.py
    def test_relocated_procfs(self):
        from utils.platform import Platform
        import tempfile
        import shutil
        import uuid

        already_linux = Platform.is_linux()
        unique_process_name = str(uuid.uuid4())
        my_procfs = tempfile.mkdtemp()

        def _fake_procfs(arg, root=my_procfs):
            for key, val in arg.iteritems():
                path = os.path.join(root, key)
                if isinstance(val, dict):
                    os.mkdir(path)
                    _fake_procfs(val, path)
                else:
                    with open(path, "w") as f:
                        f.write(str(val))
        _fake_procfs({
            '1': {
                'status': (
                    "Name:\t%s\n"
                ) % unique_process_name,
                'stat': ('1 (%s) S 0 1 1 ' + ' 0' * 46) % unique_process_name,
                'cmdline': unique_process_name,

            },
            'stat': (
                "cpu  13034 0 18596 380856797 2013 2 2962 0 0 0\n"
                "btime 1448632481\n"
            ),
        })

        config = {
            'init_config': {
                'procfs_path': my_procfs
            },
            'instances': [{
                'name': 'moved_procfs',
                'search_string': [unique_process_name],
                'exact_match': False,
                'ignored_denied_access': True,
                'thresholds': {'warning': [1, 10], 'critical': [1, 100]},
            }]
        }

        version = int(psutil.__version__.replace(".", ""))
        try:
            def import_mock(name, i_globals={}, i_locals={}, fromlist=[], level=-1, orig_import=__import__):
                # _psutil_linux and _psutil_posix are the C bindings; use a mock for those
                if name in ('_psutil_linux', '_psutil_posix') or level >= 1 and ('_psutil_linux' in fromlist or '_psutil_posix' in fromlist):
                    m = MagicMock()
                    # the import system will ask us for our own name
                    m._psutil_linux = m
                    m._psutil_posix = m
                    # there's a version safety check in psutil/__init__.py; this skips it
                    m.version = version
                    return m
                return orig_import(name, i_globals, i_locals, fromlist, level)

            orig_open = open

            def open_mock(name, *args):
                from mock import MagicMock

                # Work around issue addressed here: https://github.com/giampaolo/psutil/pull/715
                # TODO: Remove open_mock if the patch lands
                # We can't use patch here because 1) we're reloading psutil, and 2) the problem is happening during the import.
                # NB: The values generated here are mostly ignored, and will correctly be overwritten once we set PROCFS_PATH
                if name == '/proc/stat':
                    handle = MagicMock(spec=file)
                    handle.write.return_value = None
                    handle.__enter__.return_value = handle
                    handle.readline.return_value = 'cpu  13002 0 18504 377363817 1986 2 2960 0 0 0'
                    return handle
                return orig_open(name, *args)

            # contextlib.nested is deprecated in favor of `with MGR1, MGR2, ...`, but we have too many mocks to fit on one line, and backslash
            # line continuation is not flake8 compliant, even when semantically required (as here). patch() is unlikely to raise errors that
            # nested() would suppress, so the main downside of contextlib.nested is avoided.
            with contextlib.nested(patch('sys.platform', 'linux'),
                                   patch('socket.AF_PACKET', create=True),
                                   patch('__builtin__.__import__', side_effect=import_mock),
                                   patch('__builtin__.open', side_effect=open_mock)):
                if not already_linux:
                    # Reloading psutil fails on linux, but we only need to do so if we didn't start out on a linux platform
                    reload(psutil)
                assert Platform.is_linux()

                self.run_check(config, mocks={'get_pagefault_stats': noop_get_pagefault_stats})
        finally:
            shutil.rmtree(my_procfs)
            if not already_linux:
                # restore the original psutil that doesn't have our mocks
                reload(psutil)
            else:
                psutil.PROCFS_PATH = '/proc'

        expected_tags = self.generate_expected_tags(config['instances'][0])
        self.assertServiceCheckOK('process.up', count=1, tags=['process:moved_procfs'])

        self.assertMetric('system.processes.number', at_least=1, tags=expected_tags)

        self.coverage_report()
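
A minimal sketch of the selective open() interception above, written against Python 3's builtins.open rather than Python 2's __builtin__.open; the /fake/stat path is made up.

from mock import MagicMock, patch

orig_open = open

def open_mock(name, *args, **kwargs):
    if name == '/fake/stat':                    # intercept just this path
        handle = MagicMock()
        handle.__enter__.return_value = handle  # support `with open(...)`
        handle.readline.return_value = 'cpu  1 2 3'
        return handle
    return orig_open(name, *args, **kwargs)     # everything else stays real

with patch('builtins.open', side_effect=open_mock):
    with open('/fake/stat') as f:
        assert f.readline() == 'cpu  1 2 3'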

Example 120

Project: ANALYSE Source File: test_crowdsource_hinter.py
    @staticmethod
    def create(hints=None,
               previous_answers=None,
               user_submissions=None,
               user_voted=None,
               moderate=None,
               mod_queue=None):
        """
        A factory method for making CHM's
        """
        # Should have a single child, but it doesn't matter what that child is
        field_data = {'data': CHModuleFactory.sample_problem_xml, 'children': [None]}

        if hints is not None:
            field_data['hints'] = hints
        else:
            field_data['hints'] = {
                '24.0': {'0': ['Best hint', 40],
                         '3': ['Another hint', 30],
                         '4': ['A third hint', 20],
                         '6': ['A less popular hint', 3]},
                '25.0': {'1': ['Really popular hint', 100]}
            }

        if mod_queue is not None:
            field_data['mod_queue'] = mod_queue
        else:
            field_data['mod_queue'] = {
                '24.0': {'2': ['A non-approved hint']},
                '26.0': {'5': ['Another non-approved hint']}
            }

        if previous_answers is not None:
            field_data['previous_answers'] = previous_answers
        else:
            field_data['previous_answers'] = [
                ['24.0', [0, 3, 4]],
                ['29.0', []]
            ]

        if user_submissions is not None:
            field_data['user_submissions'] = user_submissions
        else:
            field_data['user_submissions'] = ['24.0', '29.0']

        if user_voted is not None:
            field_data['user_voted'] = user_voted

        if moderate is not None:
            field_data['moderate'] = moderate

        descriptor = Mock(weight='1')
        # Make the descriptor have a capa problem child.
        capa_descriptor = MagicMock()
        capa_descriptor.name = 'capa'
        capa_descriptor.displayable_items.return_value = [capa_descriptor]
        descriptor.get_children.return_value = [capa_descriptor]

        # Make a fake capa module.
        capa_module = MagicMock()
        capa_module.lcp = MagicMock()
        responder = MagicMock()

        def validate_answer(answer):
            """ A mock answer validator - simulates a numerical response"""
            try:
                float(answer)
                return True
            except ValueError:
                return False
        responder.validate_answer = validate_answer

        def compare_answer(ans1, ans2):
            """ A fake answer comparer """
            return ans1 == ans2
        responder.compare_answer = compare_answer

        capa_module.lcp.responders = {'responder0': responder}
        capa_module.displayable_items.return_value = [capa_module]

        system = get_test_system()
        # Make the system have a marginally-functional get_module

        def fake_get_module(descriptor):
            """
            A fake module-maker.
            """
            return capa_module
        system.get_module = fake_get_module
        module = CrowdsourceHinterModule(descriptor, system, DictFieldData(field_data), Mock())

        return module
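
One detail worth isolating from the factory above: `name` is consumed by the MagicMock constructor itself (it names the mock in reprs), so a real .name attribute has to be assigned after creation, exactly as done for capa_descriptor. A minimal sketch:

from mock import MagicMock

m = MagicMock(name='repr-name')  # names the mock object, not an attribute
assert 'repr-name' in repr(m)

m.name = 'capa'                  # this assigns an actual attribute
assert m.name == 'capa'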

Example 121

Project: DIRAC Source File: Test_XROOTStorage.py
  def test_getDirectory( self ):
    ''' tests the output of getDirectory
    '''
    global mocked_xrootclient

    resource = XROOTStorage( 'storageName', self.parameterDict )

    statusStatDirMock = xrootStatusMock()
    statusStatDirMock.makeOk()

    statInfoMockDir = xrootStatInfoMock()
    statInfoMockDir.makeDir()

    statInfoMockFile = xrootStatInfoMock()
    statInfoMockFile.size = -1
    statInfoMockFile.makeFile()

    # Old comment, still true :(
    # This dirty thing forces us to know how many times api.stat is called and in what order...
    mocked_xrootclient.stat.side_effect = [
        ( statusStatDirMock, statInfoMockDir ),
        ( statusStatDirMock, statInfoMockFile ),
    ] * 3

    statDir1 = xrootStatInfoMock()
    statDir1.makeDir()
    statDir1.size = -1
    dir1 = xrootListEntryMock( "dir1", "host", statDir1 )

    statDir2 = xrootStatInfoMock()
    statDir2.makeDir()
    statDir2.size = -1
    dir2 = xrootListEntryMock( "dir2", "host", statDir2 )

    statFile1 = xrootStatInfoMock()
    statFile1.makeFile()
    statFile1.size = -1
    file1 = xrootListEntryMock( "file1", "host", statFile1 )

    statFile2 = xrootStatInfoMock()
    statFile2.makeFile()
    statFile2.size = -1
    file2 = xrootListEntryMock( "file2", "host", statFile2 )

    statFile3 = xrootStatInfoMock()
    statFile3.makeFile()
    statFile3.size = -1
    file3 = xrootListEntryMock( "file3", "host", statFile3 )

    directoryListMock1 = xrootDirectoryListMock( "parent", [dir1, dir2, file1] )
    directoryListMock2 = xrootDirectoryListMock( "dir1", [file2] )
    directoryListMock3 = xrootDirectoryListMock( "dir1", [file3] )

    statusMock = xrootStatusMock()
    statusMock.makeOk()

    mocked_xrootclient.copy.return_value = statusMock, None
    mocked_xrootclient.dirlist.side_effect = [
        ( statusStatDirMock, directoryListMock1 ),
        ( statusStatDirMock, directoryListMock2 ),
        ( statusStatDirMock, directoryListMock3 ),
    ]

    # This test should get the 3 files
    copymock = mock.Mock()
    copymock.run.return_value = (statusMock, None)
    mocked_xrootd.client.CopyProcess = mock.Mock(return_value = copymock)
    # Mock the os calls that access the filesystem and really create the directories locally.
    with mock.patch('os.makedirs', new=MagicMock(return_value=True)), mock.patch('os.remove', new=MagicMock(return_value=True)):
      res = resource.getDirectory( "A" )
      self.assertEqual( True, res['OK'] )
      self.assertEqual( {"A" : { "Files" : 3, "Size" :-3}}, res['Value']['Successful'] )
      self.assertEqual( {}, res['Value']['Failed'] )

      # The copy command is just in error
      statusMock.makeError()
      mocked_xrootclient.dirlist.side_effect = [
          ( statusStatDirMock, directoryListMock1 ),
          ( statusStatDirMock, directoryListMock2 ),
          ( statusStatDirMock, directoryListMock3 ),
      ]
      mocked_xrootclient.stat.side_effect = [
          ( statusStatDirMock, statInfoMockDir ),
          ( statusStatDirMock, statInfoMockFile ),
      ] * 3

      res = resource.getDirectory( "A" )
      self.assertEqual( True, res['OK'] )
      self.assertEqual( {}, res['Value']['Successful'] )
      self.assertEqual( {"A" : { "Files" : 0, "Size" : 0}}, res['Value']['Failed'] )

      # The copy command is fatal
      statusMock.makeFatal()
      mocked_xrootclient.dirlist.side_effect = [
          ( statusStatDirMock, directoryListMock1 ),
          ( statusStatDirMock, directoryListMock2 ),
          ( statusStatDirMock, directoryListMock3 ),
      ]
      mocked_xrootclient.stat.side_effect = [
          ( statusStatDirMock, statInfoMockDir ),
          ( statusStatDirMock, statInfoMockFile ),
      ] * 3

      res = resource.getDirectory( "A" )
      self.assertEqual( True, res['OK'] )
      self.assertEqual( {}, res['Value']['Successful'] )
      self.assertEqual( {"A" : { "Files" : 0, "Size" : 0}}, res['Value']['Failed'] )

Example 122

Project: DIRAC Source File: Test_XROOTStorage.py
  @mock.patch('os.path.exists', new=MagicMock( return_value = True ))
  @mock.patch('DIRAC.Resources.Storage.XROOTStorage.getSize', new=MagicMock( return_value = 1 ))
  def test_putFile( self ):
    """ Test the output of putFile"""

    global mocked_xrootclient

    resource = XROOTStorage( 'storageName', self.parameterDict )

    statusMock = xrootStatusMock()
    statusMock.makeOk()

    mocked_xrootclient.copy.return_value = statusMock, None

    statusMkDirMock = xrootStatusMock()
    statusMkDirMock.makeOk()

    mocked_xrootclient.mkdir.return_value = statusMkDirMock, None

    statusRmMock = xrootStatusMock()
    statusRmMock.makeOk()

    mocked_xrootclient.rm.return_value = statusRmMock, None

    statusStatMock = xrootStatusMock()
    statusStatMock.makeOk()


    statInfoMock = xrootStatInfoMock()
    statInfoMock.makeFile()
    statInfoMock.size = 1

    updateStatMockReferences(statusStatMock, statInfoMock)

    # This test should be completely okay
    copymock = mock.Mock()
    copymock.run.return_value = (statusMock, None)
    mocked_xrootd.client.CopyProcess = mock.Mock(return_value = copymock)
    res = resource.putFile( {"remoteA" : "localA"} )
    self.assertEqual( True, res['OK'] )
    self.assertEqual( {"remoteA" : 1}, res['Value']['Successful'] )
    self.assertEqual( {}, res['Value']['Failed'] )


    # Here the sizes should not match
    statInfoMock.size = 1000
    res = resource.putFile( {"remoteA" : "localA"} )
    self.assertEqual( True, res['OK'] )
    self.assertEqual( {}, res['Value']['Successful'] )
    self.assertEqual( "remoteA", res['Value']['Failed'].keys()[0] )
    statInfoMock.size = 1


    # Here we should not be able to get the file from storage
    statusMock.makeError()
    res = resource.putFile( {"remoteA" : "localA"} )
    self.assertEqual( True, res['OK'] )
    self.assertEqual( {}, res['Value']['Successful'] )
    self.assertEqual( "remoteA", res['Value']['Failed'].keys()[0] )

    # Fatal error in getting the file from storage
    statusMock.makeFatal()
    res = resource.putFile( {"remoteA" : "localA"} )
    self.assertEqual( True, res['OK'] )
    self.assertEqual( {}, res['Value']['Successful'] )
    self.assertEqual( "remoteA", res['Value']['Failed'].keys()[0] )

    # Bad input
    res = resource.putFile( "remoteA" )
    self.assertEqual( False, res['OK'] )


    # Error (but not 3011) when checking the existence of the file, and then successful anyway
    statusMock.makeOk()

    with mock.patch.object(XROOTStorage, '_XROOTStorage__singleExists', return_value=S_OK(S_ERROR("error checking existence"))):
      res = resource.putFile( {"remoteA" : "localA"} )
      self.assertEqual( True, res['OK'] )
      self.assertEqual(  {'remoteA': 1}, res['Value']['Successful'] )

Example 123

Project: sherpa Source File: conftest.py
@pytest.fixture
def mock_chips(monkeypatch, tmpdir, request):
    """
    Fixture for tests mocking chips

    Returns
    -------
    The tuple (backend, mock_chips)
    """

    # First, inject a mock chips module in the backend.
    chips = mock.MagicMock()
    monkeypatch.setitem(sys.modules, name="pychips", value=chips)

    # figure out what IO module we can use
    try:
        import pycrates
        io = "crates"
    except ImportError:
        io = "pyfits"  # Even if this is not available, config code will fall back to dummy

    # Now, write a fake configuration file to a temporary location
    config = tmpdir.mkdir("config").join("sherpa.rc")
    config.write("""
[options]
plot_pkg : chips
io_pkg : {}
    """.format(io))

    # Then, inject a function that returns the fake file
    def get_config():
        return str(config)
    import sherpa
    monkeypatch.setattr(sherpa, name="get_config", value=get_config)

    # Force reload of sherpa modules that might have already read the configuration
    from sherpa import plot
    from sherpa.astro import plot as astro_plot

    reload_module(plot)
    reload_module(astro_plot)

    # Force a reload, to make sure we always return a fresh instance, so we track the correct mock object
    from sherpa.plot import chips_backend
    reload_module(chips_backend)

    def fin():
        monkeypatch.undo()
        reload_module(sherpa)
        reload_module(plot)
        reload_module(astro_plot)
        reload_module(sherpa.all)
        reload_module(sherpa.astro.all)  # These are required because otherwise Python will not match imported classes.

    request.addfinalizer(fin)

    return chips_backend, chips
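
A minimal sketch of the module-injection trick above, without the pytest machinery: planting a MagicMock in sys.modules makes a later `import pychips` succeed and hand back the mock.

import sys
import mock

fake = mock.MagicMock()
sys.modules['pychips'] = fake          # any `import pychips` now gets the mock
try:
    import pychips
    pychips.add_curve([1, 2], [3, 4])  # attributes spring into existence
    fake.add_curve.assert_called_once_with([1, 2], [3, 4])
finally:
    del sys.modules['pychips']         # clean up, like monkeypatch.undo()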

Example 124

Project: edx-analytics-pipeline Source File: test_s3_util.py
    def _make_s3_generator(self, bucket_name, root, path_info, patterns):
        """Generates a list of matching S3 sources using a mock S3 connection."""
        s3_conn = MagicMock()
        s3_bucket = MagicMock()
        s3_conn.get_bucket = MagicMock(return_value=s3_bucket)
        target_list = [self._make_key("{root}/{path}".format(root=root, path=path), size)
                       for path, size in path_info.iteritems()]
        s3_bucket.list = MagicMock(return_value=target_list)
        print [(k.key, k.size) for k in target_list]

        s3_bucket.name = bucket_name
        source = "s3://{bucket}/{root}".format(bucket=bucket_name, root=root)
        generator = s3_util.generate_s3_sources(s3_conn, source, patterns)
        output = list(generator)
        return output

    def _run_without_filtering(self, bucket_name, root, path_info):
        """Runs generator and checks output."""
        patterns = ['*']
        output = self._make_s3_generator(bucket_name, root, path_info, patterns)
        self.assertEquals(len(output), len(path_info))
        expected = [(bucket_name, root, key) for key in path_info]
        self.assertEquals(set(output), set(expected))

    def test_normal_generate(self):
        bucket_name = "bucket_name"
        root = "root1/root2"
        path_info = {
            "subdir1/path1": 1000,
            "path2": 2000,
        }
        self._run_without_filtering(bucket_name, root, path_info)

    def test_generate_with_empty_root(self):
        bucket_name = "bucket_name"
        root = ""
        path_info = {
            "subdir1/path1": 1000,
            "path2": 2000,
        }
        self._run_without_filtering(bucket_name, root, path_info)

    def test_generate_with_pattern_filtering(self):
        bucket_name = "bucket_name"
        root = "root1/root2"
        path_info = {
            "subdir1/path1": 1000,
            "path2": 2000,
        }
        patterns = ['*1']
        output = self._make_s3_generator(bucket_name, root, path_info, patterns)
        self.assertEquals(len(output), 1)
        self.assertEquals(output, [(bucket_name, root, "subdir1/path1")])

    def test_generate_with_size_filtering(self):
        bucket_name = "bucket_name"
        root = "root1/root2"
        path_info = {
            "subdir1/path1": 1000,
            "path2": 0,
        }
        patterns = ['*1']
        output = self._make_s3_generator(bucket_name, root, path_info, patterns)
        self.assertEquals(len(output), 1)
        self.assertEquals(output, [(bucket_name, root, "subdir1/path1")])

    def test_generate_with_trailing_slash(self):
        bucket_name = "bucket_name"
        root = "root1/root2/"
        path_info = {
            "subdir1/path1": 1000,
            "path2": 2000,
        }
        patterns = ['*']
        output = self._make_s3_generator(bucket_name, root, path_info, patterns)
        self.assertEquals(len(output), 2)
        self.assertEquals(set(output), set([
            (bucket_name, root.rstrip('/'), "subdir1/path1"),
            (bucket_name, root.rstrip('/'), "path2")
        ]))
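
A minimal sketch of the mock-chain wiring above: return_value links one mock to the next, so production code that calls conn.get_bucket(...).list() receives canned key objects.

from mock import MagicMock

key = MagicMock()
key.key, key.size = 'root/path1', 1000

bucket = MagicMock()
bucket.list.return_value = [key]

conn = MagicMock()
conn.get_bucket.return_value = bucket

keys = conn.get_bucket('bucket_name').list()
assert [(k.key, k.size) for k in keys] == [('root/path1', 1000)]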

Example 125

Project: elastalert Source File: alerts_test.py
def test_jira_arbitrary_field_support():
    description_txt = "Description stuff goes here like a runbook link."
    rule = {
        'name': 'test alert',
        'jira_account_file': 'jirafile',
        'type': mock_rule(),
        'owner': 'the_owner',
        'jira_project': 'testproject',
        'jira_issuetype': 'testtype',
        'jira_server': 'jiraserver',
        'jira_label': 'testlabel',
        'jira_component': 'testcomponent',
        'jira_description': description_txt,
        'jira_watchers': ['testwatcher1', 'testwatcher2'],
        'jira_arbitrary_reference_string_field': '$owner$',
        'jira_arbitrary_string_field': 'arbitrary_string_value',
        'jira_arbitrary_string_array_field': ['arbitrary_string_value1', 'arbitrary_string_value2'],
        'jira_arbitrary_string_array_field_provided_as_single_value': 'arbitrary_string_value_in_array_field',
        'jira_arbitrary_number_field': 1,
        'jira_arbitrary_number_array_field': [2, 3],
        'jira_arbitrary_number_array_field_provided_as_single_value': 1,
        'jira_arbitrary_complex_field': 'arbitrary_complex_value',
        'jira_arbitrary_complex_array_field': ['arbitrary_complex_value1', 'arbitrary_complex_value2'],
        'jira_arbitrary_complex_array_field_provided_as_single_value': 'arbitrary_complex_value_in_array_field',
        'timestamp_field': '@timestamp',
        'alert_subject': 'Issue {0} occurred at {1}',
        'alert_subject_args': ['test_term', '@timestamp']
    }

    mock_priority = mock.MagicMock(id='5')

    mock_fields = [
        {'name': 'arbitrary reference string field', 'id': 'arbitrary_reference_string_field', 'schema': {'type': 'string'}},
        {'name': 'arbitrary string field', 'id': 'arbitrary_string_field', 'schema': {'type': 'string'}},
        {'name': 'arbitrary string array field', 'id': 'arbitrary_string_array_field', 'schema': {'type': 'array', 'items': 'string'}},
        {'name': 'arbitrary string array field provided as single value', 'id': 'arbitrary_string_array_field_provided_as_single_value', 'schema': {'type': 'array', 'items': 'string'}},
        {'name': 'arbitrary number field', 'id': 'arbitrary_number_field', 'schema': {'type': 'number'}},
        {'name': 'arbitrary number array field', 'id': 'arbitrary_number_array_field', 'schema': {'type': 'array', 'items': 'number'}},
        {'name': 'arbitrary number array field provided as single value', 'id': 'arbitrary_number_array_field_provided_as_single_value', 'schema': {'type': 'array', 'items': 'number'}},
        {'name': 'arbitrary complex field', 'id': 'arbitrary_complex_field', 'schema': {'type': 'ArbitraryType'}},
        {'name': 'arbitrary complex array field', 'id': 'arbitrary_complex_array_field', 'schema': {'type': 'array', 'items': 'ArbitraryType'}},
        {'name': 'arbitrary complex array field provided as single value', 'id': 'arbitrary_complex_array_field_provided_as_single_value', 'schema': {'type': 'array', 'items': 'ArbitraryType'}},
    ]

    with nested(
            mock.patch('elastalert.alerts.JIRA'),
            mock.patch('elastalert.alerts.yaml_loader')
    ) as (mock_jira, mock_open):
        mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'}
        mock_jira.return_value.priorities.return_value = [mock_priority]
        mock_jira.return_value.fields.return_value = mock_fields
        alert = JiraAlerter(rule)
        alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}])

    expected = [
        mock.call('jiraserver', basic_auth=('jirauser', 'jirapassword')),
        mock.call().priorities(),
        mock.call().fields(),
        mock.call().create_issue(
            issuetype={'name': 'testtype'},
            project={'key': 'testproject'},
            labels=['testlabel'],
            components=[{'name': 'testcomponent'}],
            description=mock.ANY,
            summary='Issue test_value occurred at 2014-10-31T00:00:00',
            arbitrary_reference_string_field='the_owner',
            arbitrary_string_field='arbitrary_string_value',
            arbitrary_string_array_field=['arbitrary_string_value1', 'arbitrary_string_value2'],
            arbitrary_string_array_field_provided_as_single_value=['arbitrary_string_value_in_array_field'],
            arbitrary_number_field=1,
            arbitrary_number_array_field=[2, 3],
            arbitrary_number_array_field_provided_as_single_value=[1],
            arbitrary_complex_field={'name': 'arbitrary_complex_value'},
            arbitrary_complex_array_field=[{'name': 'arbitrary_complex_value1'}, {'name': 'arbitrary_complex_value2'}],
            arbitrary_complex_array_field_provided_as_single_value=[{'name': 'arbitrary_complex_value_in_array_field'}],
        ),
        mock.call().add_watcher(mock.ANY, 'testwatcher1'),
        mock.call().add_watcher(mock.ANY, 'testwatcher2'),
    ]

    # We don't care about additional calls to mock_jira, such as __str__
    assert mock_jira.mock_calls[:6] == expected
    assert mock_jira.mock_calls[3][2]['description'].startswith(description_txt)

    # Reference an arbitrary string field that is not defined on the JIRA server
    rule['jira_nonexistent_field'] = 'nonexistent field value'

    with nested(
            mock.patch('elastalert.alerts.JIRA'),
            mock.patch('elastalert.alerts.yaml_loader')
    ) as (mock_jira, mock_open):
        mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'}
        mock_jira.return_value.priorities.return_value = [mock_priority]
        mock_jira.return_value.fields.return_value = mock_fields

        with pytest.raises(Exception) as exception:
            alert = JiraAlerter(rule)
            alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}])
        assert "Could not find a definition for the jira field 'nonexistent field'" in str(exception)

    del rule['jira_nonexistent_field']

    # Reference a watcher that does not exist
    rule['jira_watchers'] = 'invalid_watcher'

    with nested(
            mock.patch('elastalert.alerts.JIRA'),
            mock.patch('elastalert.alerts.yaml_loader')
    ) as (mock_jira, mock_open):
        mock_open.return_value = {'user': 'jirauser', 'password': 'jirapassword'}
        mock_jira.return_value.priorities.return_value = [mock_priority]
        mock_jira.return_value.fields.return_value = mock_fields

        # Cause add_watcher to raise, which most likely means that the user did not exist
        mock_jira.return_value.add_watcher.side_effect = Exception()

        with pytest.raises(Exception) as exception:
            alert = JiraAlerter(rule)
            alert.alert([{'test_term': 'test_value', '@timestamp': '2014-10-31T00:00:00'}])
        assert "Exception encountered when trying to add 'invalid_watcher' as a watcher. Does the user exist?" in str(exception)

Example 126

Project: barman Source File: test_server.py
    def test_replication_status(self, capsys):
        """
        Test management of pg_stat_archiver view output

        :param capsys: retrieve output from console

        """

        # Build a fake get_replication_stats record
        replication_stats_data = dict(
            pid=93275,
            usesysid=10,
            usename='postgres',
            application_name='replica',
            client_addr=None,
            client_hostname=None,
            client_port=-1,
            slot_name=None,
            backend_start=datetime.datetime(
                2016, 5, 6, 9, 29, 20, 98534,
                tzinfo=FixedOffsetTimezone(offset=120)),
            backend_xmin='940',
            state='streaming',
            sent_location='0/3005FF0',
            write_location='0/3005FF0',
            flush_location='0/3005FF0',
            replay_location='0/3005FF0',
            current_location='0/3005FF0',
            sync_priority=0,
            sync_state='async'
        )
        replication_stats_class = namedtuple("Record",
                                             replication_stats_data.keys())
        replication_stats_record = replication_stats_class(
            **replication_stats_data)

        # Prepare the server
        server = build_real_server(main_conf={'archiver': 'on'})
        server.postgres = MagicMock()
        server.postgres.get_replication_stats.return_value = [
            replication_stats_record]
        server.postgres.current_xlog_location = "AB/CDEF1234"

        # Execute the test (ALL)
        server.postgres.reset_mock()
        server.replication_status('all')
        (out, err) = capsys.readouterr()
        assert err == ''
        server.postgres.get_replication_stats.assert_called_once_with(
            PostgreSQLConnection.ANY_STREAMING_CLIENT)

        # Execute the test (WALSTREAMER)
        server.postgres.reset_mock()
        server.replication_status('wal-streamer')
        (out, err) = capsys.readouterr()
        assert err == ''
        server.postgres.get_replication_stats.assert_called_once_with(
            PostgreSQLConnection.WALSTREAMER)

        # Execute the test (failure: PostgreSQL too old)
        server.postgres.reset_mock()
        server.postgres.get_replication_stats.side_effect = \
            PostgresUnsupportedFeature('9.1')
        server.replication_status('all')
        (out, err) = capsys.readouterr()
        assert 'Requires PostgreSQL 9.1 or higher' in out
        assert err == ''
        server.postgres.get_replication_stats.assert_called_once_with(
            PostgreSQLConnection.ANY_STREAMING_CLIENT)

        # Execute the test (failure: superuser required)
        server.postgres.reset_mock()
        server.postgres.get_replication_stats.side_effect = \
            PostgresSuperuserRequired
        server.replication_status('all')
        (out, err) = capsys.readouterr()
        assert 'Requires superuser rights' in out
        assert err == ''
        server.postgres.get_replication_stats.assert_called_once_with(
            PostgreSQLConnection.ANY_STREAMING_CLIENT)

        # Test output reaction to missing attributes
        del replication_stats_data['slot_name']
        server.postgres.reset_mock()
        server.replication_status('all')
        (out, err) = capsys.readouterr()
        assert 'Replication slot' not in out
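
A minimal sketch of the failure injection above: assigning an exception instance (or class) to side_effect makes the next call raise it, and reset_mock() clears the recorded calls between scenarios.

from mock import MagicMock

postgres = MagicMock()
postgres.get_replication_stats.side_effect = RuntimeError('requires 9.1')

try:
    postgres.get_replication_stats()
except RuntimeError as exc:
    assert 'requires 9.1' in str(exc)

postgres.reset_mock()                              # forget recorded calls
postgres.get_replication_stats.side_effect = None  # back to plain return_value
postgres.get_replication_stats()                   # no longer raises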

Example 127

Project: XBlock Source File: test_core.py
def test_set_field_access():
    # Check that sets are correctly saved when not directly set
    class FieldTester(XBlock):
        """Test XBlock for field access testing"""
        field_a = Set(scope=Scope.settings)
        field_b = Set(scope=Scope.content, default=[1, 2, 3])
        field_c = Set(scope=Scope.content, default=[4, 5, 6])
        field_d = Set(scope=Scope.settings)

    field_tester = FieldTester(MagicMock(), DictFieldData({'field_a': [200], 'field_b': [11, 12, 13]}), Mock())

    # Check initial values have been set properly
    assert_equals(set([200]), field_tester.field_a)
    assert_equals(set([11, 12, 13]), field_tester.field_b)
    assert_equals(set([4, 5, 6]), field_tester.field_c)
    assert_equals(set(), field_tester.field_d)

    # Update the fields
    field_tester.field_a.add(1)
    field_tester.field_b.add(14)
    field_tester.field_c.remove(5)
    field_tester.field_d.add(1)

    # The fields should be updated in the cache, but /not/ in the underlying kvstore.
    assert_equals(set([200, 1]), field_tester.field_a)
    assert_equals(set([11, 12, 13, 14]), field_tester.field_b)
    assert_equals(set([4, 6]), field_tester.field_c)
    assert_equals(set([1]), field_tester.field_d)

    # Examine model data directly
    #  Caveat: there's not a clean way to copy the originally provided values for `field_a` and `field_b`
    #  when we instantiate the XBlock. So, the values for those two in both `_field_data` and `_field_data_cache`
    #  point at the same object. Thus, `field_a` and `field_b` actually have the correct values in
    #  `_field_data` right now. `field_c` does not, because it has never been written to the `_field_data`.
    assert_false(field_tester._field_data.has(field_tester, 'field_c'))
    assert_false(field_tester._field_data.has(field_tester, 'field_d'))

    # save the XBlock
    field_tester.save()

    # verify that the fields have been updated correctly
    assert_equals(set([200, 1]), field_tester.field_a)
    assert_equals(set([11, 12, 13, 14]), field_tester.field_b)
    assert_equals(set([4, 6]), field_tester.field_c)
    assert_equals(set([1]), field_tester.field_d)
    # Now, the fields should be updated in the underlying kvstore

    assert_equals(set([200, 1]), field_tester._field_data.get(field_tester, 'field_a'))
    assert_equals(set([11, 12, 13, 14]), field_tester._field_data.get(field_tester, 'field_b'))
    assert_equals(set([4, 6]), field_tester._field_data.get(field_tester, 'field_c'))
    assert_equals(set([1]), field_tester._field_data.get(field_tester, 'field_d'))

Example 128

Project: paasta Source File: test_mesos_tools.py
@mark.parametrize('test_case', [
    # task_id, file1, file2, nlines, raise_what
    ['a_task',  # test_case0 - OK
     ['stdout', [str(x) for x in range(20)]],
     ['stderr', [str(x) for x in range(30)]],
     10,
     None],
    ['a_task',  # test_case1 - OK, short stdout, swapped stdout/stderr
     ['stderr', [str(x) for x in range(30)]],
     ['stdout', ['1', '2']],
     10,
     None],
    ['a_task', None, None, 10, mesos.exceptions.MasterNotAvailableException],
    ['a_task', None, None, 10, mesos.exceptions.SlaveDoesNotExist],
    ['a_task', None, None, 10, mesos.exceptions.TaskNotFoundException],
    ['a_task', None, None, 10, mesos.exceptions.FileNotFoundForTaskException],
    ['a_task', None, None, 10, utils.TimeoutError]
])
def test_format_stdstreams_tail_for_task(
    test_case,
):
    def gen_mesos_cli_fobj(file_path, file_lines):
        """mesos.cli.cluster.files (0.1.5),
        returns a list of mesos.cli.mesos_file.File
        `File` is an iterator-like object.
        """
        fake_iter = mock.MagicMock()
        fake_iter.return_value = reversed(file_lines)
        fobj = mock.create_autospec(mesos.mesos_file.File)
        fobj.path = file_path
        fobj.__reversed__ = fake_iter
        return fobj

    def get_short_task_id(task_id):
        return task_id

    def gen_mock_cluster_files(file1, file2, raise_what):
        def retfunc(*args, **kwargs):
            # If we're asked to raise a particular exception we do so.
            # .message is set to the exception class name.
            if raise_what:
                raise raise_what(raise_what)
            return [
                gen_mesos_cli_fobj(file1[0], file1[1]),
                gen_mesos_cli_fobj(file2[0], file2[1])
            ]
        mock_cluster_files = mock.MagicMock()
        mock_cluster_files.side_effect = retfunc
        return mock_cluster_files

    def gen_output(task_id, file1, file2, nlines, raise_what):
        error_message = PaastaColors.red("      couldn't read stdout/stderr for %s (%s)")
        output = []
        if not raise_what:
            files = [file1, file2]
            # reverse sort because stdout is supposed to always come before stderr in the output
            files.sort(key=lambda f: f[0], reverse=True)
            for f in files:
                output.append(PaastaColors.blue("      %s tail for %s" % (f[0], task_id)))
                output.extend(f[1][-nlines:])
                output.append(PaastaColors.blue("      %s EOF" % f[0]))
        else:
            if raise_what == utils.TimeoutError:
                raise_what = 'timeout'
            output.append(error_message % (task_id, raise_what))
        return output

    task_id, file1, file2, nlines, raise_what = test_case

    mock_cluster_files = gen_mock_cluster_files(file1, file2, raise_what)
    fake_task = {'id': task_id}
    expected = gen_output(task_id, file1, file2, nlines, raise_what)
    with mock.patch('paasta_tools.mesos_tools.get_mesos_config', autospec=True):
        with mock.patch('paasta_tools.mesos_tools.cluster.get_files_for_tasks', mock_cluster_files, autospec=None):
            result = mesos_tools.format_stdstreams_tail_for_task(fake_task, get_short_task_id)
            assert result == expected
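
A minimal sketch of why a MagicMock (rather than a plain Mock) is used for the fake File above: MagicMock implements the supported magic methods on the mock's class, so protocol functions such as iter() can resolve them; shown here with __iter__.

from mock import MagicMock

fobj = MagicMock()
fobj.__iter__.return_value = iter(['line1', 'line2'])

assert list(fobj) == ['line1', 'line2']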

Example 129

Project: catsnap Source File: test_tag_batch.py
    @patch('catsnap.batch.tag_batch.BatchWriteList')
    @patch('catsnap.batch.tag_batch.Client')
    @patch('catsnap.batch.tag_batch.get_item_batch')
    def test_add_image_to_tags(self, get_item_batch, Client, BatchWriteList):
        existing_tag_item = MagicMock()
        def existing_getitem(key):
            if key == 'filenames':
                return '["facade"]'
            elif key == HASH_KEY:
                return 'bleep'
            else:
                raise ValueError(key)
        existing_tag_item.__getitem__.side_effect = existing_getitem
        get_item_batch.return_value = [ existing_tag_item ]
        new_tag_item = MagicMock()
        new_tag_item.__getitem__.return_value = 'bloop'
        table = Mock()
        table.new_item.return_value = new_tag_item
        table.name = 'thetablename'
        client = Mock()
        client.table.return_value = table
        dynamo = Mock()
        client.get_dynamodb.return_value = dynamo
        Client.return_value = client
        write_list = Mock()
        first_response = {
                'UnprocessedItems': { 'thetablename': [
                        {'PutRequest': {
                            'Item': {
                                'tag': 'bloop',
                                'filenames': '["beefcafe"]'}}}]},
                'Responses': {'thetablename': {'ConsumedCapacityUnits': 5.0}}}
        second_response = {'Responses': {'thetablename':
                {'ConsumedCapacityUnits': 5.0}}}
        write_list.submit.side_effect = [first_response, second_response]
        BatchWriteList.return_value = write_list

        add_image_to_tags('beefcafe', ['bleep', 'bloop'])

        existing_tag_item.__setitem__.assert_called_with('filenames',
                '["facade", "beefcafe"]')
        get_item_batch.assert_called_with(
                ['bleep', 'bloop'], 'tag', ['filenames'])
        table.new_item.assert_called_with(hash_key='bloop',
                attrs={'filenames':'["beefcafe"]'})
        BatchWriteList.assert_called_with(dynamo)
        write_list.add_batch.assert_has_calls([
                call(table, puts=[existing_tag_item, new_tag_item]),
                call(table, puts=[new_tag_item])])
        eq_(write_list.submit.call_count, 2)
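
A minimal sketch of the container-protocol mocking above: __getitem__ gets a side_effect so reads are scripted, and writes are checked afterwards through __setitem__.

from mock import MagicMock

item = MagicMock()
item.__getitem__.side_effect = lambda key: {'filenames': '["facade"]'}[key]

assert item['filenames'] == '["facade"]'
item['filenames'] = '["facade", "beefcafe"]'  # recorded, not actually stored
item.__setitem__.assert_called_with('filenames', '["facade", "beefcafe"]')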

Example 130

Project: st2 Source File: test_paramiko_ssh.py
    @patch('paramiko.SSHClient', Mock)
    @patch.object(ParamikoSSHClient, '_consume_stdout',
                  MagicMock(return_value=StringIO('')))
    @patch.object(ParamikoSSHClient, '_consume_stderr',
                  MagicMock(return_value=StringIO('')))
    @patch.object(os.path, 'exists', MagicMock(return_value=True))
    @patch.object(os, 'stat', MagicMock(return_value=None))
    @patch.object(ParamikoSSHClient, '_is_key_file_needs_passphrase',
                  MagicMock(return_value=False))
    def test_sftp_connection_is_only_established_if_required(self):
        # Verify that SFTP connection is lazily established only if and when needed.
        conn_params = {'hostname': 'dummy.host.org',
                       'username': 'ubuntu'}

        # Verify sftp connection and client hasn't been established yet
        client = ParamikoSSHClient(**conn_params)
        client.connect()

        self.assertTrue(client.sftp_client is None)

        # run method doesn't require sftp access so it shouldn't establish connection
        client = ParamikoSSHClient(**conn_params)
        client.connect()
        client.run(cmd='whoami')

        self.assertTrue(client.sftp_client is None)

        # Methods below require SFTP access, so they should cause the SFTP connection to be established
        # put
        client = ParamikoSSHClient(**conn_params)
        client.connect()
        path = '/root/random_script.sh'
        client.put(path, path, mirror_local_mode=False)

        self.assertTrue(client.sftp_client is not None)

        # exists
        client = ParamikoSSHClient(**conn_params)
        client.connect()
        client.exists('/root/somepath.txt')

        self.assertTrue(client.sftp_client is not None)

        # mkdir
        client = ParamikoSSHClient(**conn_params)
        client.connect()
        client.mkdir('/root/somedirfoo')

        self.assertTrue(client.sftp_client is not None)

        # Verify close doesn't throw if SFTP connection is not established
        client = ParamikoSSHClient(**conn_params)
        client.connect()

        self.assertTrue(client.sftp_client is None)
        client.close()

        # Verify SFTP connection is closed if it's opened
        client = ParamikoSSHClient(**conn_params)
        client.connect()
        client.mkdir('/root/somedirfoo')

        self.assertTrue(client.sftp_client is not None)
        client.close()

        self.assertEqual(client.sftp_client.close.call_count, 1)
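
A minimal sketch of what each @patch.object decorator above does: the named attribute is replaced on the class for the duration of the test and restored afterwards (shown here with the context-manager form; the Greeter class is hypothetical).

from mock import MagicMock, patch

class Greeter(object):
    def greet(self):
        return 'hello'

with patch.object(Greeter, 'greet', MagicMock(return_value='mocked')):
    assert Greeter().greet() == 'mocked'

assert Greeter().greet() == 'hello'  # original method restored on exit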

Example 131

Project: faitout Source File: test_faitoutlib.py
    def test_get_new_connection(self):
        """ Test the get_new_connection method of faitoutlib. """
        create_connections(self.session)

        faitoutlib.create_database = mock.MagicMock()

        self.assertEqual(model.Connection.by_ip(
            self.session, '127.0.0.1', cnt=True), 3)
        # Fails as 127.0.0.1 already has 3 active connections
        self.assertRaises(
            faitoutlib.TooManyConnectionException,
            faitoutlib.get_new_connection,
            self.session,
            admin_engine=None,
            remote_ip='127.0.0.1',
            host='localhost',
            port=5432,
            max_con=3,
            outformat='text',
            unlimited=False
            )
        self.assertEqual(model.Connection.by_ip(
            self.session, '127.0.0.1', cnt=True), 3)

        self.assertEqual(model.Connection.by_ip(
            self.session, '127.0.0.2', cnt=True), 0)
        connection = faitoutlib.get_new_connection(
            self.session,
            admin_engine=None,
            remote_ip='127.0.0.2',
            host='localhost',
            port=5432,
            max_con=3,
            outformat='text',
            unlimited=False
            )
        self.assertTrue(connection.startswith('postgresql://'))
        self.assertTrue('localhost:5432' in connection)
        self.assertEqual(model.Connection.by_ip(
            self.session, '127.0.0.2', cnt=True), 1)

        self.assertEqual(model.Connection.by_ip(
            self.session, '127.0.0.2', cnt=True), 1)
        connection = faitoutlib.get_new_connection(
            self.session,
            admin_engine=None,
            remote_ip='127.0.0.2',
            host='localhost',
            port=5432,
            max_con=3,
            outformat='json',
            unlimited=False
            )
        self.assertEqual(
            sorted(connection.keys()),
            ['dbname', 'host', 'password', 'port', 'username'])
        self.assertEqual(connection['host'], 'localhost')
        self.assertEqual(connection['port'], 5432)
        self.assertEqual(model.Connection.by_ip(
            self.session, '127.0.0.2', cnt=True), 2)

        self.assertEqual(model.Connection.by_ip(
            self.session, '127.0.0.1', cnt=True), 3)
        connection = faitoutlib.get_new_connection(
            self.session,
            admin_engine=None,
            remote_ip='127.0.0.1',
            host='localhost',
            port=5432,
            max_con=3,
            outformat='json',
            unlimited=True
            )
        self.assertEqual(model.Connection.by_ip(
            self.session, '127.0.0.1', cnt=True), 4)
        self.assertEqual(
            sorted(connection.keys()),
            ['dbname', 'host', 'password', 'port', 'username'])
        self.assertEqual(connection['host'], 'localhost')
        self.assertEqual(connection['port'], 5432)

Example 132

Project: subscription-manager Source File: fixture.py
    def setUp(self):
        self.addCleanup(patch.stopall)

        # Never attempt to use the actual managercli.cfg which points to a
        # real file in etc.
        cfg_patcher = patch.object(subscription_manager.managercli, 'cfg', new=stubs.config.CFG)
        self.mock_cfg = cfg_patcher.start()

        # By default mock that we are registered. Individual test cases
        # can override if they are testing disconnected scenario.
        id_mock = NonCallableMock(name='FixtureIdentityMock')
        id_mock.exists_and_valid = Mock(return_value=True)
        id_mock.uuid = 'fixture_identity_mock_uuid'
        id_mock.name = 'fixture_identity_mock_name'
        id_mock.cert_dir_path = "/not/a/real/path/to/pki/consumer/"
        id_mock.keypath.return_value = "/not/a/real/key/path"
        id_mock.certpath.return_value = "/not/a/real/cert/path"

        # Don't really care about date ranges here:
        self.mock_calc = NonCallableMock()
        self.mock_calc.calculate.return_value = None

        # Avoid trying to read real /etc/yum.repos.d/redhat.repo
        self.mock_repofile_path_exists_patcher = patch('subscription_manager.repolib.RepoFile.path_exists')
        mock_repofile_path_exists = self.mock_repofile_path_exists_patcher.start()
        mock_repofile_path_exists.return_value = True

        inj.provide(inj.IDENTITY, id_mock)
        inj.provide(inj.PRODUCT_DATE_RANGE_CALCULATOR, self.mock_calc)

        inj.provide(inj.ENTITLEMENT_STATUS_CACHE, stubs.StubEntitlementStatusCache())
        inj.provide(inj.PROD_STATUS_CACHE, stubs.StubProductStatusCache())
        inj.provide(inj.OVERRIDE_STATUS_CACHE, stubs.StubOverrideStatusCache())
        inj.provide(inj.RELEASE_STATUS_CACHE, stubs.StubReleaseStatusCache())
        inj.provide(inj.PROFILE_MANAGER, stubs.StubProfileManager())
        # By default set up an empty stub entitlement and product dir.
        # Tests need to modify or create their own but nothing should hit
        # the system.
        self.ent_dir = stubs.StubEntitlementDirectory()
        inj.provide(inj.ENT_DIR, self.ent_dir)
        self.prod_dir = stubs.StubProductDirectory()
        inj.provide(inj.PROD_DIR, self.prod_dir)

        # Installed products manager needs PROD_DIR injected first
        inj.provide(inj.INSTALLED_PRODUCTS_MANAGER, stubs.StubInstalledProductsManager())

        self.stub_cp_provider = stubs.StubCPProvider()
        self._release_versions = []
        self.stub_cp_provider.content_connection.get_versions = self._get_release_versions

        inj.provide(inj.CP_PROVIDER, self.stub_cp_provider)
        inj.provide(inj.CERT_SORTER, stubs.StubCertSorter())

        # setup and mock the plugin_manager
        plugin_manager_mock = MagicMock(name='FixturePluginManagerMock')
        plugin_manager_mock.runiter.return_value = iter([])
        inj.provide(inj.PLUGIN_MANAGER, plugin_manager_mock)
        inj.provide(inj.DBUS_IFACE, Mock(name='FixtureDbusIfaceMock'))

        pooltype_cache = Mock()
        inj.provide(inj.POOLTYPE_CACHE, pooltype_cache)
        # don't use file based locks for tests
        inj.provide(inj.ACTION_LOCK, RLock)

        self.stub_facts = stubs.StubFacts()
        inj.provide(inj.FACTS, self.stub_facts)

        self.dbus_patcher = patch('subscription_manager.managercli.CliCommand._request_validity_check')
        self.dbus_patcher.start()

        # No tests should be trying to connect to any configure or test server
        # so really, everything needs this mock. May need to be in __init__, or
        # better, all test classes need to use SubManFixture
        self.is_valid_server_patcher = patch("subscription_manager.managercli.is_valid_server_info")
        is_valid_server_mock = self.is_valid_server_patcher.start()
        is_valid_server_mock.return_value = True

        # No tests should be trying to test the proxy connection
        # so really, everything needs this mock. May need to be in __init__, or
        # better, all test classes need to use SubManFixture
        self.test_proxy_connection_patcher = patch("subscription_manager.managercli.CliCommand.test_proxy_connection")
        test_proxy_connection_mock = self.test_proxy_connection_patcher.start()
        test_proxy_connection_mock.return_value = True

        self.files_to_cleanup = []
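
One detail in the fixture above worth isolating: NonCallableMock behaves like a Mock for attribute access but raises TypeError if the object itself is called, which catches tests that accidentally use an identity object as a function. A minimal sketch:

from mock import NonCallableMock

identity = NonCallableMock(name='FixtureIdentityMock')
identity.uuid = 'fixture_identity_mock_uuid'
assert identity.uuid == 'fixture_identity_mock_uuid'

try:
    identity()
except TypeError:
    pass  # calling a NonCallableMock fails by design
else:
    raise AssertionError('should not be callable')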

Example 133

Project: meld Source File: test_filediff.py
@pytest.mark.parametrize("text, ignored_ranges, expected_text", [
    #    0123456789012345678901234567890123456789012345678901234567890123456789
    # Matching without groups
    (
        "# asdasdasdasdsad",
        [(0, 17)],
        "",
    ),
    # Matching with single group
    (
        "asdasdasdasdsab",
        [(1, 14)],
        "ab",
    ),
    # Matching with multiple groups
    (
        "xasdyasdz",
        [(1, 4), (5, 8)],
        "xyz",
    ),
    # Matching with multiple partially overlapping filters
    (
        "qaqxqbyqzq",
        [(2, 6), (7, 8)],
        "qayzq",
    ),
    # Matching with multiple fully overlapping filters
    (
        "qaqxqybqzq",
        [(2, 8)],
        "qazq",
    ),
    # Matching with and without groups, with single dominated match
    (
        "# asdasdasdasdsab",
        [(0, 17)],
        "",
    ),
    # Matching with and without groups, with partially overlapping filters
    (
        "/*a*/ub",
        [(0, 6)],
        "b",
    ),
    # Non-matching with groups
    (
        "xasdyasdx",
        [],
        "xasdyasdx",
    ),
    # Multiple lines with non-overlapping filters
    (
        "#ab\na2b",
        [(0, 3), (5, 6)],
        "\nab",
    ),
    # CVS keyword
    (
        "$Author: John Doe $",
        [(8, 18)],
        "$Author:$",
    ),

])
def test_filter_text(text, ignored_ranges, expected_text):
    filter_patterns = [
        '#.*',
        '/\*.*\*/',
        'a(.*)b',
        'x(.*)y(.*)z',
        '\$\w+:([^\n$]+)\$'
    ]
    filters = [
        FilterEntry.new_from_gsetting(("name", True, f), FilterEntry.REGEX)
        for f in filter_patterns
    ]

    filediff = mock.MagicMock()
    filediff.text_filters = filters
    filter_text = FileDiff._filter_text

    buf = Gtk.TextBuffer()
    buf.create_tag("inline")
    buf.create_tag("dimmed")
    buf.set_text(text)
    start, end = buf.get_bounds()

    text = filter_text(
        filediff, buf.get_text(start, end, False), buf, start, end)

    # Find ignored ranges
    tag = buf.get_tag_table().lookup("dimmed")
    toggles = []
    it = start.copy()
    if it.toggles_tag(tag):
        toggles.append(it.get_offset())
    while it.forward_to_tag_toggle(tag):
        toggles.append(it.get_offset())
    toggles = list(zip(toggles[::2], toggles[1::2]))

    print("Text:", text)
    print("Toggles:", toggles)

    assert toggles == ignored_ranges
    assert text == expected_text
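
A minimal sketch of the mock-as-self trick above: pulling the function off the class and passing a MagicMock as the first argument lets a single method run with every attribute it touches faked. The Differ class is hypothetical.

from mock import MagicMock

class Differ(object):
    def shout(self, text):
        return self.prefix + text.upper()

fake_self = MagicMock()
fake_self.prefix = '>> '

assert Differ.shout(fake_self, 'hi') == '>> HI'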

Example 134

Project: synapsePythonClient Source File: integration_test.py
def test_login():
    try:
        # Test that we fail gracefully with wrong user
        assert_raises(SynapseAuthenticationError, syn.login, 'asdf', 'notarealpassword')

        config = configparser.ConfigParser()
        config.read(client.CONFIG_FILE)
        username = config.get('authentication', 'username')
        password = config.get('authentication', 'password')
        sessionToken = syn._getSessionToken(username, password)
        
        # Simple login with ID + PW
        syn.login(username, password, silent=True)
        
        # Login with ID + API key
        syn.login(email=username, apiKey=base64.b64encode(syn.apiKey), silent=True)
        syn.logout(forgetMe=True)
        
        # Config file is read-only for the client, so it must be mocked!
        if (sys.version < '3'):
            configparser_package_name = 'ConfigParser'
        else:
            configparser_package_name = 'configparser'
        with patch("%s.ConfigParser.has_option" % configparser_package_name) as config_has_mock, patch("synapseclient.Synapse._readSessionCache") as read_session_mock:

            config_has_mock.return_value = False
            read_session_mock.return_value = {}
            
            # Login with given bad session token, 
            # It should REST PUT the token and fail
            # Then keep going and, due to mocking, fail to read any credentials
            assert_raises(SynapseAuthenticationError, syn.login, sessionToken="Wheeeeeeee")
            assert config_has_mock.called
            
            # Login with no credentials 
            assert_raises(SynapseAuthenticationError, syn.login)
            
            config_has_mock.reset_mock()
            config_has_mock.side_effect = lambda section, option: section == "authentication" and option == "sessiontoken"
            with patch("%s.ConfigParser.get" % configparser_package_name) as config_get_mock:

                # Login with a session token from the config file
                config_get_mock.return_value = sessionToken
                syn.login(silent=True)
                
                # Login with a bad session token from the config file
                config_get_mock.return_value = "derp-dee-derp"
                assert_raises(SynapseAuthenticationError, syn.login)
        
        # Login with session token
        syn.login(sessionToken=sessionToken, rememberMe=True, silent=True)
        
        # Login as the most recent user
        with patch('synapseclient.Synapse._readSessionCache') as read_session_mock:
            dict_mock = MagicMock()
            read_session_mock.return_value = dict_mock
            dict_mock.__contains__.side_effect = lambda x: x == '<mostRecent>'
            dict_mock.__getitem__.return_value = syn.username
            syn.login(silent=True)
            dict_mock.__getitem__.assert_called_once_with('<mostRecent>')
        
        # Login with ID only
        syn.login(username, silent=True)
        syn.logout(forgetMe=True)
    except configparser.Error:
        print("To fully test the login method, please supply a username and password in the configuration file")

    finally:
        # Login with config file
        syn.login(rememberMe=True, silent=True)
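
A detail worth isolating from the test above: dict_mock is a MagicMock whose magic methods __contains__ and __getitem__ are configured directly, something a plain Mock does not support. A small self-contained sketch of the same idea (the key names are invented):

from mock import MagicMock

session_cache = MagicMock()
session_cache.__contains__.side_effect = lambda key: key == '<mostRecent>'
session_cache.__getitem__.return_value = 'cached-user'

assert '<mostRecent>' in session_cache
assert 'other-key' not in session_cache
assert session_cache['<mostRecent>'] == 'cached-user'
session_cache.__getitem__.assert_called_once_with('<mostRecent>')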

Example 135

Project: ZeroNet Source File: TestSiteDownload.py
Function: test_update
    def testUpdate(self, file_server, site, site_temp):
        file_server.ip_incoming = {}  # Reset flood protection

        assert site.storage.directory == config.data_dir + "/" + site.address
        assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer("127.0.0.1", 1545)
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client

        # Don't try to find peers from the net
        site.announce = mock.MagicMock(return_value=True)
        site_temp.announce = mock.MagicMock(return_value=True)

        # Connect peers
        site_temp.addPeer("127.0.0.1", 1544)

        # Download site from site to site_temp
        site_temp.download(blind_includes=True).join(timeout=5)

        # Update file
        data_original = site.storage.open("data/data.json").read()
        data_new = data_original.replace('"ZeroBlog"', '"UpdatedZeroBlog"')
        assert data_original != data_new

        site.storage.open("data/data.json", "wb").write(data_new)

        assert site.storage.open("data/data.json").read() == data_new
        assert site_temp.storage.open("data/data.json").read() == data_original

        site.log.info("Publish new data.json without patch")
        # Publish without patch
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
            site.publish()
            time.sleep(0.1)
            site_temp.download(blind_includes=True).join(timeout=5)
            assert len([request for request in requests if request[0] in ("getFile", "streamFile")]) == 1

        assert site_temp.storage.open("data/data.json").read() == data_new

        # Close connection to avoid update spam limit
        site.peers.values()[0].remove()
        site.addPeer("127.0.0.1", 1545)
        site_temp.peers.values()[0].ping()  # Connect back
        time.sleep(0.1)

        # Update with patch
        data_new = data_original.replace('"ZeroBlog"', '"PatchedZeroBlog"')
        assert data_original != data_new

        site.storage.open("data/data.json-new", "wb").write(data_new)

        assert site.storage.open("data/data.json-new").read() == data_new
        assert site_temp.storage.open("data/data.json").read() != data_new

        # Generate diff
        diffs = site.content_manager.getDiffs("content.json")
        assert not site.storage.isFile("data/data.json-new")  # New data file removed
        assert site.storage.open("data/data.json").read() == data_new  # -new postfix removed
        assert "data/data.json" in diffs
        assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', ['\t"title": "PatchedZeroBlog",\n']), ('=', 31102)]

        # Publish with patch
        site.log.info("Publish new data.json with patch")
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
            site.publish(diffs=diffs)
            site_temp.download(blind_includes=True).join(timeout=5)
            assert len([request for request in requests if request[0] in ("getFile", "streamFile")]) == 0

        assert site_temp.storage.open("data/data.json").read() == data_new

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]
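
testUpdate keeps the test offline by overwriting site.announce with mock.MagicMock(return_value=True) on the instance. The same one-line trick in isolation (Peer is a hypothetical class):

import mock

class Peer(object):
    def announce(self):
        raise RuntimeError("would hit the network")

peer = Peer()
peer.announce = mock.MagicMock(return_value=True)  # patch just this instance
assert peer.announce() is True
peer.announce.assert_called_once_with()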

Example 136

Project: cloudify-cli Source File: test_profiles.py
    def test_export_profiles_with_keys(self):
        self.client.manager.get_status = MagicMock()
        self.client.manager.get_context = MagicMock(
            return_value={
                'name': 'name',
                'context': {}}
        )
        fd1, profiles_archive = tempfile.mkstemp()
        fd2, key = tempfile.mkstemp()
        os.close(fd1)
        os.close(fd2)
        with open(key, 'w') as f:
            f.write('aaa')
        self.use_manager(ssh_key_path=key)
        self.invoke('profiles list')
        try:
            self.invoke('cfy profiles export -o {0} --include-keys'.format(
                profiles_archive))
            with closing(tarfile.open(name=profiles_archive)) as tar:
                members = [member.name for member in tar.getmembers()]
            self.assertIn('profiles/10.10.1.10/context', members)
            self.assertIn('profiles/{0}/{1}.10.10.1.10.profile'.format(
                profiles.EXPORTED_KEYS_DIRNAME,
                os.path.basename(key)), members)
            cfy.purge_dot_cloudify()
            os.remove(key)
            self.assertFalse(os.path.isdir(env.PROFILES_DIR))

            # First make sure that the ssh keys message is being logged
            self.invoke('cfy init')
            outcome = self.invoke(
                'cfy profiles import {0}'
                .format(profiles_archive)
            )
            self.assertIn(
                'The profiles archive you provided contains ssh keys',
                outcome.logs
            )

            # Then actually import the profile with the keys
            cfy.purge_dot_cloudify()
            self.invoke('cfy init')
            self.invoke(
                'cfy profiles import {0} --include-keys'
                .format(profiles_archive)
            )

            self.assertTrue(os.path.isfile(
                os.path.join(env.PROFILES_DIR, '10.10.1.10', 'context')))
            self.assertTrue(os.path.isfile(key))
        finally:
            os.remove(key)
            os.remove(profiles_archive)

Example 137

Project: golem Source File: test_ranking.py
    def test_without_reactor(self):
        r = Ranking(MagicMock(spec=Client))
        r.client.get_neighbours_degree.return_value = {'ABC': 4, 'JKL': 2, 'MNO': 5}
        r.client.collect_stopped_peers.return_value = set()
        reactor = MagicMock()
        r.run(reactor)
        assert r.reactor == reactor
        r.increase_trust("ABC", RankingStats.computed, 1)
        r.increase_trust("DEF", RankingStats.requested, 1)
        r.increase_trust("DEF", RankingStats.payment, 1)
        r.increase_trust("GHI", RankingStats.resource, 1)
        r.decrease_trust("DEF", RankingStats.computed, 1)
        r.decrease_trust("XYZ", RankingStats.wrong_computed, 1)
        r.decrease_trust("XYZ", RankingStats.requested, 1)
        r.increase_trust("XYZ", RankingStats.requested, 1)
        r.decrease_trust("XYZ", RankingStats.payment, 1)
        r.decrease_trust("DEF", RankingStats.resource, 1)
        with self.assertLogs(logger, level="WARNING"):
            r.increase_trust("XYZ", "UNKNOWN", 1)
        with self.assertLogs(logger, level="WARNING"):
            r.decrease_trust("XYZ", "UNKNOWN", 1)

        r._Ranking__init_stage()
        assert not r.finished
        assert not r.global_finished
        assert r.step == 0
        assert len(r.finished_neighbours) == 0
        for v in r.working_vec.itervalues():
            assert v[0][1] == 1.0
            assert v[1][1] == 1.0
        assert r.working_vec["ABC"][0][0] > 0.0
        assert r.working_vec["ABC"][1][0] == 0.0
        assert r.working_vec["DEF"][0][0] < 0.0
        assert r.working_vec["DEF"][1][0] > 0.0
        assert r.working_vec["GHI"][0][0] == 0.0
        assert r.working_vec["GHI"][1][0] == 0.0
        assert r.working_vec["XYZ"][0][0] < 0.0
        assert r.working_vec["XYZ"][1][0] < 0.0

        assert r.prevRank["ABC"][0] > 0
        assert r.prevRank["ABC"][1] == 0
        assert r.prevRank["DEF"][0] < 0
        assert r.prevRank["DEF"][1] > 0
        assert r.prevRank["GHI"][0] == 0
        assert r.prevRank["GHI"][1] == 0
        assert r.prevRank["XYZ"][0] < 0
        assert r.prevRank["XYZ"][1] < 0

        r._Ranking__new_round()
        assert set(r.neighbours) == {'ABC', 'JKL', 'MNO'}
        assert r.k == 1
        assert r.step == 1
        assert len(r.received_gossip[0]) == 4
        found = False
        for gossip in r.received_gossip[0]:
            if gossip[0] == "DEF":
                found = True
                assert gossip[1][0][0] < 0
                assert gossip[1][0][0] > r.working_vec["DEF"][0][0]
                assert gossip[1][0][1] == 0.5
                assert gossip[1][1][0] > 0
                assert gossip[1][0][0] < r.working_vec["DEF"][1][0]
                assert gossip[1][0][1] == 0.5
        assert found
        assert r.client.send_gossip.called
        assert r.client.send_gossip.call_args[0][0] == r.received_gossip[0]
        assert r.client.send_gossip.call_args[0][1][0] in ["ABC", "JKL", "MNO"]

        r.client.collect_neighbours_loc_ranks.return_value = [['ABC', 'XYZ', [-0.2, -0.5]],
                                                              ['JKL', 'PQR', [0.8, 0.7]]]
        r.sync_network()

        r.client.collect_gossip.return_value = [[["MNO", [[0.2, 0.2], [-0.1, 0.3]]],
                                                ["ABC", [[0.3, 0.5], [0.3, 0.5]]]]]
        r._Ranking__end_round()
        assert len(r.prevRank) == 4
        assert len(r.received_gossip) == 0
        assert len(r.working_vec) == 5
        assert r.working_vec["ABC"][0][0] > r.prevRank["ABC"][0]
        assert r.working_vec["MNO"][1][0] < 0.0
        assert not r.finished
        assert not r.global_finished

        r._Ranking__make_break()
        r._Ranking__new_round()
        assert r.step == 2
        r.client.collect_gossip.return_value = []
        r._Ranking__end_round()
        assert r.finished
        r.client.send_stop_gossip.assert_called_with()
        r.client.collect_stopped_peers.return_value = {"ABC", "JKL"}
        r._Ranking__make_break()
        assert not r.global_finished
        r.client.collect_stopped_peers.return_value = {"MNO"}
        r._Ranking__make_break()
        assert r.global_finished
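
The Ranking above is built around MagicMock(spec=Client): a spec'd mock only exposes attributes the real class has, so a typo in the test fails immediately instead of silently returning a child mock. A tiny sketch (this Client is a stand-in, not golem's):

from mock import MagicMock

class Client(object):
    def get_neighbours_degree(self):
        pass

client = MagicMock(spec=Client)
client.get_neighbours_degree.return_value = {'ABC': 4}
assert client.get_neighbours_degree() == {'ABC': 4}

try:
    client.no_such_method
except AttributeError:
    pass  # spec=Client rejects attributes the real class lacks
else:
    raise AssertionError("expected AttributeError")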

Example 138

Project: hyclops Source File: test_vsphere.py
Function: set_libcloud_mock
    def _set_libcloud_mock(self):
        MockVSphereNodeDriver.clear_mock()
        node = MockNode(**{
            "id": "vsphere_uuid",
            "name": "vSphereVM",
            "state": 0,
            "public_ips": [],
            "private_ips": [],
            "driver": self.driver,
            "extra": {
                "managedObjectReference": MagicMock(),
                "vmpath": "[datastore1] /foo/bar.vmx",
                "status": "running",
                "cpu": 1,
                "cpu_usage": 12.34,
                "memory": 2 * 1024 ** 3,
                "memory_usage": 600 * 1024 ** 2,
                "toolsRunningStatus": "guestToolsRunning",
                "toolsVersionStatus": "guestToolsCurrent",
                "stuck_state": False,
                "stuck_question_id": None,
                "stuck_question": None,
                "stuck_choices": None,
                "platform": "CentOS 4/5/6 (64bit)",
            }
        })
        MockVSphereNodeDriver.add_mock_node(node)
        node = MockNode(**{
            "id": "duplicate_uuid1",
            "name": "Duplicate1 (update uuid host)",
            "state": 0,
            "public_ips": [],
            "private_ips": [],
            "driver": self.driver,
            "extra": {
                "managedObjectReference": MagicMock(),
                "vmpath": "[datastore1] /dup1/dup.vmx",
                "status": "running",
                "cpu": 1,
                "cpu_usage": 12.34,
                "memory": 2 * 1024 ** 3,
                "memory_usage": 600 * 1024 ** 2,
                "toolsRunningStatus": "guestToolsRunning",
                "toolsVersionStatus": "guestToolsCurrent",
                "stuck_state": False,
                "stuck_question_id": None,
                "stuck_question": None,
                "stuck_choices": None,
                "platform": "Microsoft Windows Server 2008 R2 (64-bit)",
            }
        })
        MockVSphereNodeDriver.add_mock_node(node)
        node = MockNode(**{
            "id": "duplicate_uuid1",
            "name": "Duplicate (update duplicate host)",
            "state": 0,
            "public_ips": [],
            "private_ips": [],
            "driver": self.driver,
            "extra": {
                "managedObjectReference": MagicMock(),
                "vmpath": "[datastore1] /dup/dup.vmx",
                "status": "running",
                "cpu": 1,
                "cpu_usage": 12.34,
                "memory": 2 * 1024 ** 3,
                "memory_usage": 600 * 1024 ** 2,
                "toolsRunningStatus": "guestToolsRunning",
                "toolsVersionStatus": "guestToolsCurrent",
                "stuck_state": False,
                "stuck_question_id": None,
                "stuck_question": None,
                "stuck_choices": None,
                "platform": "Microsoft Windows Server 2008 R2 (64-bit)",
            }
        })
        MockVSphereNodeDriver.add_mock_node(node)
        node = MockNode(**{
            "id": "duplicate_uuid1",
            "name": "Duplicate2 (create as duplicate host)",
            "state": 0,
            "public_ips": [],
            "private_ips": [],
            "driver": self.driver,
            "extra": {
                "managedObjectReference": MagicMock(),
                "vmpath": "[datastore1] /dup2/dup.vmx",
                "status": "running",
                "cpu": 1,
                "cpu_usage": 12.34,
                "memory": 2 * 1024 ** 3,
                "memory_usage": 600 * 1024 ** 2,
                "toolsRunningStatus": "guestToolsRunning",
                "toolsVersionStatus": "guestToolsCurrent",
                "stuck_state": False,
                "stuck_question_id": None,
                "stuck_question": None,
                "stuck_choices": None,
                "platform": "Microsoft Windows Server 2008 R2 (64-bit)",
            }
        })
        MockVSphereNodeDriver.add_mock_node(node)
        node = MockNode(**{
            "id": "duplicate_uuid2",
            "name": "Duplicate3 (update duplicate host to uuid host)",
            "state": 0,
            "public_ips": [],
            "private_ips": [],
            "driver": self.driver,
            "extra": {
                "managedObjectReference": MagicMock(),
                "vmpath": "[datastore1] /dup3/dup.vmx",
                "status": "running",
                "cpu": 1,
                "cpu_usage": 12.34,
                "memory": 2 * 1024 ** 3,
                "memory_usage": 600 * 1024 ** 2,
                "toolsRunningStatus": "guestToolsRunning",
                "toolsVersionStatus": "guestToolsCurrent",
                "stuck_state": False,
                "stuck_question_id": None,
                "stuck_question": None,
                "stuck_choices": None,
                "platform": "Red Hat Enterprise Linux 6 (64-bit)",
            }
        })
        MockVSphereNodeDriver.add_mock_node(node)
        node = MockNode(**{
            "id": "duplicate_uuid3",
            "name": "Duplicate4 (Create uuid host)",
            "state": 0,
            "public_ips": [],
            "private_ips": [],
            "driver": self.driver,
            "extra": {
                "managedObjectReference": MagicMock(),
                "vmpath": "[datastore1] /dup4/dup.vmx",
                "status": "running",
                "cpu": 1,
                "cpu_usage": 12.34,
                "memory": 2 * 1024 ** 3,
                "memory_usage": 600 * 1024 ** 2,
                "toolsRunningStatus": "guestToolsRunning",
                "toolsVersionStatus": "guestToolsCurrent",
                "stuck_state": False,
                "stuck_question_id": None,
                "stuck_question": None,
                "stuck_choices": None,
                "platform": "Red Hat Enterprise Linux 6 (64-bit)",
            }
        })
        MockVSphereNodeDriver.add_mock_node(node)
        node = MockNode(**{
            "id": "duplicate_uuid3",
            "name": "Duplicate5 (Create duplicate host at same time)",
            "state": 0,
            "public_ips": [],
            "private_ips": [],
            "driver": self.driver,
            "extra": {
                "managedObjectReference": MagicMock(),
                "vmpath": "[datastore1] /dup5/dup.vmx",
                "status": "running",
                "cpu": 1,
                "cpu_usage": 12.34,
                "memory": 2 * 1024 ** 3,
                "memory_usage": 600 * 1024 ** 2,
                "toolsRunningStatus": "guestToolsRunning",
                "toolsVersionStatus": "guestToolsCurrent",
                "stuck_state": False,
                "stuck_question_id": None,
                "stuck_question": None,
                "stuck_choices": None,
                "platform": "Red Hat Enterprise Linux 6 (64-bit)",
            }
        })
        MockVSphereNodeDriver.add_mock_node(node)
        hardware_profile = {
            "id": "hardware_uuid",
            "name": "host name",
            "cpu": 8,
            "cpu_usage": 12.34,
            "cpu_assigned": 4,
            "memory": 16 * 1024 ** 3,
            "memory_usage": 8 * 1024 ** 3,
            "memory_assigned": 8 * 1024 ** 3,
            "datastores": [
                {"name": "datastore1", "freeSpace": 600 * 1024 ** 3,
                 "capacity": 2 * 1024 ** 4, "type": "nfs"}
            ]
        }
        MockVSphereNodeDriver.add_mock_hardware_profile(hardware_profile)

Example 139

Project: scrapy-cluster Source File: tests_offline.py
    def test_load_stats_plugins(self):
        # let's assume we are loading the default plugins

        self.kafka_monitor._load_plugins()

        # test no rolling stats
        self.kafka_monitor.stats_dict = {}
        self.kafka_monitor.settings['STATS_TIMES'] = []
        self.kafka_monitor._setup_stats_plugins(MagicMock())
        defaults = [
            'ScraperHandler',
            'ActionHandler',
            'StatsHandler'
        ]

        self.assertEquals(
            sorted(self.kafka_monitor.stats_dict['plugins'].keys()),
            sorted(defaults))

        for key in self.kafka_monitor.plugins_dict:
            plugin_name = self.kafka_monitor.plugins_dict[key]['instance'].__class__.__name__
            self.assertEquals(
                self.kafka_monitor.stats_dict['plugins'][plugin_name].keys(),
                ['lifetime'])

        # test good/bad rolling stats
        self.kafka_monitor.stats_dict = {}
        self.kafka_monitor.settings['STATS_TIMES'] = [
            'SECONDS_15_MINUTE',
            'SECONDS_1_HOUR',
            'SECONDS_DUMB',
        ]
        good = [
            'lifetime',  # for totals, not DUMB
            900,
            3600,
        ]

        self.kafka_monitor._setup_stats_plugins(MagicMock())

        self.assertEquals(
            sorted(self.kafka_monitor.stats_dict['plugins'].keys()),
            sorted(defaults))

        for key in self.kafka_monitor.plugins_dict:
            plugin_name = self.kafka_monitor.plugins_dict[key]['instance'].__class__.__name__
            self.assertEquals(
                sorted(self.kafka_monitor.stats_dict['plugins'][plugin_name].keys()),
                sorted(good))

        for plugin_key in self.kafka_monitor.stats_dict['plugins']:
            k1 = 'stats:kafka-monitor:{p}'.format(p=plugin_key)
            for time_key in self.kafka_monitor.stats_dict['plugins'][plugin_key]:
                if time_key == 0:
                    self.assertEquals(
                        self.kafka_monitor.stats_dict['plugins'][plugin_key][0].key,
                        '{k}:lifetime'.format(k=k1)
                        )
                else:
                    self.assertEquals(
                        self.kafka_monitor.stats_dict['plugins'][plugin_key][time_key].key,
                        '{k}:{t}'.format(k=k1, t=time_key)
                        )
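
_setup_stats_plugins(MagicMock()) above hands the code under test a dependency it may call freely; every call is recorded on the mock and can be inspected afterwards. A standalone sketch of that record-then-inspect idea using method_calls (setup_plugins and the key names are invented):

from mock import MagicMock, call

def setup_plugins(redis_conn):
    redis_conn.set('plugins:loaded', 3)
    redis_conn.expire('plugins:loaded', 60)

conn = MagicMock()  # throwaway collaborator, accepts any call
setup_plugins(conn)
assert conn.method_calls == [call.set('plugins:loaded', 3),
                             call.expire('plugins:loaded', 60)]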

Example 140

Project: django-location Source File: test_runmeter.py
    def test_process_existing_source(self):
        arbitrary_url = 'http://www.go.com/101'
        arbitrary_route_name = 'Something'
        arbitrary_source = models.LocationSource.objects.create(
            name='Whatnot',
            user=self.user,
            type=self.source_type,
            active=True,
            data={
                'url': arbitrary_url,
                'known_points': {
                    'alpha': 'arbitrary_value'
                },
            }
        )
        arbitrary_document = MagicMock()
        arbitrary_time = datetime.datetime.utcnow().replace(
            tzinfo=utc
        )
        arbitrary_points = [
            {'lat': -122, 'lng': 45, 'key': 'alpha', 'time': 1},
            {'lat': -123, 'lng': 44, 'key': 'beta', 'time': 2}
        ]

        consumer = RunmeterConsumer(arbitrary_source)
        consumer._get_document = MagicMock(
            return_value=arbitrary_document
        )
        consumer.get_start_time = MagicMock(
            return_value=arbitrary_time
        )
        consumer.get_route_name = MagicMock(
            return_value=arbitrary_route_name
        )
        consumer.get_points = MagicMock(
            return_value=arbitrary_points
        )
        consumer.is_active = MagicMock(
            return_value=True
        )

        consumer.process()

        consumer._get_document.assert_called_with(arbitrary_url)
        consumer.get_start_time.assert_called_with(arbitrary_document)
        consumer.get_route_name.assert_called_with(arbitrary_document)
        consumer.get_points.assert_called_with(
            arbitrary_document,
            arbitrary_time
        )

        actual_points = models.LocationSnapshot.objects.order_by('date')
        self.assertEqual(actual_points.count(), 1)

        assertions = {
            'date': arbitrary_time + datetime.timedelta(seconds=2),
            'source': arbitrary_source,
            'location': Point(-123, 44)
        }
        for k, v in assertions.items():
            self.assertEqual(getattr(actual_points[0], k), v)

        self.assertTrue(
            models.LocationSource.objects.get(pk=arbitrary_source.pk).active
        )
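
The test above is a stub-then-verify pattern: each helper (_get_document, get_points, ...) is replaced by a MagicMock with a canned return_value, process() runs, and assert_called_with checks the wiring between them. The skeleton of that pattern (Consumer here is hypothetical):

from mock import MagicMock

class Consumer(object):
    def fetch(self, url):
        raise NotImplementedError

    def process(self, url):
        doc = self.fetch(url)
        return len(doc)

consumer = Consumer()
consumer.fetch = MagicMock(return_value='abc')  # stub the collaborator
assert consumer.process('http://example.com') == 3
consumer.fetch.assert_called_with('http://example.com')  # verify the wiring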

Example 141

Project: asiaq Source File: test_disco_ssm.py
def mock_boto3_client(arg):
    """ mock method for boto3.client() """
    if arg != "ssm":
        raise Exception("Mock %s client not implemented.", arg)

    mock_asiaq_documents = copy.copy(MOCK_ASIAQ_DOCUMENTS)
    mock_asiaq_document_contents = copy.copy(MOCK_ASIAQ_DOCUMENT_CONTENTS)
    wait_flags = {'delete': True, 'create': True}

    def _mock_list_documents(NextToken=''):
        all_documents = MOCK_AWS_DOCUMENTS + mock_asiaq_documents
        if NextToken == '':
            return {
                'DocumentIdentifiers': all_documents[:len(all_documents) / 2],
                'NextToken': MOCK_NEXT_TOKEN
            }
        elif NextToken == MOCK_NEXT_TOKEN:
            return {
                'DocumentIdentifiers': all_documents[len(all_documents) / 2:],
                'NextToken': ''
            }
        else:
            raise RuntimeError("Invalid NextToken: {0}".format(NextToken))

    def _mock_get_document(Name):
        if Name not in mock_asiaq_document_contents:
            raise ClientError({'Error': {'Code': 'Mock_code', 'Message': 'mock message'}},
                              'GetDocument')

        return {'Name': Name,
                'Content': mock_asiaq_document_contents[Name]}

    def _mock_create_document(Content, Name):
        mock_asiaq_documents.append({'Name': Name,
                                     'Owner': 'mock_owner',
                                     'PlatformTypes': ['Linux']})
        mock_asiaq_document_contents[Name] = Content

    def _mock_delete_document(Name):
        doc_to_delete = [document for document in mock_asiaq_documents
                         if document['Name'] == Name]
        if doc_to_delete:
            mock_asiaq_documents.remove(doc_to_delete[0])
        mock_asiaq_document_contents.pop(Name, None)

    def _mock_describe_document(Name):
        # Using two wait flags to simulate that AWS is taking time to delete and
        # create documents
        if Name not in mock_asiaq_document_contents:
            if wait_flags['delete']:
                wait_flags['delete'] = False
                return {'Document': {'Name': Name, 'Status': 'Active'}}
            else:
                wait_flags['delete'] = True
                raise ClientError({'Error': {'Code': 'Mock_code', 'Message': 'mock message'}},
                                  'DescribeDocument')
        else:
            if wait_flags['create']:
                wait_flags['create'] = False
                return {'Document': {'Name': Name, 'Status': 'Creating'}}
            else:
                wait_flags['create'] = True
                return {'Document': {'Name': Name, 'Status': 'Active'}}

    def _mock_send_command(InstanceIds, DocumentName, Comment=None, Parameters=None, OutputS3BucketName=None):
        mock_command = create_mock_command(InstanceIds, DocumentName, Comment, Parameters, OutputS3BucketName)
        return {"Command": mock_command}

    def _mock_list_commands(CommandId):
        filtered_mock_commands = []

        for command in MOCK_COMMANDS:
            if command['CommandId'] == CommandId:
                filtered_mock_commands.append(command)
                break

        return {"Commands": filtered_mock_commands}

    def _mock_list_command_invocations(CommandId, Details):
        filtered_mock_command_invocations = []

        for command_invocation in MOCK_COMMAND_INVOCATIONS:
            if command_invocation['CommandId'] == CommandId:
                filtered_mock_command_invocations.append(command_invocation)

        return {"CommandInvocations": filtered_mock_command_invocations}

    mock_ssm = MagicMock()
    mock_ssm.list_documents.side_effect = _mock_list_documents
    mock_ssm.get_document.side_effect = _mock_get_document
    mock_ssm.create_document.side_effect = _mock_create_document
    mock_ssm.delete_document.side_effect = _mock_delete_document
    mock_ssm.describe_document.side_effect = _mock_describe_document
    mock_ssm.send_command.side_effect = _mock_send_command
    mock_ssm.list_commands.side_effect = _mock_list_commands
    mock_ssm.list_command_invocations.side_effect = _mock_list_command_invocations

    return mock_ssm
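
Each side_effect above is a real function closing over shared state (mock_asiaq_documents, wait_flags), which turns one MagicMock into a small stateful fake of the boto3 SSM client. The mechanism in miniature, against an invented key-value API rather than SSM:

from mock import MagicMock

store = {}

def _mock_put(Key, Value):
    store[Key] = Value

def _mock_get(Key):
    return {'Key': Key, 'Value': store[Key]}

client = MagicMock()
client.put.side_effect = _mock_put  # calls are recorded *and* run the fake
client.get.side_effect = _mock_get

client.put(Key='color', Value='blue')
assert client.get(Key='color') == {'Key': 'color', 'Value': 'blue'}
client.put.assert_called_once_with(Key='color', Value='blue')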

Example 142

Project: mpop Source File: test_scene.py
    def test_project(self):
        """Projecting a scene.
        """
        area = random_string(8)
        area2 = random_string(8)
        # scene with 3 channels

        channels = [["00_7", (0.5, 0.7, 0.9), 2500],
                    ["06_4", (5.7, 6.4, 7.1), 5000],
                    ["11_5", (10.5, 11.5, 12.5), 5000]]

        class SatelliteInstrumentScene2(SatelliteInstrumentScene):

            """Dummy satinst class.
            """
            instrument_name = random_string(8)
            channel_list = channels

        # case of a swath

        self.scene = SatelliteInstrumentScene2(area=None)

        # With data

        self.scene[0.7] = np.ma.array(np.random.rand(3, 3),
                                      mask=np.array(np.random.rand(3, 3) * 2,
                                                    dtype=int))
        self.scene[6.4] = np.ma.array(np.random.rand(3, 3),
                                      mask=np.array(np.random.rand(3, 3) * 2,
                                                    dtype=int))
        self.scene[6.4].area = MagicMock()

        res = self.scene.project(area2)
        self.assertEquals(res[0.7].shape, (3, 3))
        self.assertEquals(res[6.4].shape, (3, 3))
        self.assertRaises(KeyError, res.__getitem__, 11.5)

        self.scene[0.7].area = self.scene[6.4].area
        res = self.scene.project(area2, channels=[0.7])
        self.assertEquals(res[0.7].shape, (3, 3))
        self.assertRaises(KeyError, res.__getitem__, 6.4)

        res = self.scene.project(area2, channels=[0.7, 11.5])
        self.assertEquals(res[0.7].shape, (3, 3))
        self.assertRaises(KeyError, res.__getitem__, 11.5)

        res = self.scene.project(area2, channels=[])
        self.assertRaises(KeyError, res.__getitem__, 0.7)

        self.assertRaises(TypeError, self.scene.project,
                          area2, channels=11.5)

        # case of a grid

        self.scene = SatelliteInstrumentScene2(area=area)

        # With data

        self.scene[0.7] = np.ma.array(np.random.rand(3, 3),
                                      mask=np.array(np.random.rand(3, 3) * 2,
                                                    dtype=int))
        self.scene[6.4] = np.ma.array(np.random.rand(3, 3),
                                      mask=np.array(np.random.rand(3, 3) * 2,
                                                    dtype=int))
        self.scene[11.5] = np.ma.array(np.random.rand(3, 3),
                                       mask=np.array(np.random.rand(3, 3) * 2,
                                                     dtype=int))

        res = self.scene.project(area2)
        self.assertEquals(res[11.5].shape, (3, 3))

        self.scene[0.7].area = MagicMock()
        res = self.scene.project(area2, channels=[0.7])
        self.assertEquals(res[0.7].shape, (3, 3))
        self.assertRaises(KeyError, res.__getitem__, 6.4)

        self.scene[6.4].area = MagicMock()
        self.scene[11.5].area = MagicMock()
        res = self.scene.project(area2)
        self.assertEquals(res[0.7].shape, (3, 3))

        # case of self projection

        self.scene = SatelliteInstrumentScene2(area=area)

        # With data

        self.scene[0.7] = np.ma.array(np.random.rand(3, 3),
                                      mask=np.array(np.random.rand(3, 3) * 2,
                                                    dtype=int))
        self.scene[6.4] = np.ma.array(np.random.rand(3, 3),
                                      mask=np.array(np.random.rand(3, 3) * 2,
                                                    dtype=int))
        self.scene[11.5] = np.ma.array(np.random.rand(3, 3),
                                       mask=np.array(np.random.rand(3, 3) * 2,
                                                     dtype=int))

        self.scene[6.4].area = MagicMock()
        res = self.scene.project(area)
        self.assertEquals(res[0.7].shape, (3, 3))
        self.assertEquals(res[6.4].shape, (3, 3))
        self.assertEquals(res[11.5].shape, (3, 3))

        self.scene[11.5].area = self.scene[6.4].area
        self.scene[0.7].area = self.scene[6.4].area
        res = self.scene.project(area, channels=None)
        self.assertEquals(res[0.7].shape, (3, 3))
        self.assertEquals(res[6.4].shape, (3, 3))
        self.assertEquals(res[11.5].shape, (3, 3))

Example 143

Project: facepy Source File: test_graph_api.py
@with_setup(mock, unmock)
def test_paged_get():
    graph = GraphAPI('<access token>')
    limit = 2

    responses = [
        {
            'data': [
                {
                    'message': 'He\'s a complicated man. And the only one that understands him is his woman'
                }
            ] * 2,
            'paging': {
                'next': 'https://graph.facebook.com/herc/posts?limit=%(limit)s&offset=%(limit)s&access_token=<access token>' % {
                    'limit': limit
                }
            }
        },
        {
            'data': [
                {
                    'message': 'He\'s a complicated man. And the only one that understands him is his woman'
                }
            ],
            'paging': {
                'next': 'https://graph.facebook.com/herc/posts?limit=%(limit)s&offset=%(limit)s&access_token=<access token>' % {
                    'limit': limit
                }
            }
        },
        {
            'data': [],
            'paging': {
                'previous': 'https://graph.facebook.com/herc/posts?limit=%(limit)s&offset=%(limit)s&access_token=<access token>' % {
                    'limit': limit
                }
            }
        }
    ]

    def side_effect(*args, **kwargs):
        response = responses.pop(0)

        return MagicMock(content=json.dumps(response), status_code=200)

    mock_request.side_effect = side_effect

    pages = graph.get('herc/posts', page=True)

    for index, page in enumerate(pages):
        pass

    assert_equal(index, 2)
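
test_paged_get drains a list of canned responses from inside a side_effect function. When no per-call logic is needed, side_effect can also be an iterable, and each call simply returns the next item. Both styles side by side (the responses are invented):

from mock import MagicMock

# Style 1: side_effect as an iterable of successive return values.
fetch = MagicMock(side_effect=[{'page': 1}, {'page': 2}, {'page': 3}])
assert [fetch(), fetch(), fetch()] == [{'page': 1}, {'page': 2}, {'page': 3}]

# Style 2: side_effect as a function popping a list, as in the test above;
# useful when the response should also depend on the call arguments.
responses = [{'page': 1}, {'page': 2}]
def side_effect(*args, **kwargs):
    return responses.pop(0)
fetch = MagicMock(side_effect=side_effect)
assert fetch('herc/posts') == {'page': 1}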

Example 144

Project: mock-django Source File: query.py
def QuerySetMock(model, *return_value):
    """
    Get a SharedMock that returns self for most attributes and a new copy of
    itself for any method that ordinarily generates QuerySets.

    Set the results to two items:

    >>> class Post(object): pass
    >>> objects = QuerySetMock(Post, 'return', 'values')
    >>> assert list(objects.filter()) == list(objects.all())

    Force an exception:

    >>> objects = QuerySetMock(Post, Exception())

    Chain calls:
    >>> objects.all().filter(filter_arg='dummy')
    """

    def make_get(self, model):
        def _get(*a, **k):
            results = list(self)
            if len(results) > 1:
                raise model.MultipleObjectsReturned
            try:
                return results[0]
            except IndexError:
                raise model.DoesNotExist
        return _get

    def make_qs_returning_method(self):
        def _qs_returning_method(*a, **k):
            return copy.deepcopy(self)
        return _qs_returning_method

    def make_getitem(self):
        def _getitem(k):
            if isinstance(k, slice):
                self.__start = k.start
                self.__stop = k.stop
            else:
                return list(self)[k]
            return self
        return _getitem

    def make_iterator(self):
        def _iterator(*a, **k):
            if len(return_value) == 1 and isinstance(return_value[0], Exception):
                raise return_value[0]

            start = getattr(self, '__start', None)
            stop = getattr(self, '__stop', None)
            for x in return_value[start:stop]:
                yield x
        return _iterator

    actual_model = model
    if actual_model:
        model = mock.MagicMock(spec=actual_model())
    else:
        model = mock.MagicMock()

    m = SharedMock(reserved=['count', 'exists'] + QUERYSET_RETURNING_METHODS)
    m.__start = None
    m.__stop = None
    m.__iter__.side_effect = lambda: iter(m.iterator())
    m.__getitem__.side_effect = make_getitem(m)
    if hasattr(m, "__nonzero__"):
        # Python 2
        m.__nonzero__.side_effect = lambda: bool(return_value)
        m.exists.side_effect = m.__nonzero__
    else:
        # Python 3
        m.__bool__.side_effect = lambda: bool(return_value)
        m.exists.side_effect = m.__bool__
    m.__len__.side_effect = lambda: len(return_value)
    m.count.side_effect = m.__len__

    m.model = model
    m.get = make_get(m, actual_model)

    for method_name in QUERYSET_RETURNING_METHODS:
        setattr(m, method_name, make_qs_returning_method(m))

    # Note since this is a SharedMock, *all* auto-generated child
    # attributes will have the same side_effect ... might not make
    # sense for some like count().
    m.iterator.side_effect = make_iterator(m)
    return m
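
One subtlety in QuerySetMock: __iter__ is wired through side_effect with a lambda rather than given a return_value, so every for-loop gets a fresh iterator instead of sharing a single exhausted one. The distinction in a few lines:

from mock import MagicMock

items = [1, 2, 3]

qs = MagicMock()
qs.__iter__.side_effect = lambda: iter(items)  # fresh iterator per loop
qs.__len__.return_value = len(items)

assert list(qs) == [1, 2, 3]
assert list(qs) == [1, 2, 3]  # a return_value=iter(items) would be empty here
assert len(qs) == 3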

Example 145

Project: td-client-python Source File: schedule_api_test.py
def test_list_schedules_success():
    td = api.API("APIKEY")
    body = b"""
        {
            "schedules":[
                {
                    "name": "foo",
                    "cron": null,
                    "timezone": "UTC",
                    "delay": 0,
                    "created_at": "2016-08-02T17:58:40Z",
                    "type": "presto",
                    "query": "SELECT COUNT(1) FROM nasdaq;",
                    "database": "sample_datasets",
                    "user_name": "Yuu Yamashita",
                    "priority": 0,
                    "retry_limit": 0,
                    "result": "",
                    "next_time": null
                },
                {
                    "name": "bar",
                    "cron": "0 0 * * *",
                    "timezone": "UTC",
                    "delay": 0,
                    "created_at": "2016-08-02T18:01:04Z",
                    "type": "presto",
                    "query": "SELECT COUNT(1) FROM nasdaq;",
                    "database": "sample_datasets",
                    "user_name": "Kazuki Ota",
                    "priority": 0,
                    "retry_limit": 0,
                    "result": "",
                    "next_time": "2016-09-24T00:00:00Z"
                },
                {
                    "name": "baz",
                    "cron": "* * * * *",
                    "timezone": "UTC",
                    "delay": 0,
                    "created_at": "2016-03-02T23:01:59Z",
                    "type": "hive",
                    "query": "SELECT COUNT(1) FROM nasdaq;",
                    "database": "sample_datasets",
                    "user_name": "Yuu Yamashita",
                    "priority": 0,
                    "retry_limit": 0,
                    "result": "",
                    "next_time": "2016-07-06T00:00:00Z"
                }
            ]
        }
    """
    td.get = mock.MagicMock(return_value=make_response(200, body))
    schedules = td.list_schedules()
    td.get.assert_called_with("/v3/schedule/list")
    assert len(schedules) == 3
    next_time = sorted([ schedule.get("next_time") for schedule in schedules if "next_time" in schedule ])
    assert len(next_time) == 3
    assert next_time[2].year == 2016
    assert next_time[2].month == 9
    assert next_time[2].day == 24
    assert next_time[2].hour == 0
    assert next_time[2].minute == 0
    assert next_time[2].second == 0
    created_at = sorted([ schedule.get("created_at") for schedule in schedules if "created_at" in schedule ])
    assert len(created_at) == 3
    assert created_at[2].year == 2016
    assert created_at[2].month == 8
    assert created_at[2].day == 2
    assert created_at[2].hour == 18
    assert created_at[2].minute == 1
    assert created_at[2].second == 4

Example 146

Project: mpop Source File: test_projector.py
    @patch.object(utils, 'generate_quick_linesample_arrays')
    @patch.object(mpop.projector.kd_tree, 'get_neighbour_info')
    @patch.object(mpop.projector, '_get_area_hash')
    def test_init(self, gah, gni, gqla):
        """Creation of coverage.
        """

        # in case of wrong number of arguments

        self.assertRaises(TypeError, Projector)
        self.assertRaises(TypeError, Projector, random_string(20))


        # in case of string arguments

        in_area_id = random_string(20)
        out_area_id = random_string(20)

        area_type = utils.parse_area_file.return_value.__getitem__.return_value

        gni.side_effect = [("a", "b", "c", "d")] * 10

        self.proj = Projector(in_area_id, out_area_id)
        self.assertEquals(utils.parse_area_file.call_count, 2)
        area_file = mpop.projector.get_area_file()
        utils.parse_area_file.assert_any_call(area_file, in_area_id)
        utils.parse_area_file.assert_any_call(area_file, out_area_id)



        self.assertEquals(self.proj.in_area, area_type)
        self.assertEquals(self.proj.out_area, area_type)


        # in case of undefined areas

        mock = MagicMock(side_effect=Exception("raise"))
        with patch.object(utils, 'parse_area_file', mock):
            self.assertRaises(Exception,
                              Projector,
                              "raise",
                              random_string(20))
            self.assertRaises(Exception,
                              Projector,
                              random_string(20),
                              "raise")

        # in case of geometry objects as input

        with patch.object(utils, 'AreaNotFound', Exception):
            mock = MagicMock(side_effect=[utils.AreaNotFound("raise"),
                                          MagicMock()])
            with patch.object(utils, 'parse_area_file', mock):
                in_area = geometry.AreaDefinition()
                self.proj = Projector(in_area, out_area_id)
                print(self.proj.in_area)
                self.assertEquals(self.proj.in_area, in_area)

        in_area = geometry.SwathDefinition()
        utils.parse_area_file.return_value.__getitem__.side_effect = [AttributeError, out_area_id]
        self.proj = Projector(in_area, out_area_id)
        self.assertEquals(self.proj.in_area, in_area)

        out_area = geometry.AreaDefinition()
        utils.parse_area_file.return_value.__getitem__.side_effect = [in_area_id, AttributeError]
        self.proj = Projector(in_area_id, out_area)
        self.assertEquals(self.proj.out_area, out_area)

        # in case of lon/lat is input

        utils.parse_area_file.return_value.__getitem__.side_effect = [AttributeError, out_area_id]
        lonlats = ("great_lons", "even_greater_lats")

        self.proj = Projector("raise", out_area_id, lonlats)
        geometry.SwathDefinition.assert_called_with(lons=lonlats[0],
                                                    lats=lonlats[1])

        utils.parse_area_file.return_value.__getitem__.side_effect = None
        # in case of wrong mode

        self.assertRaises(ValueError,
                          Projector,
                          random_string(20),
                          random_string(20),
                          mode=random_string(20))

        utils.parse_area_file.return_value.__getitem__.side_effect = ["a", "b",
                                                                      "c", "d"]
        gqla.side_effect = [("ridx", "cidx")]
        # quick mode cache
        self.proj = Projector(in_area_id, out_area_id, mode="quick")
        cache = getattr(self.proj, "_cache")
        self.assertTrue(cache['row_idx'] is not None)
        self.assertTrue(cache['col_idx'] is not None)

        # nearest mode cache

        self.proj = Projector(in_area_id, out_area_id, mode="nearest")
        cache = getattr(self.proj, "_cache")
        self.assertTrue(cache['valid_index'] is not None)
        self.assertTrue(cache['valid_output_index'] is not None)
        self.assertTrue(cache['index_array'] is not None)
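
Besides the decorator patches, test_init swaps implementations inline by passing a pre-built MagicMock(side_effect=Exception(...)) as the third argument of patch.object; the original is restored when the with-block exits. The inline form on its own (AreaParser is invented):

from mock import MagicMock, patch

class AreaParser(object):
    @staticmethod
    def parse(name):
        return "real area: %s" % name

failing = MagicMock(side_effect=Exception("raise"))
with patch.object(AreaParser, 'parse', failing):
    try:
        AreaParser.parse("somewhere")
    except Exception:
        pass  # the patched-in mock raised, as configured
assert AreaParser.parse("somewhere") == "real area: somewhere"  # restored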

Example 147

Project: pandashells Source File: arg_lib_tests.py
    def test_decorating_adder_active(self):
        """
        _decorating_adder() adds proper arguments
        """
        # set up mock parser
        parser = MagicMock()
        group = MagicMock()
        group.add_argument = MagicMock()
        parser.add_argument_group = MagicMock(return_value=group)

        # create a list of expected call signatures
        calls = []

        context_list = [t for t in config_lib.CONFIG_OPTS if
                        t[0] == 'plot_context'][0][1]
        theme_list = [t for t in config_lib.CONFIG_OPTS if
                      t[0] == 'plot_theme'][0][1]
        palette_list = [t for t in config_lib.CONFIG_OPTS if
                        t[0] == 'plot_palette'][0][1]

        msg = "Set the x-limits for the plot"
        calls.append(call('--xlim', nargs=2, type=float, dest='xlim',
                          metavar=('XMIN', 'XMAX'), help=msg))

        msg = "Set the y-limits for the plot"
        calls.append(call('--ylim', nargs=2, type=float, dest='ylim',
                          metavar=('YMIN', 'YMAX'), help=msg))

        msg = "Draw x axis with log scale"
        calls.append(call(
            '--xlog', action='store_true', dest='xlog', default=False, help=msg
        ))

        msg = "Draw y axis with log scale"
        calls.append(call(
            '--ylog', action='store_true', dest='ylog', default=False, help=msg
        ))

        msg = "Set the x-label for the plot"
        calls.append(call('--xlabel', nargs=1, type=str, dest='xlabel',
                          help=msg))

        msg = "Set the y-label for the plot"
        calls.append(call('--ylabel', nargs=1, type=str, dest='ylabel',
                          help=msg))

        msg = "Set the title for the plot"
        calls.append(call('--title', nargs=1, type=str, dest='title', help=msg))

        msg = "Specify legend location"
        calls.append(call('--legend', nargs=1, type=str, dest='legend',
                          choices=['1', '2', '3', '4', 'best'], help=msg))

        msg = "Specify whether hide the grid or not"
        calls.append(call('--nogrid', action='store_true', dest='no_grid',
                          default=False, help=msg))

        msg = "Specify plot context. Default = '{}' ".format(context_list[0])
        calls.append(call('--context', nargs=1, type=str, dest='plot_context',
                          default=[context_list[0]], choices=context_list,
                          help=msg))

        msg = "Specify plot theme. Default = '{}' ".format(theme_list[0])
        calls.append(call('--theme', nargs=1,
                          type=str, dest='plot_theme', default=[theme_list[0]],
                          choices=theme_list, help=msg))

        msg = "Specify plot palette. Default = '{}' ".format(palette_list[0])
        calls.append(call('--palette', nargs=1, type=str, dest='plot_palette',
                          default=[palette_list[0]], choices=palette_list,
                          help=msg))

        msg = "Save the figure to this file"
        calls.append(call('--savefig', nargs=1, type=str, help=msg))

        # run the code under test
        args = ['decorating']
        arg_lib._decorating_adder(parser, *args)

        # make sure proper calls were made
        self.assertEqual(group.add_argument.call_args_list, calls)
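
The comparison at the end checks group.add_argument.call_args_list against a hand-built list of call(...) objects, which verifies the arguments and the order of every call at once. Reduced to its essentials (the argument names are invented):

from mock import MagicMock, call

parser = MagicMock()
group = parser.add_argument_group.return_value  # child mocks are auto-created

group.add_argument('--xlim', nargs=2, type=float)
group.add_argument('--ylim', nargs=2, type=float)

expected = [call('--xlim', nargs=2, type=float),
            call('--ylim', nargs=2, type=float)]
assert group.add_argument.call_args_list == expected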

Example 148

Project: jumpgate Source File: test_volumes.py
def set_SL_client(req, operation=OP_CODE['GOOD_PATH']['SIMPLE']):
    if operation == OP_CODE['GOOD_PATH']['SIMPLE']:
        # simple good path testing, use default sl_client
        return
    elif operation == OP_CODE['BAD_PATH']['VIRT_DISK_IMG_OBJ_INVALID']:
        # Virtual_Disk_Image.getObject failure.
        req.env['sl_client']['Virtual_Disk_Image'].getObject = \
            mock.MagicMock(side_effect=
                           SoftLayer.SoftLayerAPIError(400,
                                                       "MockFault",
                                                       None))
    elif operation == OP_CODE['BAD_PATH']['GET_VIRT_DISK_IMGS_API']:
        # getVirtualDiskImages() SLAPI failure
        setattr(req.env['sl_client']['Account'],
                'getVirtualDiskImages',
                mock.MagicMock(side_effect=
                               SoftLayer.SoftLayerAPIError(400,
                                                           "MockFault",
                                                           None)))
    elif operation == OP_CODE['GOOD_PATH']['RET_VIRT_DISK_IMGS']:
        def _return_disk_imgs(*args, **kwargs):
            return [
                {'typeId': volumes.VIRTUAL_DISK_IMAGE_TYPE['SYSTEM'],
                 'blockDevices': [mock.MagicMock()],
                 'localDiskFlag': False,
                 },
                {'typeId': volumes.VIRTUAL_DISK_IMAGE_TYPE['SWAP'],
                 'blockDevices': [mock.MagicMock()],
                 'localDiskFlag': False,
                 }]
        setattr(req.env['sl_client']['Account'],
                'getVirtualDiskImages',
                mock.MagicMock(side_effect=_return_disk_imgs))
    elif operation == OP_CODE['GOOD_PATH']['RET_VIRT_DISK_IMG']:
        def _return_disk_img(*args, **kwargs):
            return {'typeId': volumes.VIRTUAL_DISK_IMAGE_TYPE['SYSTEM'],
                    'blockDevices': [mock.MagicMock()],
                    'localDiskFlag': False, }
        req.env['sl_client']['Virtual_Disk_Image'].getObject = \
            mock.MagicMock(side_effect=_return_disk_img)
    elif operation == OP_CODE['BAD_PATH']['RET_BAD_VIRT_GUEST']:
        def _return_disk_img_1(*args, **kwargs):
            return {
                'typeId': volumes.VIRTUAL_DISK_IMAGE_TYPE['SYSTEM'],
                'blockDevices': [{
                    'guestId': GUEST_ID,
                    'diskImageId': DISK_IMG_ID,
                    'device': BLKDEV_MOUNT_ID,
                }],
            }
        req.env['sl_client']['Virtual_Disk_Image'].getObject = \
            mock.MagicMock(side_effect=_return_disk_img_1)
        req.env['sl_client']['Virtual_Guest'].getObject = \
            mock.MagicMock(side_effect=
                           SoftLayer.SoftLayerAPIError(400,
                                                       "MockFault",
                                                       None))
    elif operation == OP_CODE['GOOD_PATH']['RET_VIRT_DISK_BILL']:
        def _return_billing_item(*args, **kwargs):
            return {'billingItem': mock.MagicMock()}
        req.env['sl_client']['Virtual_Disk_Image'].getObject = \
            mock.MagicMock(side_effect=_return_billing_item)
    elif operation == OP_CODE['BAD_PATH']['RET_VIRT_DISK_EXCP']:
        req.env['sl_client']['Virtual_Disk_Image'].getObject = \
            mock.MagicMock(side_effect=
                           SoftLayer.SoftLayerAPIError(400,
                                                       "MockFault",
                                                       None))
    elif operation == OP_CODE['GOOD_PATH']['CREATE_VOLUME']:
        def _return_all_objects(*args, **kwargs):
            return [{'name': 'Portable Storage',
                     'isActive': 1,
                     'id': PROD_PKG_ID}]

        def _return_prices(*args, **kwargs):
            return [{'id': PROD_PKG_ID,
                     'capacity': DISK_CAPACITY,
                     'prices': [{'id': PRICE_ID}]}]

        def _return_disk_img_2(*args, **kwargs):
            return {
                'typeId': volumes.VIRTUAL_DISK_IMAGE_TYPE['SYSTEM'],
                'blockDevices': [{
                    'guestId': GUEST_ID,
                    'diskImageId': DISK_IMG_ID,
                    'device': BLKDEV_MOUNT_ID,
                }],
            }

        req.env['sl_client']['Product_Package'].getAllObjects = \
            mock.MagicMock(side_effect=_return_all_objects)
        req.env['sl_client']['Product_Package'].getItems = \
            mock.MagicMock(side_effect=_return_prices)
        req.env['sl_client']['Location_Datacenter'].getDatacenters = \
            mock.MagicMock(return_value=[{'name': DATACENTER_NAME,
                                         'id': DATACENTER_ID}])
        req.env['sl_client']['Billing_Order'].getOrderTopLevelItems = \
            mock.MagicMock(
                return_value=[{'billingItem': {'resourceTableId':
                                               DISK_IMG_ID}}])
        req.env['sl_client']['Virtual_Disk_Image'].getObject = \
            mock.MagicMock(side_effect=_return_disk_img_2)
        req.env['sl_client']['Product_Order'].placeOrder = \
            mock.MagicMock(return_value={'orderId': ORDERID})
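
The SoftLayer stubs above rely on side_effect raising: when side_effect is an exception instance (or class), calling the mock raises it instead of returning. In isolation (MockFault is an invented exception, unrelated to SoftLayer's):

import mock

class MockFault(Exception):
    pass

get_object = mock.MagicMock(side_effect=MockFault("boom"))

try:
    get_object()
except MockFault:
    pass  # the configured exception was raised
else:
    raise AssertionError("expected MockFault")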

Example 149

Project: paasta Source File: test_cmds_rerun.py
@mark.parametrize('test_case', [
    [
        ['a_service', 'instance1', 'cluster1', _user_supplied_execution_date],
        'a_service',
        _list_clusters, _actual_deployments, _planned_deployments, False,
        'successfully created job',
    ],
    [
        ['a_service', 'instance1', 'cluster1', None],
        'a_service',
        _list_clusters, _actual_deployments, _planned_deployments, False,
        'successfully created job',
    ],
    [
        ['a_service', 'instance1', 'cluster1', None],
        'a_service',
        _list_clusters, _actual_deployments, _planned_deployments, True,
        'please supply a `--execution_date` argument',  # job uses time variables interpolation
    ],
    [
        ['a_service', 'instance1', 'cluster1,cluster2', _user_supplied_execution_date],
        'a_service',
        _list_clusters, _actual_deployments, _planned_deployments, False,
        'successfully created job',
    ],
    [
        ['a_service', 'instance1', None, _user_supplied_execution_date],
        'a_service',
        _list_clusters, _actual_deployments, _planned_deployments, False,
        'cluster: cluster1',  # success
    ],
    [
        ['a_service', 'instance1', 'cluster3', _user_supplied_execution_date],
        'a_service',
        _list_clusters, _actual_deployments, _planned_deployments, False,
        '"cluster3" does not look like a valid cluster',
    ],
    [
        ['a_service', 'instance1', 'cluster2', _user_supplied_execution_date],
        'a_service',
        _list_clusters, _actual_deployments, _planned_deployments, False,
        'service "a_service" has not been deployed to "cluster2" yet',
    ],
    [
        ['a_service', 'instanceX', 'cluster1', _user_supplied_execution_date],
        'a_service',
        _list_clusters, _actual_deployments, _planned_deployments, False,
        'instance "instanceX" is either invalid',
    ],
    [
        ['a_service', 'instance2', 'cluster1', _user_supplied_execution_date],
        'a_service',
        _list_clusters, _actual_deployments, _planned_deployments, False,
        ' or has not been deployed to "cluster1" yet',
    ]
])
def test_rerun_validations(test_case):
    with contextlib.nested(
        patch('sys.stdout', new_callable=StringIO, autospec=None),
        patch('paasta_tools.cli.cmds.rerun.figure_out_service_name', autospec=True),
        patch('paasta_tools.cli.cmds.rerun.list_clusters', autospec=True),
        patch('paasta_tools.cli.cmds.rerun.get_actual_deployments', autospec=True),
        patch('paasta_tools.cli.cmds.rerun.get_planned_deployments', autospec=True),
        patch('paasta_tools.cli.cmds.rerun.execute_chronos_rerun_on_remote_master', autospec=True),
        patch('paasta_tools.cli.cmds.rerun.chronos_tools.load_chronos_job_config', autospec=True),
        patch('paasta_tools.cli.cmds.rerun.chronos_tools.uses_time_variables', autospec=True),
        patch('paasta_tools.cli.cmds.rerun._get_default_execution_date', autospec=True),
        patch('paasta_tools.cli.cmds.rerun.load_system_paasta_config', autospec=True),
    ) as (
        mock_stdout,
        mock_figure_out_service_name,
        mock_list_clusters,
        mock_get_actual_deployments,
        mock_get_planned_deployments,
        mock_execute_rerun_remote,
        mock_load_chronos_job_config,
        mock_uses_time_variables,
        mock_get_default_execution_date,
        mock_load_system_paasta_config,
    ):

        (rerun_args,
         mock_figure_out_service_name.return_value,
         mock_list_clusters.return_value,
         mock_get_actual_deployments.return_value,
         mock_get_planned_deployments.return_value,
         mock_uses_time_variables.return_value,
         expected_output) = test_case

        mock_load_chronos_job_config.return_value = {}
        default_date = datetime.datetime(2002, 2, 2, 2, 2, 2, 2)
        mock_get_default_execution_date.return_value = default_date
        mock_execute_rerun_remote.return_value = (0, '')
        mock_load_system_paasta_config.return_value = SystemPaastaConfig({}, '/fake/config')

        args = MagicMock()
        args.service = rerun_args[0]
        args.instance = rerun_args[1]
        args.clusters = rerun_args[2]
        if rerun_args[3]:
            args.execution_date = datetime.datetime.strptime(rerun_args[3], EXECUTION_DATE_FORMAT)
        else:
            args.execution_date = None
        args.verbose = 0

        paasta_rerun(args)

        # No --execution_date argument, but that's ok: the job doesn't use time vars interpolation.
        # Check if the backend rerun command was called with the default date.
        if args.execution_date is None and not mock_uses_time_variables.return_value:
            assert mock_execute_rerun_remote.call_args[1]['execution_date'] \
                == default_date.strftime(EXECUTION_DATE_FORMAT)

        # The job does interpolate time variables, so the user-supplied date must be used.
        if args.execution_date is not None and mock_uses_time_variables.return_value:
            assert mock_execute_rerun_remote.call_args[1]['execution_date'] == _user_supplied_execution_date

        output = mock_stdout.getvalue()
        assert expected_output in output
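
The `args = MagicMock()` pattern above works because MagicMock creates attributes on first access, so it can stand in for an argparse Namespace without declaring every field. A minimal sketch of the same idea, assuming only the stock mock library (the attribute names here are illustrative, not taken from paasta_tools):

import mock

args = mock.MagicMock()
args.service = 'a_service'   # explicitly set attributes behave like plain values
args.verbose = 0
print(args.service)          # -> 'a_service'
print(args.unset_flag)       # auto-created child mock; MagicMocks are truthy by default

The flip side of this convenience is that a typoed attribute never raises AttributeError, which is why the paasta test explicitly assigns every field the code under test reads.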

Example 150

Project: barman Source File: test_backup.py
    def test_check_redundancy(self, tmpdir):
        """
        Test the check method: minimum redundancy, failed backups and compression settings
        """
        # Setup temp dir and server
        # build a backup_manager and setup a basic configuration
        backup_manager = build_backup_manager(
            name='TestServer',
            global_conf={
                'barman_home': tmpdir.strpath,
                'minimum_redundancy': "1"
            })
        backup_manager.executor = mock.MagicMock()

        # Test the unsatisfied minimum_redundancy option
        strategy_mock = mock.MagicMock()
        backup_manager.check(strategy_mock)
        # Expect a failure from the method
        strategy_mock.result.assert_called_with(
            'TestServer',
            'minimum redundancy requirements',
            False,
            'have 0 backups, expected at least 1'
        )
        # Test the satisfied minimum_redundancy option
        b_info = build_test_backup_info(
            backup_id='fake_backup_id',
            server=backup_manager.server,
        )
        b_info.save()

        strategy_mock.reset_mock()
        backup_manager._load_backup_cache()
        backup_manager.check(strategy_mock)
        # Expect a success from the method
        strategy_mock.result.assert_called_with(
            'TestServer',
            'minimum redundancy requirements',
            True,
            'have 1 backups, expected at least 1'
        )

        # Test for no failed backups
        strategy_mock.reset_mock()
        backup_manager._load_backup_cache()
        backup_manager.check(strategy_mock)
        # Expect a success from the method
        strategy_mock.result.assert_any_call(
            'TestServer',
            'failed backups',
            True,
            'there are 0 failed backups'
        )

        # Test for failed backups in catalog
        b_info = build_test_backup_info(
            backup_id='failed_backup_id',
            server=backup_manager.server,
            status=BackupInfo.FAILED,
        )
        b_info.save()
        strategy_mock.reset_mock()
        backup_manager._load_backup_cache()
        backup_manager.check(strategy_mock)
        # Expect a failure from the method
        strategy_mock.result.assert_any_call(
            'TestServer',
            'failed backups',
            False,
            'there are 1 failed backups'
        )

        # Test unknown compression
        backup_manager.config.compression = 'test_compression'
        backup_manager.compression_manager.check.return_value = False
        strategy_mock.reset_mock()
        backup_manager.check(strategy_mock)
        # Expect a failure from the method
        strategy_mock.result.assert_any_call(
            'TestServer',
            'compression settings',
            False
        )

        # Test valid compression
        backup_manager.config.compression = 'test_compression'
        backup_manager.compression_manager.check.return_value = True
        strategy_mock.reset_mock()
        backup_manager.check(strategy_mock)
        # Expect a success from the method
        strategy_mock.result.assert_any_call(
            'TestServer',
            'compression settings',
            True
        )
        # Test failure retrieving a compressor
        backup_manager.config.compression = 'test_compression'
        backup_manager.compression_manager.check.return_value = True
        backup_manager.compression_manager.get_compressor.side_effect = \
            CompressionIncompatibility()
        strategy_mock.reset_mock()
        backup_manager.check(strategy_mock)
        # Expect a failure from the method
        strategy_mock.result.assert_any_call(
            'TestServer',
            'compression settings',
            False
        )