mock.patch.object

Here are examples of the Python API `mock.patch.object`, taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

200 Examples

Example 1

Project: ansible-kong-module
Source File: test_kong.py
View license
	@mock.patch.object(ModuleHelper, 'get_response')
	@mock.patch.object(AnsibleModule, 'exit_json')
	@mock.patch.object(KongAPI, 'list')
	@mock.patch.object(ModuleHelper, 'get_module')
	@mock.patch.object(ModuleHelper, 'prepare_inputs')
	def test_main_add(self, mock_prepare_inputs, mock_module, mock_list, mock_exit_json, mock_get_response):
		"""main() with a 'list' action should dispatch to KongAPI.list."""
		# Stub the module plumbing: inputs resolve to a 'list' action and
		# the API response is a bare success.
		mock_get_response.return_value = (True, {})
		mock_prepare_inputs.return_value = (mock_kong_admin_url, {}, "list")

		main()

		assert mock_list.called

Example 2

Project: mycroft
Source File: test_worker.py
View license
    def test_run_results_exception(self, get_worker):
        """An exception while processing a message must be logged, not fatal."""
        worker = self.setup_run_test(get_worker)

        @mock.patch.object(worker, '_get_sqs_wrapper', return_value=self.sqs, autospec=True)
        @mock.patch.object(worker, '_update_scheduled_jobs', autospec=True)
        @mock.patch.object(worker, '_process_msg', side_effect=Exception(), autospec=True)
        @mock.patch.object(worker.emailer, 'mail_result', autospec=True)
        def run_with_patches(*_mocks):
            # _process_msg blows up; the worker should route the failure
            # through log_exception instead of propagating it.
            log_patcher = mock.patch(
                'mycroft.backend.worker.base_worker.log_exception',
                autospec=True,
            )
            with log_patcher as log_mock:
                worker.run()
                assert log_mock.call_args is not None

        run_with_patches()

Example 3

View license
    def test_pushitems(self):
        """Fetching /pushitems for a push renders the request's title.

        Auth and API calls are patched out so the handler runs locally.
        """
        # contextlib.nested was deprecated in Python 2.7 and removed in
        # Python 3; a single multi-manager `with` is the supported form.
        with mock.patch.object(
            PushItemsServlet, "get_current_user",
            return_value=self.fake_request_data["user"],
        ), mock.patch.object(
            PushItemsServlet, "async_api_call",
            side_effect=self.mocked_api_call,
        ), mock.patch.object(
            self, "api_response",
            return_value=self.fake_requests_response,
        ):
            self.fetch("/pushitems?push=%d" % self.fake_request_data["id"])
            response = self.wait()
            T.assert_in(self.fake_request_data["title"], response.body)

Example 4

Project: CloudFerry
Source File: test_stage.py
View license
    @mock.patch.object(StageOne, '_invalidate')
    @mock.patch.object(StageOne, '_execute')
    @mock.patch.object(StageTwo, '_execute')
    def test_invalidate_dependencies_on_configuration_change(
            self, execute_two, execute_one, invalidate_one):
        """A config change between stage runs re-executes and invalidates
        the dependency stage."""
        # Run StageOne then StageTwo with two different configurations.
        for stage_cls, config in ((StageOne, self.config1), (StageTwo, self.config2)):
            stage.execute_stage(fqname(stage_cls), config)

        expected_stage_one_calls = [
            mock.call(self.config1, False),
            mock.call(self.config2, True),
        ]
        execute_one.assert_has_calls(expected_stage_one_calls)
        execute_two.assert_called_once_with(self.config2, False)
        invalidate_one.assert_called_once_with(self.config1, self.config2)

Example 5

Project: bottle-utils
Source File: test_i18n.py
View license
@mock.patch.object(mod, 'quoted_url')
@mock.patch.object(mod, 'request')
@mock.patch.object(mod, 'i18n_path')
def test_i18n_path_calls_get_url(i18n_path, req, quoted_url):
    """Evaluating the lazy i18n_url() result defers to quoted_url()
    with the original arguments."""
    lazy_url = mod.i18n_url('foo', bar=2)
    lazy_url._eval()
    quoted_url.assert_called_once_with('foo', bar=2)

Example 6

Project: mirocommunity
Source File: test_imports.py
View license
    def test_update(self):
        """
        SavedSearch.update() should create new Video objects linked to
        the search. Updating a second time shouldn't re-add videos.
        """
        search = self.create_search('blah rocket')
        self.assertEqual(search.video_set.count(), 0)

        def update_with_patches():
            # Patch the scraping/thumbnail touch points so update() runs
            # entirely against the test fixtures. (Extracted because the
            # original duplicated this triple-nested block verbatim.)
            with mock.patch.object(VidscraperVideo, 'load', self._load):
                with mock.patch.object(vidscraper, 'auto_search', self._search):
                    with mock.patch.object(video_save_thumbnail, 'delay'):
                        search.update()

        update_with_patches()
        self.assertEqual(search.video_set.count(), 5)
        # A second update must be idempotent: no videos are re-added.
        update_with_patches()
        self.assertEqual(search.video_set.count(), 5)

Example 7

Project: pypowervm
Source File: test_adapter.py
View license
    def test_event_listener(self):
        """End-to-end check of the session event listener.

        Covers: initialization with faked events, handler subscription and
        dispatch, legacy getevents(), _get_events() delegating to read/format,
        and the _format_events() translation of raw event dicts.

        NOTE(review): relies on self.sess being prepared by the fixture.
        """

        with mock.patch.object(adp._EventListener, '_get_events') as m_events,\
                mock.patch.object(adp, '_EventPollThread') as mock_poll:
            # With some fake events, event listener can be initialized
            self.sess._sessToken = 'token'.encode('utf-8')
            m_events.return_value = {'general': 'init'}, []
            event_listen = self.sess.get_event_listener()
            self.assertIsNotNone(event_listen)

            # Register the fake handlers and ensure they are called
            evh = mock.Mock(spec=adp.EventHandler, autospec=True)
            raw_evh = mock.Mock(spec=adp.RawEventHandler, autospec=True)
            event_listen.subscribe(evh)
            event_listen.subscribe(raw_evh)
            events, raw_events = event_listen._get_events()
            event_listen._dispatch_events(events, raw_events)
            self.assertTrue(evh.process.called)
            self.assertTrue(raw_evh.process.called)
            self.assertTrue(mock_poll.return_value.start.called)

            # Ensure getevents() gets legacy events
            self.assertEqual({'general': 'init'}, event_listen.getevents())

        # Outside our patching of _get_events, get the formatted events
        with mock.patch.object(event_listen, '_format_events') as mock_format,\
                mock.patch.object(event_listen.adp, 'read') as mock_read:

            # Fabricate some mock entries, so format gets called.
            mock_read.return_value.feed.entries = (['entry'])

            # _format_events is mocked out, so the result stays empty but
            # both read and format must have been invoked.
            self.assertEqual(({}, []), event_listen._get_events())
            self.assertTrue(mock_read.called)
            self.assertTrue(mock_format.called)

        # Test _format_events
        # One dict per fake event; EventID/EventDetail are consumed in order.
        event_data = [
            {
                'EventType': 'NEW_CLIENT',
                'EventData': 'href1',
                'EventID': '1',
                'EventDetail': 'detail',
            },
            {
                'EventType': 'CACHE_CLEARED',
                'EventData': 'href2',
                'EventID': '2',
                'EventDetail': 'detail2',
            },
            {
                'EventType': 'ADD_URI',
                'EventData': 'LPAR1',
                'EventID': '3',
                'EventDetail': 'detail3',
            },
            {
                'EventType': 'DELETE_URI',
                'EventData': 'LPAR1',
                'EventID': '4',
                'EventDetail': 'detail4',
            },
            {
                'EventType': 'INVALID_URI',
                'EventData': 'LPAR1',
                'EventID': '4',
                'EventDetail': 'detail4',
            },
        ]

        # Setup a side effect that returns events from the test data.
        # Popping on 'EventDetail' (the last field read) advances to the
        # next fake event.
        def get_event_data(item):
            data = event_data[0][item]
            if item == 'EventDetail':
                event_data.pop(0)
            return data

        # Raw events returns a sequence the same as the test data
        raw_result = copy.deepcopy(event_data)
        # Legacy events overwrites some events.
        dict_result = {'general': 'invalidate', 'LPAR1': 'delete'}

        # Build a mock entry
        entry = mock.Mock()
        entry.element.findtext.side_effect = get_event_data
        events = {}
        raw_events = []
        # Feed the same mock entry once per fake event; the side effect
        # walks through event_data.
        x = len(raw_result)
        while x:
            x -= 1
            event_listen._format_events(entry, events, raw_events)
        self.assertEqual(raw_result, raw_events)
        self.assertEqual(dict_result, events)

Example 8

Project: django-cache
Source File: tests.py
View license
    @test.utils.override_settings(MIDDLEWARE_CLASSES=[__name__ + '.UpdateVaryMiddleware'])
    def test_with_vary_changed_by_middleware(self):
        """A middleware-set Vary header partitions the cache per header value.

        Same header value within the expiry window hits the cache; a
        different value (or a missing header) misses and regenerates.

        NOTE(review): depends on the module-level ``mocked_response`` mock
        and the ``default`` URL defined elsewhere in this test module.
        """
        client = test.Client()

        def assert_cache_headers(response, expires, age=None):
            # Assertions shared by every response in this scenario.
            # (Extracted: the original repeated this stanza four times.)
            self.assertNotIn('ETag', response)
            self.assertNotIn('Last-Modified', response)
            self.assertIn('Vary', response)
            self.assertIn('Expires', response)
            self.assertIn('Cache-Control', response)
            if age is not None:
                self.assertIn('Age', response)
            self.assertEqual('Header', response['Vary'])
            self.assertEqual(expires, response['Expires'])
            self.assertEqual('max-age=600', response['Cache-Control'])
            if age is not None:
                self.assertEqual(age, response['Age'])

        # Sun, 17 Jul 2016 10:00:00 GMT -- first request generates the cache.
        with mock.patch.object(time, 'time', return_value=1468749600):
            response = client.get(
                reverse('default'),
                HTTP_HEADER='header1',
            )
            mocked_response.assert_called_once()
            assert_cache_headers(response, 'Sun, 17 Jul 2016 10:10:00 GMT')
            mocked_response.reset_mock()

        # Sun, 17 Jul 2016 10:05:00 GMT -- same header value hits the cache.
        with mock.patch.object(time, 'time', return_value=1468749900):
            response = client.get(
                reverse('default'),
                HTTP_HEADER='header1',
            )
            mocked_response.assert_not_called()
            assert_cache_headers(response, 'Sun, 17 Jul 2016 10:10:00 GMT', age='300')
            mocked_response.reset_mock()

        # Sun, 17 Jul 2016 10:05:00 GMT -- different header value misses.
        with mock.patch.object(time, 'time', return_value=1468749900):
            response = client.get(
                reverse('default'),
                HTTP_HEADER='header2',
            )
            mocked_response.assert_called_once()
            assert_cache_headers(response, 'Sun, 17 Jul 2016 10:15:00 GMT')
            mocked_response.reset_mock()

        # Sun, 17 Jul 2016 10:05:00 GMT -- absent header also misses.
        with mock.patch.object(time, 'time', return_value=1468749900):
            response = client.get(
                reverse('default'),
            )
            mocked_response.assert_called_once()
            assert_cache_headers(response, 'Sun, 17 Jul 2016 10:15:00 GMT')

Example 9

Project: django-cache
Source File: tests.py
View license
    def test_with_etag(self):
        """ETag-conditioned caching: 304 without cache, cache generation,
        cache hit within the window, and cache miss after expiry.

        NOTE(review): depends on the module-level ``mocked_response`` mock
        and the ``cache_with_etag`` URL defined elsewhere in this module.
        """
        client = test.Client()

        def assert_etag_headers(response, status, age=None):
            # Assertions shared by every response in this scenario.
            # (Extracted: the original repeated this stanza four times.)
            self.assertEqual(status, response.status_code)
            self.assertIn('Vary', response)
            self.assertIn('ETag', response)
            self.assertNotIn('Last-Modified', response)
            self.assertIn('Expires', response)
            self.assertIn('Cache-Control', response)
            if age is not None:
                self.assertIn('Age', response)
            self.assertEqual('If-None-Match', response['Vary'])
            self.assertEqual('"etag"', response['ETag'])
            self.assertEqual('Mon, 18 Jul 2016 10:00:00 GMT', response['Expires'])
            self.assertEqual('max-age=86400', response['Cache-Control'])
            if age is not None:
                self.assertEqual(age, response['Age'])

        # conditional request without cache -- not modified
        # Sun, 17 Jul 2016 10:00:00 GMT
        with mock.patch.object(time, 'time', return_value=1468749600):
            response = client.get(
                reverse('cache_with_etag'),
                HTTP_IF_NONE_MATCH='etag',
            )
            mocked_response.assert_not_called()
            assert_etag_headers(response, 304)
            mocked_response.reset_mock()

        # Request once -- generate cache
        # Sun, 17 Jul 2016 10:00:00 GMT
        with mock.patch.object(time, 'time', return_value=1468749600):
            response = client.get(
                reverse('cache_with_etag'),
                HTTP_IF_NONE_MATCH='another_etag',
            )
            mocked_response.assert_called_once()
            assert_etag_headers(response, 200)
            mocked_response.reset_mock()

        # repeat request with precondition -- hit cache
        # Sun, 17 Jul 2016 10:05:00 GMT
        with mock.patch.object(time, 'time', return_value=1468749900):
            response = client.get(
                reverse('cache_with_etag'),
                HTTP_IF_NONE_MATCH='another_etag',
            )
            mocked_response.assert_not_called()
            assert_etag_headers(response, 200, age='300')
            mocked_response.reset_mock()

        # expired request with precondition -- miss cache
        # Sun, 17 Jul 2016 10:00:00 GMT
        with mock.patch.object(time, 'time', return_value=1468749600):
            response = client.get(
                reverse('cache_with_etag'),
                HTTP_IF_NONE_MATCH='yet_another_etag',
            )
            mocked_response.assert_called_once()
            assert_etag_headers(response, 200)
            mocked_response.reset_mock()

Example 10

Project: fabricio
Source File: test_postgres.py
View license
    @mock.patch.object(fab, 'get')
    @mock.patch.object(fab, 'put')
    @mock.patch.object(files, 'exists', return_value=True)
    def test_update(self, exists, *args):
        """Table-driven test of the PostgreSQL container's update().

        Each case configures: whether the DB already exists, the "old"
        config file contents seen on the host, what the parent
        docker.Container.update() returns, and the exact shell commands
        update() is expected to issue plus its return value.

        NOTE(review): six.BytesIO('...') receives native str, which only
        works on Python 2 -- confirm against the project's supported
        interpreter versions.
        """
        # Scenario table: key = scenario name, value = mock setup + expectations.
        cases = dict(
            updated_without_config_change=dict(
                db_exists=True,
                old_configs=[
                    'postgresql.conf',
                    'pg_hba.conf',
                ],
                expected_commands=[
                    mock.call('rm -f /data/postgresql.conf.backup', ignore_errors=True, sudo=True),
                    mock.call('rm -f /data/pg_hba.conf.backup', ignore_errors=True, sudo=True),
                ],
                update_kwargs=dict(),
                parent_update_returned=True,
                expected_update_kwargs=dict(force=False, tag=None, registry=None),
                expected_result=True,
            ),
            no_change=dict(
                db_exists=True,
                old_configs=[
                    'postgresql.conf',
                    'pg_hba.conf',
                ],
                expected_commands=[],
                update_kwargs=dict(),
                parent_update_returned=False,
                expected_update_kwargs=dict(force=False, tag=None, registry=None),
                expected_result=False,
            ),
            no_change_with_tag=dict(
                db_exists=True,
                old_configs=[
                    'postgresql.conf',
                    'pg_hba.conf',
                ],
                expected_commands=[],
                update_kwargs=dict(tag='tag'),
                parent_update_returned=False,
                expected_update_kwargs=dict(force=False, tag='tag', registry=None),
                expected_result=False,
            ),
            no_change_with_registry=dict(
                db_exists=True,
                old_configs=[
                    'postgresql.conf',
                    'pg_hba.conf',
                ],
                expected_commands=[],
                update_kwargs=dict(registry='registry'),
                parent_update_returned=False,
                expected_update_kwargs=dict(force=False, tag=None, registry='registry'),
                expected_result=False,
            ),
            no_change_with_tag_and_registry=dict(
                db_exists=True,
                old_configs=[
                    'postgresql.conf',
                    'pg_hba.conf',
                ],
                expected_commands=[],
                update_kwargs=dict(tag='tag', registry='registry'),
                parent_update_returned=False,
                expected_update_kwargs=dict(force=False, tag='tag', registry='registry'),
                expected_result=False,
            ),
            forced=dict(
                db_exists=True,
                old_configs=[
                    'postgresql.conf',
                    'pg_hba.conf',
                ],
                expected_commands=[
                    mock.call('rm -f /data/postgresql.conf.backup', ignore_errors=True, sudo=True),
                    mock.call('rm -f /data/pg_hba.conf.backup', ignore_errors=True, sudo=True),
                ],
                update_kwargs=dict(force=True),
                parent_update_returned=True,
                expected_update_kwargs=dict(force=True, tag=None, registry=None),
                expected_result=True,
            ),
            pg_hba_changed=dict(
                db_exists=True,
                old_configs=[
                    'postgresql.conf',
                    'old_pg_hba.conf',
                ],
                expected_commands=[
                    mock.call('mv /data/pg_hba.conf /data/pg_hba.conf.backup', ignore_errors=True, sudo=True),
                    mock.call('docker kill --signal HUP name'),
                    mock.call('docker inspect --type container name_backup'),
                    mock.call('docker rm name_backup'),
                    mock.call('docker rmi image_id', ignore_errors=True),
                    mock.call('rm -f /data/postgresql.conf.backup', ignore_errors=True, sudo=True),
                ],
                # side_effect supplies fabricio.run results, one per expected command.
                side_effect=(
                    SucceededResult(),
                    SucceededResult(),
                    SucceededResult('[{"Image": "image_id"}]'),
                    SucceededResult(),
                    SucceededResult(),
                    SucceededResult(),
                ),
                update_kwargs=dict(),
                parent_update_returned=False,
                expected_update_kwargs=dict(force=False, tag=None, registry=None),
                expected_result=True,
            ),
            pg_hba_changed_backup_container_not_found=dict(
                db_exists=True,
                old_configs=[
                    'postgresql.conf',
                    'old_pg_hba.conf',
                ],
                expected_commands=[
                    mock.call('mv /data/pg_hba.conf /data/pg_hba.conf.backup', ignore_errors=True, sudo=True),
                    mock.call('docker kill --signal HUP name'),
                    mock.call('docker inspect --type container name_backup'),
                    mock.call('rm -f /data/postgresql.conf.backup', ignore_errors=True, sudo=True),
                ],
                side_effect=(
                    SucceededResult(),
                    SucceededResult(),
                    RuntimeError,
                    SucceededResult(),
                ),
                update_kwargs=dict(),
                parent_update_returned=False,
                expected_update_kwargs=dict(force=False, tag=None, registry=None),
                expected_result=True,
            ),
            pg_hba_changed_container_updated=dict(
                db_exists=True,
                old_configs=[
                    'postgresql.conf',
                    'old_pg_hba.conf',
                ],
                expected_commands=[
                    mock.call('mv /data/pg_hba.conf /data/pg_hba.conf.backup', ignore_errors=True, sudo=True),
                    mock.call('rm -f /data/postgresql.conf.backup', ignore_errors=True, sudo=True),
                ],
                update_kwargs=dict(),
                parent_update_returned=True,
                expected_update_kwargs=dict(force=False, tag=None, registry=None),
                expected_result=True,
            ),
            main_conf_changed=dict(
                db_exists=True,
                old_configs=[
                    'old_postgresql.conf',
                    'pg_hba.conf',
                ],
                expected_commands=[
                    mock.call('mv /data/postgresql.conf /data/postgresql.conf.backup', ignore_errors=True, sudo=True),
                    mock.call('docker restart --time 30 name'),
                    mock.call('docker inspect --type container name_backup'),
                    mock.call('docker rm name_backup'),
                    mock.call('docker rmi image_id', ignore_errors=True),
                    mock.call('rm -f /data/pg_hba.conf.backup', ignore_errors=True, sudo=True),
                ],
                side_effect=(
                    SucceededResult(),
                    SucceededResult(),
                    SucceededResult('[{"Image": "image_id"}]'),
                    SucceededResult(),
                    SucceededResult(),
                    SucceededResult(),
                ),
                update_kwargs=dict(),
                parent_update_returned=False,
                expected_update_kwargs=dict(force=False, tag=None, registry=None),
                expected_result=True,
            ),
            main_conf_changed_backup_container_not_found=dict(
                db_exists=True,
                old_configs=[
                    'old_postgresql.conf',
                    'pg_hba.conf',
                ],
                expected_commands=[
                    mock.call('mv /data/postgresql.conf /data/postgresql.conf.backup', ignore_errors=True, sudo=True),
                    mock.call('docker restart --time 30 name'),
                    mock.call('docker inspect --type container name_backup'),
                    mock.call('rm -f /data/pg_hba.conf.backup', ignore_errors=True, sudo=True),
                ],
                side_effect=(
                    SucceededResult(),
                    SucceededResult(),
                    RuntimeError,
                    SucceededResult(),
                ),
                update_kwargs=dict(),
                parent_update_returned=False,
                expected_update_kwargs=dict(force=False, tag=None, registry=None),
                expected_result=True,
            ),
            main_conf_changed_container_updated=dict(
                db_exists=True,
                old_configs=[
                    'old_postgresql.conf',
                    'pg_hba.conf',
                ],
                expected_commands=[
                    mock.call('mv /data/postgresql.conf /data/postgresql.conf.backup', ignore_errors=True, sudo=True),
                    mock.call('rm -f /data/pg_hba.conf.backup', ignore_errors=True, sudo=True),
                ],
                update_kwargs=dict(),
                parent_update_returned=True,
                expected_update_kwargs=dict(force=False, tag=None, registry=None),
                expected_result=True,
            ),
            configs_changed=dict(
                db_exists=True,
                old_configs=[
                    'old_postgresql.conf',
                    'old_pg_hba.conf',
                ],
                expected_commands=[
                    mock.call('mv /data/postgresql.conf /data/postgresql.conf.backup', ignore_errors=True, sudo=True),
                    mock.call('mv /data/pg_hba.conf /data/pg_hba.conf.backup', ignore_errors=True, sudo=True),
                    mock.call('docker restart --time 30 name'),
                    mock.call('docker inspect --type container name_backup'),
                    mock.call('docker rm name_backup'),
                    mock.call('docker rmi image_id', ignore_errors=True),
                ],
                side_effect=(
                    SucceededResult(),
                    SucceededResult(),
                    SucceededResult(),
                    SucceededResult('[{"Image": "image_id"}]'),
                    SucceededResult(),
                    SucceededResult(),
                ),
                update_kwargs=dict(),
                parent_update_returned=False,
                expected_update_kwargs=dict(force=False, tag=None, registry=None),
                expected_result=True,
            ),
            configs_changed_backup_container_not_found=dict(
                db_exists=True,
                old_configs=[
                    'old_postgresql.conf',
                    'old_pg_hba.conf',
                ],
                expected_commands=[
                    mock.call('mv /data/postgresql.conf /data/postgresql.conf.backup', ignore_errors=True, sudo=True),
                    mock.call('mv /data/pg_hba.conf /data/pg_hba.conf.backup', ignore_errors=True, sudo=True),
                    mock.call('docker restart --time 30 name'),
                    mock.call('docker inspect --type container name_backup'),
                ],
                side_effect=(
                    SucceededResult(),
                    SucceededResult(),
                    SucceededResult(),
                    RuntimeError,
                ),
                update_kwargs=dict(),
                parent_update_returned=False,
                expected_update_kwargs=dict(force=False, tag=None, registry=None),
                expected_result=True,
            ),
            configs_changed_container_updated=dict(
                db_exists=True,
                old_configs=[
                    'old_postgresql.conf',
                    'old_pg_hba.conf',
                ],
                expected_commands=[
                    mock.call('mv /data/postgresql.conf /data/postgresql.conf.backup', ignore_errors=True, sudo=True),
                    mock.call('mv /data/pg_hba.conf /data/pg_hba.conf.backup', ignore_errors=True, sudo=True),
                ],
                update_kwargs=dict(),
                parent_update_returned=True,
                expected_update_kwargs=dict(force=False, tag=None, registry=None),
                expected_result=True,
            ),
            from_scratch=dict(
                db_exists=False,
                old_configs=[
                    'old_postgresql.conf',
                    'old_pg_hba.conf',
                ],
                expected_commands=[
                    mock.call('docker run --volume /data:/data --stop-signal INT --rm --tty --interactive image:tag postgres --version', quiet=False),
                    mock.call('mv /data/postgresql.conf /data/postgresql.conf.backup', ignore_errors=True, sudo=True),
                    mock.call('mv /data/pg_hba.conf /data/pg_hba.conf.backup', ignore_errors=True, sudo=True),
                    mock.call('docker restart --time 30 name'),
                    mock.call('docker inspect --type container name_backup'),
                ],
                side_effect=(
                    SucceededResult(),
                    SucceededResult(),
                    SucceededResult(),
                    SucceededResult(),
                    RuntimeError,
                ),
                update_kwargs=dict(),
                parent_update_returned=False,
                expected_update_kwargs=dict(force=False, tag=None, registry=None),
                expected_result=True,
            ),
        )
        # Run every scenario as an independent subtest with fresh mocks.
        for case, data in cases.items():
            with self.subTest(case=case):
                # The "new" config contents the container will upload.
                postgres.open.side_effect = (
                    six.BytesIO('postgresql.conf'),
                    six.BytesIO('pg_hba.conf'),
                )
                container = TestContainer(
                    name='name',
                    options=dict(volumes='/data:/data'),
                )
                with mock.patch.object(
                    fabricio,
                    'run',
                    side_effect=data.get('side_effect'),
                ) as run:
                    with mock.patch.object(
                        container,
                        'db_exists',
                        return_value=data['db_exists'],
                    ):
                        with mock.patch.object(
                            docker.Container,
                            'update',
                            return_value=data['parent_update_returned'],
                        ) as update:
                            with mock.patch.object(
                                six.BytesIO,
                                'getvalue',
                                side_effect=data['old_configs'],
                            ):
                                result = container.update(**data['update_kwargs'])
                                self.assertListEqual(run.mock_calls, data['expected_commands'])
                                self.assertEqual(result, data['expected_result'])
                                update.assert_called_once_with(**data['expected_update_kwargs'])

Example 11

Project: fabricio
Source File: test_postgres.py
View license
    @mock.patch.object(postgres.StreamingReplicatedPostgresqlContainer, 'db_exists')
    @mock.patch.object(files, 'exists')
    @mock.patch.object(fabricio, 'run')
    def test_update_recovery_config(self, run, recovery_exists, db_exists):
        """Exercise update_recovery_config() across master/slave scenarios.

        mock.patch.object decorators apply bottom-up, so the injected mocks
        are: ``run`` -> fabricio.run, ``recovery_exists`` -> files.exists,
        ``db_exists`` -> StreamingReplicatedPostgresqlContainer.db_exists.

        Each case fixes whether the database and recovery.conf already exist
        and which host is being deployed, then checks the returned
        changed-flag, the master host recorded in the container's shared
        multiprocessing data, the shell commands issued, and (where a config
        is written) the recovery.conf content passed to update_config().
        """
        cases = dict(
            # Host is already the master: nothing to (re)configure.
            master=dict(
                db_exists=True,
                recovery_exists=False,
                host='master',
                expected_master_host='master',
                expected_result=False,
                expected_commands=[],
            ),
            # Existing slave with an elected master: recovery.conf is
            # (re)written to point at that master; no shell commands needed.
            slave=dict(
                db_exists=True,
                recovery_exists=True,
                host='slave',
                expected_master_host='master',
                expected_result=True,
                set_master='master',
                expected_recovery_conf="primary_conninfo = 'host=master port=5432 user=postgres'\n",
                expected_commands=[],
            ),
            # Slave whose old recovery.conf has extra settings: custom lines
            # are preserved, only primary_conninfo is replaced (and moved to
            # the end of the file).
            slave_with_existing_recovery_conf=dict(
                db_exists=True,
                recovery_exists=True,
                host='slave',
                expected_master_host='master',
                expected_result=True,
                set_master='master',
                old_recovery_conf=(
                    "custom_setting = 'custom_setting'\n"
                    "primary_conninfo = 'host=old_master port=5432 user=postgres'\n"
                    "custom_setting2 = 'custom_setting2'\n"
                ),
                expected_recovery_conf=(
                    "custom_setting = 'custom_setting'\n"
                    "custom_setting2 = 'custom_setting2'\n"
                    "primary_conninfo = 'host=master port=5432 user=postgres'\n"
                ),
                expected_commands=[],
            ),
            # Brand-new slave without a database: a base backup is pulled
            # from the master before the recovery config is written.
            new_slave=dict(
                db_exists=False,
                recovery_exists=False,
                host='slave',
                expected_master_host='master',
                expected_result=True,
                set_master='master',
                expected_recovery_conf="primary_conninfo = 'host=master port=5432 user=postgres'\n",
                expected_commands=[
                    mock.call("docker run --volume /data:/data --stop-signal INT --rm --tty --interactive image:latest /bin/bash -c 'pg_basebackup --progress --write-recovery-conf --xlog-method=stream --pgdata=$PGDATA --host=master --username=postgres --port=5432'", quiet=False),
                ],
            ),
            # No master elected yet and no database: this host becomes master
            # from scratch; no config change reported.
            master_promotion_from_scratch=dict(
                db_exists=False,
                recovery_exists=False,
                host='new_master',
                expected_master_host='new_master',
                expected_result=False,
                expected_commands=[],
            ),
            # Former slave promoted to master: its recovery.conf is moved
            # aside; requires pg_recovery_master_promotion_enabled=True.
            master_promotion=dict(
                db_exists=True,
                recovery_exists=True,
                host='new_master',
                expected_master_host='new_master',
                expected_result=True,
                expected_commands=[
                    mock.call('mv /data/recovery.conf /data/recovery.conf.backup', ignore_errors=False, sudo=True),
                ],
                init_kwargs=dict(pg_recovery_master_promotion_enabled=True),
            ),
        )
        for case, data in cases.items():
            with self.subTest(case=case):
                run.reset_mock()
                # update_recovery_config() reads the current recovery.conf via
                # the module-level open(); feed it the case's old content
                # (empty by default).
                # NOTE(review): the '' default relies on six.BytesIO accepting
                # a native str (Python 2 behavior) — confirm under Python 3.
                postgres.open = mock.MagicMock(
                    return_value=six.BytesIO(data.get('old_recovery_conf', '')),
                )
                db_exists.return_value = data['db_exists']
                recovery_exists.return_value = data['recovery_exists']
                fab.env.host = data['host']
                container = postgres.StreamingReplicatedPostgresqlContainer(
                    name='name', image='image', pg_data='/data',
                    options=dict(volumes='/data:/data'),
                    **data.get('init_kwargs', {})
                )
                if 'set_master' in data:
                    # Simulate another process having already elected a master.
                    container.multiprocessing_data.master = data['set_master']
                    container.master_obtained.set()
                with mock.patch.object(container, 'update_config', return_value=True) as update_config:
                    result = container.update_recovery_config()
                    self.assertEqual(result, data['expected_result'])
                    self.assertEqual(container.multiprocessing_data.master, data['expected_master_host'])
                    self.assertListEqual(run.mock_calls, data['expected_commands'])
                    if 'expected_recovery_conf' in data:
                        update_config.assert_called_once_with(
                            content=data['expected_recovery_conf'],
                            path='/data/recovery.conf',
                        )

Example 12

View license
    @mock.patch.object(Configuration, '_check_directory')
    @mock.patch.object(Configuration, '_configure_logging')
    @mock.patch.object(Configuration, '_set_logging_level')
    @mock.patch.object(Configuration, 'save_config')
    @mock.patch.object(batchapps.config.os.path, 'isfile')
    @mock.patch.object(batchapps.config.configparser.RawConfigParser, 'read')
    def test_config_set_defaults(self,
                                 mock_read,
                                 mock_file,
                                 mock_save,
                                 mock_level,
                                 mock_logging,
                                 mock_dir):
        """Test Configuration._set_defaults.

        Decorators apply bottom-up: mock_read -> RawConfigParser.read,
        mock_file -> os.path.isfile, mock_save -> save_config,
        mock_level -> _set_logging_level, mock_logging -> _configure_logging,
        mock_dir -> _check_directory.
        """
        # No existing data dir or config file: defaults are built in memory.
        mock_dir.return_value = False
        mock_logging.return_value = logging.getLogger("defaults")
        mock_file.return_value = False

        # default=True: config is saved, never read from disk.
        cfg = Configuration(default=True)
        self.assertTrue(mock_save.called)
        self.assertFalse(mock_read.called)
        self.assertFalse(mock_file.called)
        mock_logging.assert_called_with(
            os.path.join(self.userdir, "BatchAppsData"))

        # Default log level is WARNING (30).
        mock_level.assert_called_with(30)
        self.assertEqual(sorted(cfg._config.sections()),
                         sorted(["Authentication",
                                 "Blender",
                                 "Logging",
                                 "Test"]))

        # Without default=True the config file is probed (but absent here).
        cfg = Configuration()
        self.assertTrue(mock_save.called)
        self.assertFalse(mock_read.called)
        self.assertTrue(mock_file.called)
        mock_logging.assert_called_with(
            os.path.join(self.userdir, "BatchAppsData"))

        self.assertEqual(sorted(cfg._config.sections()),
                         sorted(["Authentication",
                                 "Blender",
                                 "Logging",
                                 "Test"]))

        # Explicit data_path/log_level/datadir are honored; both the given
        # path and the user dir are validated.
        cfg = Configuration(data_path="c:\\mypath",
                            log_level=10,
                            datadir="data")

        self.assertFalse(mock_read.called)
        mock_dir.assert_any_call("c:\\mypath")
        mock_dir.assert_any_call(self.userdir)
        mock_logging.assert_called_with(os.path.join(self.userdir, "data"))
        mock_level.assert_called_with(10)

        # Even with an existing file, default=True skips reading it.
        mock_file.return_value = True
        cfg = Configuration(default=True)
        self.assertTrue(mock_save.called)
        self.assertFalse(mock_read.called)

        # BUGFIX: was mock_save.reset(), which is not part of the Mock API —
        # it only creates a child mock and leaves recorded calls intact.
        # reset_mock() actually clears the call history.
        mock_save.reset_mock()
        # A failing read falls back to defaults and re-saves.
        mock_read.side_effect = OSError("test")
        cfg = Configuration(data_path=self.test_dir, application='Blender')
        self.assertTrue(mock_save.called)
        self.assertTrue(mock_read.called)
        self.assertEqual(cfg.jobtype, "Blender")
        self.assertEqual(cfg.job_type, "Blender")

        # jobtype=None falls back to the default job type.
        cfg = Configuration(data_path=self.test_dir, jobtype=None)
        self.assertEqual(cfg.jobtype, "Blender")
        self.assertEqual(cfg.job_type, "Blender")

        # Unknown or non-string job types are rejected.
        with self.assertRaises(InvalidConfigException):
            Configuration(application='TestApp', default=True)
        with self.assertRaises(InvalidConfigException):
            Configuration(jobtype=42, default=True)

Example 13

View license
    @mock.patch.object(batchapps.file_manager.os.path, 'isfile')
    @mock.patch.object(batchapps.file_manager.os.path, 'isdir')
    @mock.patch('batchapps.file_manager.glob')
    @mock.patch('batchapps.credentials.Configuration')
    @mock.patch('batchapps.credentials.Credentials')
    @mock.patch('batchapps.api.BatchAppsApi')
    @mock.patch.object(batchapps.file_manager.FileManager, "create_file_set")
    def test_filemgr_files_from_dir_a(self,
                                      mock_file,
                                      mock_api,
                                      mock_creds,
                                      mock_cfg,
                                      mock_glob,
                                      mock_isdir,
                                      mock_isfile):
        """Test files_from_dir: input validation and glob call patterns."""

        mgr = FileManager(mock_creds, cfg=mock_cfg)
        mock_isdir.return_value = False
        mock_isfile.return_value = True

        # Anything that is not an existing directory raises OSError.
        with self.assertRaises(OSError):
            mgr.files_from_dir(None)
        with self.assertRaises(OSError):
            mgr.files_from_dir("")
        with self.assertRaises(OSError):
            mgr.files_from_dir(42)

        if not self.use_test_files:
            self.skipTest("No test files present")

        # Non-recursive: a single glob over the directory contents.
        mock_isdir.return_value = True
        mgr.files_from_dir(os.path.join(self.test_dir, "test_config"))
        mock_glob.glob.assert_called_with(os.path.join(self.test_dir,
                                                       "test_config",
                                                       '*'))

        # Recursive: the directory itself and each entry are globbed.
        mgr.files_from_dir(os.path.join(self.test_dir, "test_config"),
                           recursive=True)

        mock_glob.glob.assert_any_call(os.path.join(self.test_dir,
                                                    "test_config",
                                                    '*'))

        mock_glob.glob.assert_any_call(os.path.join(self.test_dir,
                                                    "test_config",
                                                    "batch_apps.ini",
                                                    '*'))


        # BUGFIX: was mock_glob.reset() plus a manual call_count assignment.
        # Mock has no reset() method — the attribute access silently created
        # a child mock and cleared nothing. reset_mock() clears the recorded
        # calls and call_count in one step.
        mock_glob.glob.reset_mock()
        mgr.files_from_dir(self.test_dir, recursive=False)
        mock_glob.glob.assert_any_call(self.test_dir + "\\*")
        self.assertEqual(mock_glob.glob.call_count, 1)

        mock_glob.glob.reset_mock()
        # Recursive with a pattern: one glob per directory level.
        mgr.files_from_dir(self.test_dir,
                           recursive=True,
                           pattern="*.png")

        self.assertEqual(mock_glob.glob.call_count, 6)
        mock_glob.glob.assert_any_call(self.test_dir + "\\*.png")
        mock_glob.glob.assert_any_call(self.test_dir + "\\test_config\\*.png")
Example 14

Project: iktomi
Source File: sqla.py
View license
    def test_create_drop_tables_several_meta(self):
        """Tables are created/dropped only on engines bound to the named metadata."""
        FirstBase = declarative_base()

        class FirstA(FirstBase):
            __tablename__ = 'A'
            id = Column(Integer, primary_key=True)

        SecondBase = declarative_base()

        class SecondA(SecondBase):
            __tablename__ = 'A'
            id = Column(Integer, primary_key=True)

        db1 = create_engine('sqlite://')
        db2 = create_engine('sqlite://')
        cli = Sqla(
            orm.sessionmaker(binds={
                FirstA.__table__: db1,
                SecondA.__table__: db2,
            }),
            metadata={
                'm1': FirstBase.metadata,
                'm2': SecondBase.metadata,
                'm3': MetaData(),  # empty metadata: creates nothing
            },
        )

        def confirmed_drop(*names):
            # command_drop_tables asks for confirmation on stdin; answer "y".
            with mock.patch.object(sys.stdin, 'readline', return_value='y'):
                cli.command_drop_tables(*names)

        for verbose in (False, True):
            # Creating everything touches both engines.
            cli.command_create_tables(verbose=verbose)
            self.assertTrue(db1.has_table('A'))
            self.assertTrue(db2.has_table('A'))

            # Dropping a single metadata leaves the other engine untouched.
            confirmed_drop('m1')
            self.assertFalse(db1.has_table('A'))
            self.assertTrue(db2.has_table('A'))

            # Dropping with no argument clears everything.
            confirmed_drop()
            self.assertFalse(db1.has_table('A'))
            self.assertFalse(db2.has_table('A'))

            # Creating a single metadata only touches its bound engine.
            cli.command_create_tables('m1', verbose=verbose)
            self.assertTrue(db1.has_table('A'))
            self.assertFalse(db2.has_table('A'))

            confirmed_drop()
            self.assertFalse(db1.has_table('A'))
            self.assertFalse(db2.has_table('A'))

            # An empty MetaData creates no tables anywhere.
            cli.command_create_tables('m3', verbose=verbose)
            self.assertFalse(db1.has_table('A'))
            self.assertFalse(db2.has_table('A'))

Example 15

Project: iktomi
Source File: sqla.py
View license
    def test_schema_several_meta(self):
        """command_schema dumps DDL only for the requested metadata or table."""
        FirstBase = declarative_base()

        class FirstA(FirstBase):
            __tablename__ = 'A'
            id = Column(Integer, primary_key=True)

        class FirstB(FirstBase):
            __tablename__ = 'B'
            id = Column(Integer, primary_key=True)

        SecondBase = declarative_base()

        class SecondA(SecondBase):
            __tablename__ = 'A'
            id = Column(Integer, primary_key=True)

        db1 = create_engine('sqlite://')
        db2 = create_engine('sqlite://')
        cli = Sqla(
            orm.sessionmaker(binds={
                FirstA.__table__: db1,
                FirstB.__table__: db1,
                SecondA.__table__: db2,
            }),
            metadata={
                'm1': FirstBase.metadata,
                'm2': SecondBase.metadata,
                'm3': MetaData(),  # empty metadata: no tables
            },
        )

        def schema_tables(args=(), swallow_exit=False):
            # Run command_schema with stdout captured and return the names of
            # the tables whose DDL was printed. Unknown names make the
            # command exit; callers that expect this pass swallow_exit=True.
            captured = StringIO()
            with mock.patch.object(sys, 'stdout', captured):
                if swallow_exit:
                    try:
                        cli.command_schema(*args)
                    except SystemExit:
                        pass
                else:
                    cli.command_schema(*args)
            return self._created_tables(captured.getvalue())

        # No argument: all metadata — 'A' appears once per Base, 'B' once.
        created = schema_tables()
        self.assertEqual(len(created), 3)
        self.assertEqual(created.count('A'), 2)
        self.assertEqual(created.count('B'), 1)

        # A single metadata name limits output to its tables.
        created = schema_tables(('m1',))
        self.assertEqual(len(created), 2)
        self.assertEqual(created.count('A'), 1)
        self.assertEqual(created.count('B'), 1)

        # meta.table addressing selects exactly one table.
        created = schema_tables(('m1.B',))
        self.assertEqual(created, ['B'])

        # Unknown table in a known metadata: nothing printed, command exits.
        created = schema_tables(('m2.B',), swallow_exit=True)
        self.assertEqual(created, [])

        # Table addressed inside an empty metadata: same outcome.
        created = schema_tables(('m3.A',), swallow_exit=True)
        self.assertEqual(created, [])

Example 16

Project: st2
Source File: test_mistral_v2_auth.py
View license
    @mock.patch.object(
        client.Client, 'authenticate',
        mock.MagicMock(return_value=(cfg.CONF.mistral.v2_base_url, '123', 'abc', 'xyz')))
    @mock.patch.object(
        workflows.WorkflowManager, 'list',
        mock.MagicMock(return_value=[]))
    @mock.patch.object(
        workflows.WorkflowManager, 'get',
        mock.MagicMock(return_value=WF1))
    @mock.patch.object(
        workflows.WorkflowManager, 'create',
        mock.MagicMock(return_value=[WF1]))
    @mock.patch.object(
        executions.ExecutionManager, 'create',
        mock.MagicMock(return_value=executions.Execution(None, WF1_EXEC)))
    def test_launch_workflow_with_mistral_auth(self):
        """Launch a workflow with keystone credentials configured for Mistral.

        Verifies that the Mistral client authenticates against keystone with
        the configured username/password/project/auth URL, and that the
        execution is created with the expected workflow input and st2
        context environment.
        """
        # Configure keystone auth settings in the [mistral] config group.
        cfg.CONF.set_default('keystone_username', 'foo', group='mistral')
        cfg.CONF.set_default('keystone_password', 'bar', group='mistral')
        cfg.CONF.set_default('keystone_project_name', 'admin', group='mistral')
        cfg.CONF.set_default('keystone_auth_url', 'http://127.0.0.1:5000/v3', group='mistral')

        MistralRunner.entry_point = mock.PropertyMock(return_value=WF1_YAML_FILE_PATH)
        liveaction = LiveActionDB(action=WF1_NAME, parameters=ACTION_PARAMS)
        liveaction, execution = action_service.request(liveaction)
        liveaction = LiveAction.get_by_id(str(liveaction.id))
        # The action should have been dispatched and be running.
        self.assertEqual(liveaction.status, action_constants.LIVEACTION_STATUS_RUNNING)

        # The runner records the Mistral execution id/workflow in the
        # liveaction context for later correlation.
        mistral_context = liveaction.context.get('mistral', None)
        self.assertIsNotNone(mistral_context)
        self.assertEqual(mistral_context['execution_id'], WF1_EXEC.get('id'))
        self.assertEqual(mistral_context['workflow_name'], WF1_EXEC.get('workflow_name'))

        workflow_input = copy.deepcopy(ACTION_PARAMS)
        workflow_input.update({'count': '3'})

        # Environment the runner passes to Mistral so that st2 actions run
        # inside the workflow can call back to the st2 API.
        env = {
            'st2_execution_id': str(execution.id),
            'st2_liveaction_id': str(liveaction.id),
            'st2_action_api_url': 'http://0.0.0.0:9101/v1',
            '__actions': {
                'st2.action': {
                    'st2_context': {
                        'api_url': 'http://0.0.0.0:9101/v1',
                        'endpoint': 'http://0.0.0.0:9101/v1/actionexecutions',
                        'parent': {
                            'execution_id': str(execution.id)
                        },
                        'notify': {},
                        'skip_notify_tasks': []
                    }
                }
            }
        }

        # Keystone credentials from the [mistral] group were used to
        # authenticate the Mistral client.
        client.Client.authenticate.assert_called_with(
            cfg.CONF.mistral.v2_base_url,
            cfg.CONF.mistral.keystone_username,
            cfg.CONF.mistral.keystone_password,
            cfg.CONF.mistral.keystone_project_name,
            cfg.CONF.mistral.keystone_auth_url,
            None, 'publicURL', 'workflow', None, None, None, False)

        executions.ExecutionManager.create.assert_called_with(
            WF1_NAME, workflow_input=workflow_input, env=env)

Example 17

Project: st2
Source File: test_mistral_v2_rerun.py
View license
    @mock.patch.object(
        workflows.WorkflowManager, 'list',
        mock.MagicMock(return_value=[]))
    @mock.patch.object(
        workflows.WorkflowManager, 'get',
        mock.MagicMock(return_value=WF1))
    @mock.patch.object(
        workbooks.WorkbookManager, 'create',
        mock.MagicMock(return_value=WB1))
    @mock.patch.object(
        executions.ExecutionManager, 'create',
        mock.MagicMock(return_value=executions.Execution(None, WB1_MAIN_EXEC)))
    @mock.patch.object(
        executions.ExecutionManager, 'get',
        mock.MagicMock(return_value=executions.Execution(None, WB1_MAIN_EXEC_ERRORED)))
    @mock.patch.object(
        executions.ExecutionManager, 'list',
        mock.MagicMock(
            return_value=[
                executions.Execution(None, WB1_MAIN_EXEC_ERRORED),
                executions.Execution(None, WB1_SUB1_EXEC_ERRORED)]))
    @mock.patch.object(
        tasks.TaskManager, 'list',
        # side_effect: first list() call returns the main workflow's tasks,
        # the second returns the subworkflow's tasks.
        mock.MagicMock(side_effect=[WB1_MAIN_TASKS, WB1_SUB1_TASKS]))
    @mock.patch.object(
        tasks.TaskManager, 'rerun',
        mock.MagicMock(return_value=None))
    def test_resume_subworkflow_task(self):
        """Rerun a failed task that lives inside a subworkflow.

        The task reference 'greet.say-friend' addresses a task of the
        errored subworkflow; the runner must resolve it and call Mistral's
        task rerun API with reset=False (resume without resetting state).
        """
        MistralRunner.entry_point = mock.PropertyMock(return_value=WB1_YAML_FILE_PATH)
        liveaction1 = LiveActionDB(action=WB1_NAME, parameters=ACTION_PARAMS)
        liveaction1, execution1 = action_service.request(liveaction1)

        # Rerun the execution.
        context = {
            're-run': {
                'ref': execution1.id,
                'tasks': ['greet.say-friend']
            }
        }

        liveaction2 = LiveActionDB(action=WB1_NAME, parameters=ACTION_PARAMS, context=context)
        liveaction2, execution2 = action_service.request(liveaction2)
        liveaction2 = LiveAction.get_by_id(str(liveaction2.id))

        self.assertEqual(liveaction2.status, action_constants.LIVEACTION_STATUS_RUNNING)

        # Environment forwarded to Mistral: st2 callback URLs plus the
        # re-run context under the parent st2 context.
        expected_env = {
            'st2_liveaction_id': str(liveaction2.id),
            'st2_execution_id': str(execution2.id),
            '__actions': {
                'st2.action': {
                    'st2_context': {
                        'api_url': 'http://0.0.0.0:9101/v1',
                        'endpoint': 'http://0.0.0.0:9101/v1/actionexecutions',
                        'notify': {},
                        'parent': {
                            're-run': context['re-run'],
                            'execution_id': str(execution2.id)
                        },
                        'skip_notify_tasks': []
                    }
                }
            },
            'st2_action_api_url': 'http://0.0.0.0:9101/v1'
        }

        # reset=False because the task was not listed under 'reset'.
        tasks.TaskManager.rerun.assert_called_with(
            WB1_SUB1_TASK2['id'],
            reset=False,
            env=expected_env
        )

Example 18

Project: st2
Source File: test_mistral_v2_rerun.py
View license
    @mock.patch.object(
        workflows.WorkflowManager, 'list',
        mock.MagicMock(return_value=[]))
    @mock.patch.object(
        workflows.WorkflowManager, 'get',
        mock.MagicMock(return_value=WF1))
    @mock.patch.object(
        workbooks.WorkbookManager, 'create',
        mock.MagicMock(return_value=WB1))
    @mock.patch.object(
        executions.ExecutionManager, 'create',
        mock.MagicMock(return_value=executions.Execution(None, WB1_MAIN_EXEC)))
    @mock.patch.object(
        executions.ExecutionManager, 'get',
        mock.MagicMock(return_value=executions.Execution(None, WB1_MAIN_EXEC_ERRORED)))
    @mock.patch.object(
        executions.ExecutionManager, 'list',
        mock.MagicMock(
            return_value=[
                executions.Execution(None, WB1_MAIN_EXEC_ERRORED),
                executions.Execution(None, WB1_SUB1_EXEC_ERRORED)]))
    @mock.patch.object(
        tasks.TaskManager, 'list',
        # side_effect: first list() call returns the main workflow's tasks,
        # the second returns the subworkflow's tasks.
        mock.MagicMock(side_effect=[WB1_MAIN_TASKS, WB1_SUB1_TASKS]))
    @mock.patch.object(
        tasks.TaskManager, 'rerun',
        mock.MagicMock(return_value=None))
    def test_resume_and_reset_subworkflow_task(self):
        """Rerun a subworkflow task with its state reset.

        Same scenario as test_resume_subworkflow_task, except the task is
        also listed under 'reset' in the re-run context, so Mistral's task
        rerun API must be called with reset=True.
        """
        MistralRunner.entry_point = mock.PropertyMock(return_value=WB1_YAML_FILE_PATH)
        liveaction1 = LiveActionDB(action=WB1_NAME, parameters=ACTION_PARAMS)
        liveaction1, execution1 = action_service.request(liveaction1)

        # Rerun the execution.
        context = {
            're-run': {
                'ref': execution1.id,
                'tasks': ['greet.say-friend'],
                'reset': ['greet.say-friend']
            }
        }

        liveaction2 = LiveActionDB(action=WB1_NAME, parameters=ACTION_PARAMS, context=context)
        liveaction2, execution2 = action_service.request(liveaction2)
        liveaction2 = LiveAction.get_by_id(str(liveaction2.id))

        self.assertEqual(liveaction2.status, action_constants.LIVEACTION_STATUS_RUNNING)

        # Environment forwarded to Mistral: st2 callback URLs plus the
        # re-run context under the parent st2 context.
        expected_env = {
            'st2_liveaction_id': str(liveaction2.id),
            'st2_execution_id': str(execution2.id),
            '__actions': {
                'st2.action': {
                    'st2_context': {
                        'api_url': 'http://0.0.0.0:9101/v1',
                        'endpoint': 'http://0.0.0.0:9101/v1/actionexecutions',
                        'notify': {},
                        'parent': {
                            're-run': context['re-run'],
                            'execution_id': str(execution2.id)
                        },
                        'skip_notify_tasks': []
                    }
                }
            },
            'st2_action_api_url': 'http://0.0.0.0:9101/v1'
        }

        # reset=True because the task appears in the 'reset' list.
        tasks.TaskManager.rerun.assert_called_with(
            WB1_SUB1_TASK2['id'],
            reset=True,
            env=expected_env
        )

Example 19

Project: viewfinder
Source File: secrets_test.py
View license
  def testEncrypted(self):
    """Test secrets manager with encrypted secrets.

    Creates an encrypted secrets store via an interactive passphrase prompt,
    then verifies every passphrase-acquisition path a fresh SecretsManager
    tries: the --passphrase flag, AWS instance (AMI) metadata (server mode),
    and the getpass prompt (devbox mode).
    """

    # The only way to make a secret manager encrypt when empty is to ask it
    # to prompt for a passphrase. It does so using getpass.getpass.
    passphrase = 'my voice is my passport!'
    with mock.patch.object(secrets.getpass, 'getpass') as getpass:
      getpass.return_value = passphrase
      mgr = secrets.SecretsManager('test', self._domain, self._shared_dir)
      mgr.Init(should_prompt=True)

    # Secret will be encrypted. On-disk format is a JSON [cipher, ciphertext]
    # pair, so the plaintext never appears in the file.
    mgr.PutSecret('foo', 'codeforfoo')
    self.assertEqual(mgr.GetSecret('foo'), 'codeforfoo')
    with open(os.path.join(self._shared_dir, self._domain, 'foo')) as f:
      contents = f.read()
      self.assertNotEqual(contents, 'codeforfoo')
      (cipher, ciphertext) = json.loads(contents)
      self.assertEqual(cipher, 'AES')
      # TODO(marc): maybe we should test the encryption itself.

    # Now create a new secrets manager. We do not ask it to prompt, it will figure it out
    # all by itself. It does this in a number of ways:


    ##################### --devbox=False ########################
    options.options.devbox = False

    # Set stdin to raise an exception, just to make sure we're not using it.
    with mock.patch.object(secrets.getpass, 'getpass') as getpass:
      getpass.side_effect = Exception('you should not be using stdin in --devbox=False mode')
      # Uses --passphrase if specified.
      options.options.passphrase = passphrase
      mgr = secrets.SecretsManager('test', self._domain, self._shared_dir)
      mgr.Init()
      self.assertEqual(mgr.GetSecret('foo'), 'codeforfoo')

      # We get an assertion error when a passphrase is supplied but bad. This is because it fails on sha sum.
      options.options.passphrase = 'bad passphrase'
      mgr = secrets.SecretsManager('test', self._domain, self._shared_dir)
      self.assertRaises(AssertionError, mgr.Init)

      # Uses AMI metadata otherwise.
      options.options.passphrase = None
      # No AMI fetched, or passphrase not one of the fetched fields.
      mgr = secrets.SecretsManager('test', self._domain, self._shared_dir)
      self.assertRaisesRegexp(CannotReadEncryptedSecretError, 'failed to fetch passphrase from AWS instance metadata',
                              mgr.Init)

      # Good passphrase from AMI metadata.
      ami_metadata.SetAMIMetadata({'user-data/passphrase': passphrase})
      mgr = secrets.SecretsManager('test', self._domain, self._shared_dir)
      mgr.Init()
      self.assertEqual(mgr.GetSecret('foo'), 'codeforfoo')

      # Bad passphrase from AMI metadata.
      ami_metadata.SetAMIMetadata({'user-data/passphrase': 'not a good passphrase.'})
      mgr = secrets.SecretsManager('test', self._domain, self._shared_dir)
      self.assertRaises(AssertionError, mgr.Init)


    ##################### --devbox=True ########################
    options.options.devbox = True
    # Set bad AMI metadata just to show that we never use it.
    ami_metadata.SetAMIMetadata({'user-data/passphrase': 'not a good passphrase.'})

    # Uses --passphrase if specified.
    options.options.passphrase = passphrase
    mgr = secrets.SecretsManager('test', self._domain, self._shared_dir)
    mgr.Init()
    self.assertEqual(mgr.GetSecret('foo'), 'codeforfoo')

    # If --passphrase is None and we cannot prompt, we have no way of getting the passphrase.
    options.options.passphrase = None
    mgr = secrets.SecretsManager('test', self._domain, self._shared_dir)
    self.assertRaisesRegexp(CannotReadEncryptedSecretError, 'passphrase is required but was not provided',
                            mgr.Init, can_prompt=False)

    # Passphrase is read from stdin if prompting is allowed.
    with mock.patch.object(secrets.getpass, 'getpass') as getpass:
      getpass.return_value = passphrase
      mgr = secrets.SecretsManager('test', self._domain, self._shared_dir)
      mgr.Init()
      self.assertEqual(mgr.GetSecret('foo'), 'codeforfoo')

    # Pass a bad passphrase on stdin.
    with mock.patch.object(secrets.getpass, 'getpass') as getpass:
      getpass.return_value = 'not a good passphrase'
      mgr = secrets.SecretsManager('test', self._domain, self._shared_dir)
      self.assertRaises(AssertionError, mgr.Init)

Example 20

Project: viewfinder
Source File: remove_contacts_test.py
View license
  @mock.patch.object(Contact, 'MAX_CONTACTS_LIMIT', 2)
  @mock.patch.object(Contact, 'MAX_REMOVED_CONTACTS_LIMIT', 2)
  @mock.patch.object(Operation, 'FAILPOINTS_ENABLED', True)
  def testMaxRemovedContactsLimit(self):
    """Exceeding the removed-contacts limit triggers a reset notification
    and deletion of every removed contact row.
    """

    def _AssertContactCounts(expected_present, expected_removed, expect_reset_notification):
      # Count present vs removed contact rows for the user and check that a
      # removed-contacts reset notification was (or was not) emitted.
      present_count = 0
      removed_count = 0
      contact_rows = self._RunAsync(Contact.RangeQuery,
                                    self._client,
                                    self._user.user_id,
                                    range_desc=None,
                                    limit=100,
                                    col_names=None)
      for row in contact_rows:
        if row.IsRemoved():
          removed_count += 1
        else:
          present_count += 1
      self.assertEqual(expected_present, present_count)
      self.assertEqual(expected_removed, removed_count)

      reset_seen = _CheckForRemovedContactsReset(self._tester, self._user.user_id)
      self.assertEqual(expect_reset_notification, reset_seen)

    def _UploadManualContact(identity_key):
      # Upload a single manual contact and return its contact id.
      result = self._tester.UploadContacts(self._cookie,
                                           [{'identities': [{'identity': identity_key}],
                                             'contact_source': Contact.MANUAL}])
      return result['contact_ids'][0]

    contact1_id = _UploadManualContact('Email:[email protected]')
    _AssertContactCounts(1, 0, False)  # present: [contact1], removed: []

    contact2_id = _UploadManualContact('Email:[email protected]')
    _AssertContactCounts(2, 0, False)  # present: [contact1, contact2], removed: []

    # A single removed contact stays under the limit: no reset.
    self._tester.RemoveContacts(self._cookie, [contact1_id])
    _AssertContactCounts(1, 1, False)  # present: [contact2], removed: [contact1]

    # Re-upload the first contact; its removed marker is cleared.
    _UploadManualContact('Email:[email protected]')
    _AssertContactCounts(2, 0, False)  # present: [contact1, contact2], removed: []

    # Remove it again; still only one removed row, so no reset.
    self._tester.RemoveContacts(self._cookie, [contact1_id])
    _AssertContactCounts(1, 1, False)  # present: [contact2], removed: [contact1]

    # Upload a third contact.
    contact3_id = _UploadManualContact('Email:[email protected]')
    _AssertContactCounts(2, 1, False)  # present: [contact2, contact3], removed: [contact1]

    # A second removed contact reaches MAX_REMOVED_CONTACTS_LIMIT (2) and
    # triggers the reset: all removed rows are deleted.
    self._tester.RemoveContacts(self._cookie, [contact3_id])
    _AssertContactCounts(1, 0, True)  # present: [contact2], removed: []

    # Check that we can query_contacts with the None start_key that may be sent in a remove_contacts notification.
    query_result = self._tester.QueryContacts(self._cookie, start_key=None)
    self.assertEqual(1, len(query_result['contacts']))
    self.assertEqual(contact2_id, query_result['contacts'][0]['contact_id'])

Example 21

Project: elastalert
Source File: base_test.py
View license
def test_set_starttime(ea):
    """Verify set_starttime() chooses the correct query-window start.

    Walks the precedence rules in order: global buffer_time, a rule-specific
    buffer_time, the persisted last-run time from get_starttime(),
    minimum_starttime clamping, run_every for count queries, and finally
    previous_endtime.  ``ea`` is the ElastAlerter fixture with one rule.
    """
    # standard query, no starttime, no last run
    end = ts_to_dt('2014-10-10T10:10:10')
    with mock.patch.object(ea, 'get_starttime') as mock_gs:
        mock_gs.return_value = None
        ea.set_starttime(ea.rules[0], end)
        assert mock_gs.call_count == 1
    assert ea.rules[0]['starttime'] == end - ea.buffer_time

    # Standard query, no starttime, rule specific buffer_time
    ea.rules[0].pop('starttime')
    ea.rules[0]['buffer_time'] = datetime.timedelta(minutes=37)
    with mock.patch.object(ea, 'get_starttime') as mock_gs:
        mock_gs.return_value = None
        ea.set_starttime(ea.rules[0], end)
        assert mock_gs.call_count == 1
    assert ea.rules[0]['starttime'] == end - datetime.timedelta(minutes=37)
    ea.rules[0].pop('buffer_time')

    # Standard query, no starttime, last run
    ea.rules[0].pop('starttime')
    with mock.patch.object(ea, 'get_starttime') as mock_gs:
        mock_gs.return_value = ts_to_dt('2014-10-10T00:00:00')
        ea.set_starttime(ea.rules[0], end)
        assert mock_gs.call_count == 1
    assert ea.rules[0]['starttime'] == ts_to_dt('2014-10-10T00:00:00')

    # Standard query, no starttime, last run, assure buffer_time doesn't go past
    ea.rules[0].pop('starttime')
    ea.rules[0]['buffer_time'] = datetime.timedelta(weeks=1000)
    with mock.patch.object(ea, 'get_starttime') as mock_gs:
        mock_gs.return_value = ts_to_dt('2014-10-09T00:00:00')
        # First call sets minimum_starttime
        ea.set_starttime(ea.rules[0], end)
    # Second call uses buffer_time, but it goes past minimum
    ea.set_starttime(ea.rules[0], end)
    assert ea.rules[0]['starttime'] == ts_to_dt('2014-10-09T00:00:00')

    # Standard query, starttime
    ea.rules[0].pop('buffer_time')
    ea.rules[0].pop('minimum_starttime')
    with mock.patch.object(ea, 'get_starttime') as mock_gs:
        mock_gs.return_value = None
        ea.set_starttime(ea.rules[0], end)
        # get_starttime must not be consulted when a starttime already exists
        assert mock_gs.call_count == 0
    assert ea.rules[0]['starttime'] == end - ea.buffer_time

    # Count query, starttime, no previous endtime
    ea.rules[0]['use_count_query'] = True
    ea.rules[0]['doc_type'] = 'blah'
    with mock.patch.object(ea, 'get_starttime') as mock_gs:
        mock_gs.return_value = None
        ea.set_starttime(ea.rules[0], end)
        assert mock_gs.call_count == 0
    # Count queries fall back to run_every rather than buffer_time
    assert ea.rules[0]['starttime'] == end - ea.run_every

    # Count query, with previous endtime
    with mock.patch('elastalert.elastalert.elasticsearch_client'):
        ea.run_rule(ea.rules[0], END, START)
    ea.set_starttime(ea.rules[0], end)
    assert ea.rules[0]['starttime'] == END

    # buffer_time doesn't go past previous endtime
    ea.rules[0].pop('use_count_query')
    ea.rules[0]['previous_endtime'] = end - ea.buffer_time * 2
    ea.set_starttime(ea.rules[0], end)
    assert ea.rules[0]['starttime'] == ea.rules[0]['previous_endtime']

Example 22

Project: badwolf
Source File: test_lint.py
View license
def test_flake8_lint_a_py(app, caplog):
    """flake8 should report exactly one problem (the 'a+ b' spacing) in a.py."""
    diff = """diff --git a/a.py b/a.py
new file mode 100644
index 0000000..fdeea15
--- /dev/null
+++ b/a.py
@@ -0,0 +1,6 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import, unicode_literals
+
+
+def add(a, b):
+    return a+ b
"""

    ctx = Context(
        'deepanalyzer/badwolf',
        None,
        'pullrequest',
        'message',
        {'commit': {'hash': '000000'}},
        {'commit': {'hash': '111111'}},
        pr_id=1
    )
    spec = Specification()
    spec.linters.append(ObjectDict(name='flake8', pattern=None))
    processor = LintProcessor(ctx, spec, os.path.join(FIXTURES_PATH, 'flake8'))
    changes = PatchSet(diff.split('\n'))

    # Stub out everything that would hit the VCS host or report remotely.
    changes_patch = mock.patch.object(processor, 'load_changes')
    status_patch = mock.patch.object(processor, 'update_build_status')
    report_patch = mock.patch.object(processor, '_report')
    with changes_patch as load_changes, status_patch as build_status, \
            report_patch as report:
        load_changes.return_value = changes
        build_status.return_value = None
        report.return_value = None
        processor.problems.set_changes(changes)
        processor.process()

        assert load_changes.called

    assert len(processor.problems) == 1
    issue = processor.problems[0]
    assert issue.filename == 'a.py'
    assert issue.line == 6

Example 23

View license
    def setUp(self):
        """Build the mocked subscription-manager environment for cert tests.

        Patches repolib/entcertlib side effects, stubs hardware probing,
        wires a mock UEP connection, and injects stub cert directories plus
        a mock cert sorter so the actions under test run fully offline.
        """
        SubManFixture.setUp(self)
        # we have to have a reference to the patchers
        #self.patcher2 = mock.patch.object(entcertlib.EntCertUpdateAction, '_get_consumer_id')
        #self.entcertlib_updateaction_getconsumerid = self.patcher2.start()

        # Prevent repo file rewrites during entitlement cert updates.
        self.patcher3 = mock.patch.object(repolib.RepoUpdateActionCommand, 'perform')
        self.repolib_updateaction_perform = self.patcher3.start()

        self.patcher6 = mock.patch('subscription_manager.managerlib.persist_consumer_cert')
        self.managerlib_persist_consumer_cert = self.patcher6.start()

        # mock out all hardware fetching... we may need to fake socket counts
        self.hwprobe_getall_patcher = mock.patch.object(hwprobe.Hardware, 'get_all')
        self.hwprobe_getall_mock = self.hwprobe_getall_patcher.start()
        self.hwprobe_getall_mock.return_value = {}

        # No certs are actually written to disk.
        self.patcher_entcertlib_writer = mock.patch("subscription_manager.entcertlib.Writer")
        self.entcertlib_writer = self.patcher_entcertlib_writer.start()

        self.patcher_entcertlib_action_syslogreport = mock.patch.object(entcertlib.EntCertUpdateAction, 'syslog_results')
        self.update_action_syslog_mock = self.patcher_entcertlib_action_syslogreport.start()

        # some stub certs
        stub_product = stubs.StubProduct('stub_product')
        self.stub_ent1 = stubs.StubEntitlementCertificate(stub_product)
        self.stub_ent2 = stubs.StubEntitlementCertificate(stub_product)
        self.stub_ent_expires_tomorrow = \
            stubs.StubEntitlementCertificate(stub_product,
                                             end_date=datetime.now() + timedelta(days=1))

        self.stub_ent_expires_tomorrow_ent_dir = \
            stubs.StubEntitlementDirectory([self.stub_ent_expires_tomorrow])

        self.local_ent_certs = [self.stub_ent1, self.stub_ent2]
        self.stub_entitled_proddir = \
            stubs.StubProductDirectory([stubs.StubProductCertificate(stub_product)])

        # local entitlement dir
        self.stub_ent_dir = stubs.StubEntitlementDirectory(self.local_ent_certs)
        inj.provide(inj.ENT_DIR, self.stub_ent_dir)

        # Mock UEP reports exactly the two local serials, so the server and
        # local entitlement state start out in sync.
        self.mock_uep = mock.Mock()
        self.mock_uep.getCertificateSerials = mock.Mock(return_value=[{'serial': self.stub_ent1.serial},
                                                                        {'serial': self.stub_ent2.serial}])
        self.mock_uep.getConsumer = mock.Mock(return_value=CONSUMER_DATA)
        self.set_consumer_auth_cp(self.mock_uep)

        stub_release = {'releaseVer': '6.4'}
        self.mock_uep.getRelease = mock.Mock(return_value=stub_release)

        # we need to mock the consumers uuid with the mocked GoneExceptions
        # uuid
        self._inject_mock_valid_consumer(uuid="234234")

        self.repolib_updateaction_perform.return_value = 0

        # Setup a mock cert sorter to initiate the behaviour we want to test.
        # Must use a non-callable mock for our features dep injection
        # framework.
        self.mock_cert_sorter = mock.NonCallableMock()

        # TODO: need to provide return for "getRelease" for repolib stuff

        injection.provide(injection.CERT_SORTER, self.mock_cert_sorter)

Example 24

Project: pypackage
Source File: test_configure.py
View license
@pytest.mark.parametrize(
    "flags, banner",
    [
        ("-e", "Starting interactive build..."),
        ("-re", "Reconfiguring..."),
    ],
    ids=("extended/normal", "re-build")
)
def test_interactive_setup(capfd, reset_sys_argv, move_home_pypackage,
                           flags, banner):
    """Interactive setup should prompt each attribute group and echo a banner."""

    conf = Config()
    sys.argv = ["py-build", flags]
    opts = get_options()

    # Each attribute group returns a single sentinel key so we can verify
    # which group was forwarded to set_value_in_config.
    with mock.patch.object(configure, "standard_attributes",
                           return_value=["standard"]):
        with mock.patch.object(configure, "feature_attributes",
                               return_value=["feature"]):
            with mock.patch.object(configure, "extended_attributes",
                                   return_value=["extended"]):
                with mock.patch.object(
                        configure, "set_value_in_config") as patched_set_value:
                    assert configure.run_interactive_setup(conf, opts) == conf

    expected_calls = [
        mock.call.Call("standard", conf, conf._KEYS),
        mock.call.Call("feature", conf, conf._PYPACKAGE_KEYS),
        mock.call.Call("extended", conf, conf._KEYS),
    ]
    assert patched_set_value.mock_calls == expected_calls

    out, err = capfd.readouterr()
    assert banner in out
    for heading in ("~ Standard Attributes ~",
                    "~ Pypackage Features ~",
                    "~ Extended Attributes ~"):
        assert heading in out
    assert not err

Example 25

Project: macops
Source File: profiles_test.py
View license
  @mock.patch.object(profiles.NetworkProfile, 'AddPayload')
  @mock.patch.object(profiles.crypto, 'load_privatekey')
  @mock.patch.object(profiles.crypto, 'load_certificate')
  @mock.patch.object(profiles.crypto, 'PKCS12Type')
  @mock.patch.object(profiles.certs, 'Certificate')
  def testAddMachineCertificateSuccess(self, mock_certificate, mock_pkcs12,
                                       mock_loadcert, mock_loadkey,
                                       mock_addpayload):
    """AddMachineCertificate bundles cert+key into a PKCS12 payload.

    Verifies the PKCS12 container is built from the loaded cert/key, is
    exported using the certificate fingerprint as the passphrase, and that
    the resulting payload dict is handed to AddPayload.
    """
    mock_certobj = mock.MagicMock()
    mock_certobj.subject_cn = 'My Cert Subject'
    mock_certobj.osx_fingerprint = '0011223344556677889900'
    mock_certificate.return_value = mock_certobj

    mock_pkcs12obj = mock.MagicMock()
    mock_pkcs12obj.export.return_value = '-----PKCS12 Data-----'
    mock_pkcs12.return_value = mock_pkcs12obj

    mock_loadcert.return_value = 'certobj'
    mock_loadkey.return_value = 'keyobj'

    profile = profiles.NetworkProfile('testuser')
    profile.AddMachineCertificate('fakecert', 'fakekey')

    # Cert and key must be loaded (format constant 1) and attached to the
    # PKCS12 container before export.
    mock_pkcs12.assert_called_once_with()
    mock_pkcs12obj.set_certificate.assert_called_once_with('certobj')
    mock_pkcs12obj.set_privatekey.assert_called_once_with('keyobj')
    mock_pkcs12obj.export.assert_called_once_with('0011223344556677889900')
    mock_loadcert.assert_called_once_with(1, 'fakecert')
    mock_loadkey.assert_called_once_with(1, 'fakekey')

    # The fingerprint doubles as the payload 'Password'.
    mock_addpayload.assert_called_once_with(
        {profiles.PAYLOADKEYS_IDENTIFIER:
             'com.megacorp.networkprofile.machine_cert',
         profiles.PAYLOADKEYS_TYPE: 'com.apple.security.pkcs12',
         profiles.PAYLOADKEYS_DISPLAYNAME: 'My Cert Subject',
         profiles.PAYLOADKEYS_ENABLED: True,
         profiles.PAYLOADKEYS_VERSION: 1,
         profiles.PAYLOADKEYS_CONTENT: profiles.plistlib.Data(
             '-----PKCS12 Data-----'),
         profiles.PAYLOADKEYS_UUID: mock.ANY,
         'Password': '0011223344556677889900'})

Example 26

Project: graphite-beacon
Source File: tests.py
View license
def test_multimetrics(reactor):
    """Per-target alerting: each metric gets its own level, history and state.

    Feeds successive value batches through alert.check() and asserts which
    (level, target) notifications fire, that unchanged levels are suppressed,
    and that reactor.repeat() resets every target back to 'normal'.
    """
    from graphite_beacon.alerts import BaseAlert

    alert = BaseAlert.get(
        reactor, name="Test", query="*", rules=[
            "critical: > 100", "warning: > 50", "warning: < historical / 2"])
    reactor.alerts = set([alert])

    with mock.patch.object(reactor, 'notify'):
        alert.check([(110, 'metric1'), (60, 'metric2'), (30, 'metric3')])

        assert reactor.notify.call_count == 2

        # metric1 - critical
        assert reactor.notify.call_args_list[0][0][0] == 'critical'
        assert reactor.notify.call_args_list[0][1]['target'] == 'metric1'

        # metric2 - warning
        assert reactor.notify.call_args_list[1][0][0] == 'warning'
        assert reactor.notify.call_args_list[1][1]['target'] == 'metric2'

    assert list(alert.history['metric1']) == [110]

    with mock.patch.object(reactor, 'notify'):
        alert.check([(60, 'metric1'), (60, 'metric2'), (30, 'metric3')])
        assert reactor.notify.call_count == 1

        # metric1 - warning, metric2 didn't change
        assert reactor.notify.call_args_list[0][0][0] == 'warning'
        assert reactor.notify.call_args_list[0][1]['target'] == 'metric1'

    assert list(alert.history['metric1']) == [110, 60]

    with mock.patch.object(reactor, 'notify'):
        alert.check([(60, 'metric1'), (30, 'metric2'), (105, 'metric3')])
        assert reactor.notify.call_count == 2

        # metric2 - normal
        assert reactor.notify.call_args_list[0][0][0] == 'normal'
        assert reactor.notify.call_args_list[0][1]['target'] == 'metric2'

        # metric3 - critical
        assert reactor.notify.call_args_list[1][0][0] == 'critical'
        assert reactor.notify.call_args_list[1][1]['target'] == 'metric3'

    assert list(alert.history['metric1']) == [110, 60, 60]

    # No level changed in this batch, so nothing is notified.
    with mock.patch.object(reactor, 'notify'):
        alert.check([(60, 'metric1'), (30, 'metric2'), (105, 'metric3')])
        assert reactor.notify.call_count == 0

    with mock.patch.object(reactor, 'notify'):
        alert.check([(70, 'metric1'), (21, 'metric2'), (105, 'metric3')])
        assert reactor.notify.call_count == 1

        # metric2 - historical warning
        assert reactor.notify.call_args_list[0][0][0] == 'warning'
        assert reactor.notify.call_args_list[0][1]['target'] == 'metric2'

    assert list(alert.history['metric1']) == [60, 60, 60, 70]
    assert alert.state['metric1'] == 'warning'

    reactor.repeat()

    # repeat() resets all tracked targets (including internal ones) to normal.
    assert alert.state == {
        None: 'normal', 'metric1': 'normal', 'metric2': 'normal', 'metric3': 'normal',
        'waiting': 'normal', 'loading': 'normal'}

Example 27

Project: graphite-beacon
Source File: tests.py
View license
def test_multimetrics(reactor):
    """Exercise per-target levels, change suppression, history and repeat()."""
    from graphite_beacon.alerts import BaseAlert

    rules = ["critical: > 100", "warning: > 50", "warning: < historical / 2"]
    alert = BaseAlert.get(reactor, name="Test", query="*", rules=rules)
    reactor.alerts = set([alert])

    # First batch: metric1 breaches critical, metric2 breaches warning.
    with mock.patch.object(reactor, 'notify'):
        alert.check([(110, 'metric1'), (60, 'metric2'), (30, 'metric3')])
        sent = reactor.notify.call_args_list
        assert reactor.notify.call_count == 2
        assert sent[0][0][0] == 'critical'
        assert sent[0][1]['target'] == 'metric1'
        assert sent[1][0][0] == 'warning'
        assert sent[1][1]['target'] == 'metric2'

    assert list(alert.history['metric1']) == [110]

    # metric1 drops to warning; metric2 keeps its level, so one notification.
    with mock.patch.object(reactor, 'notify'):
        alert.check([(60, 'metric1'), (60, 'metric2'), (30, 'metric3')])
        sent = reactor.notify.call_args_list
        assert reactor.notify.call_count == 1
        assert sent[0][0][0] == 'warning'
        assert sent[0][1]['target'] == 'metric1'

    assert list(alert.history['metric1']) == [110, 60]

    # metric2 recovers to normal while metric3 jumps to critical.
    with mock.patch.object(reactor, 'notify'):
        alert.check([(60, 'metric1'), (30, 'metric2'), (105, 'metric3')])
        sent = reactor.notify.call_args_list
        assert reactor.notify.call_count == 2
        assert sent[0][0][0] == 'normal'
        assert sent[0][1]['target'] == 'metric2'
        assert sent[1][0][0] == 'critical'
        assert sent[1][1]['target'] == 'metric3'

    assert list(alert.history['metric1']) == [110, 60, 60]

    # Identical values again: no level changed, nothing is sent.
    with mock.patch.object(reactor, 'notify'):
        alert.check([(60, 'metric1'), (30, 'metric2'), (105, 'metric3')])
        assert reactor.notify.call_count == 0

    # metric2 falls below half of its history -> historical warning.
    with mock.patch.object(reactor, 'notify'):
        alert.check([(70, 'metric1'), (21, 'metric2'), (105, 'metric3')])
        sent = reactor.notify.call_args_list
        assert reactor.notify.call_count == 1
        assert sent[0][0][0] == 'warning'
        assert sent[0][1]['target'] == 'metric2'

    assert list(alert.history['metric1']) == [60, 60, 60, 70]
    assert alert.state['metric1'] == 'warning'

    reactor.repeat()

    # repeat() resets every tracked target back to 'normal'.
    expected_state = {
        None: 'normal', 'metric1': 'normal', 'metric2': 'normal',
        'metric3': 'normal', 'waiting': 'normal', 'loading': 'normal'}
    assert alert.state == expected_state

Example 28

Project: turnstile
Source File: test_middleware.py
View license
    @mock.patch.object(utils, 'find_entrypoint')
    @mock.patch.object(control, 'ControlDaemon')
    @mock.patch.object(remote, 'RemoteControlDaemon')
    @mock.patch.object(middleware.LOG, 'info')
    def test_init_enable(self, mock_info, mock_RemoteControlDaemon,
                         mock_ControlDaemon, mock_find_entrypoint):
        """The 'enable' option resolves pre/postprocessors by bare name."""
        eps = {
            'turnstile.preprocessor': {
                'ep1': 'preproc1',
                'ep3': 'preproc3',
                'ep4': 'preproc4',
                'ep6': 'preproc6',
            },
            'turnstile.postprocessor': {
                'ep2': 'postproc2',
                'ep4': 'postproc4',
                'ep6': 'postproc6',
            },
        }

        def fake_find(group, name, compat=True):
            # Unknown names resolve to None, like a missing entrypoint.
            return eps[group].get(name)

        mock_find_entrypoint.side_effect = fake_find

        mw = middleware.TurnstileMiddleware('app', {
            'enable': 'ep1 ep2 ep3 ep4 ep5 ep6',
        })

        self.assertEqual(mw.app, 'app')
        self.assertEqual(mw.limits, [])
        self.assertEqual(mw.limit_sum, None)
        self.assertEqual(mw.mapper, None)
        self.assertIsInstance(mw.mapper_lock,
                              eventlet.semaphore.Semaphore)
        self.assertEqual(mw.conf._config, {
            None: {
                'status': '413 Request Entity Too Large',
                'enable': 'ep1 ep2 ep3 ep4 ep5 ep6',
            },
        })
        self.assertEqual(mw._db, None)
        # Preprocessors keep config order; postprocessors are reversed.
        self.assertEqual(
            mw.preprocessors,
            ['preproc1', 'preproc3', 'preproc4', 'preproc6'])
        self.assertEqual(
            mw.postprocessors,
            ['postproc6', 'postproc4', 'postproc2'])
        self.assertEqual(mw.formatter, mw.format_delay)
        self.assertFalse(mock_RemoteControlDaemon.called)
        mock_ControlDaemon.assert_has_calls([
            mock.call(mw, mw.conf),
            mock.call().start(),
        ])
        mock_info.assert_called_once_with("Turnstile middleware initialized")

Example 29

Project: turnstile
Source File: test_middleware.py
View license
    @mock.patch.object(utils, 'find_entrypoint')
    @mock.patch.object(control, 'ControlDaemon')
    @mock.patch.object(remote, 'RemoteControlDaemon')
    @mock.patch.object(middleware.LOG, 'info')
    def test_init_processors(self, mock_info, mock_RemoteControlDaemon,
                             mock_ControlDaemon, mock_find_entrypoint):
        """Explicit 'preprocess'/'postprocess' options resolve entrypoints.

        Unlike 'enable', these options may use 'group:name' style entries
        (e.g. 'preproc:ep5'); verifies resolution order and that
        postprocessors are applied in reverse configuration order.
        """
        entrypoints = {
            'turnstile.preprocessor': {
                'ep1': 'preproc1',
                'ep3': 'preproc3',
                'ep4': 'preproc4',
                'ep6': 'preproc6',
                'preproc:ep5': 'preproc5',
            },
            'turnstile.postprocessor': {
                'ep2': 'postproc2',
                'ep4': 'postproc4',
                'ep6': 'postproc6',
                'postproc:ep5': 'postproc5',
            },
        }

        mock_find_entrypoint.side_effect = \
            lambda x, y, required=False: entrypoints[x].get(y)

        midware = middleware.TurnstileMiddleware('app', {
            'preprocess': 'ep1 ep3 ep4 preproc:ep5 ep6',
            'postprocess': 'ep6 postproc:ep5 ep4 ep2',
        })

        self.assertEqual(midware.app, 'app')
        self.assertEqual(midware.limits, [])
        self.assertEqual(midware.limit_sum, None)
        self.assertEqual(midware.mapper, None)
        self.assertIsInstance(midware.mapper_lock,
                              eventlet.semaphore.Semaphore)
        self.assertEqual(midware.conf._config, {
            None: dict(status='413 Request Entity Too Large',
                       preprocess='ep1 ep3 ep4 preproc:ep5 ep6',
                       postprocess='ep6 postproc:ep5 ep4 ep2'),
        })
        self.assertEqual(midware._db, None)
        self.assertEqual(midware.preprocessors, [
            'preproc1',
            'preproc3',
            'preproc4',
            'preproc5',
            'preproc6',
        ])
        # Reverse of the configured 'postprocess' order.
        self.assertEqual(midware.postprocessors, [
            'postproc6',
            'postproc5',
            'postproc4',
            'postproc2',
        ])
        self.assertEqual(midware.formatter, midware.format_delay)
        self.assertFalse(mock_RemoteControlDaemon.called)
        mock_ControlDaemon.assert_has_calls([
            mock.call(midware, midware.conf),
            mock.call().start(),
        ])
        mock_info.assert_called_once_with("Turnstile middleware initialized")

Example 30

Project: turnstile
Source File: test_middleware.py
View license
    @mock.patch.object(utils, 'find_entrypoint')
    @mock.patch.object(control, 'ControlDaemon')
    @mock.patch.object(remote, 'RemoteControlDaemon')
    @mock.patch.object(middleware.LOG, 'info')
    def test_init_processors(self, mock_info, mock_RemoteControlDaemon,
                             mock_ControlDaemon, mock_find_entrypoint):
        """'preprocess'/'postprocess' options may use 'group:name' entries."""
        eps = {
            'turnstile.preprocessor': {
                'ep1': 'preproc1',
                'ep3': 'preproc3',
                'ep4': 'preproc4',
                'ep6': 'preproc6',
                'preproc:ep5': 'preproc5',
            },
            'turnstile.postprocessor': {
                'ep2': 'postproc2',
                'ep4': 'postproc4',
                'ep6': 'postproc6',
                'postproc:ep5': 'postproc5',
            },
        }

        def fake_find(group, name, required=False):
            return eps[group].get(name)

        mock_find_entrypoint.side_effect = fake_find

        mw = middleware.TurnstileMiddleware('app', {
            'preprocess': 'ep1 ep3 ep4 preproc:ep5 ep6',
            'postprocess': 'ep6 postproc:ep5 ep4 ep2',
        })

        self.assertEqual(mw.app, 'app')
        self.assertEqual(mw.limits, [])
        self.assertEqual(mw.limit_sum, None)
        self.assertEqual(mw.mapper, None)
        self.assertIsInstance(mw.mapper_lock,
                              eventlet.semaphore.Semaphore)
        self.assertEqual(mw.conf._config, {
            None: {
                'status': '413 Request Entity Too Large',
                'preprocess': 'ep1 ep3 ep4 preproc:ep5 ep6',
                'postprocess': 'ep6 postproc:ep5 ep4 ep2',
            },
        })
        self.assertEqual(mw._db, None)
        self.assertEqual(
            mw.preprocessors,
            ['preproc1', 'preproc3', 'preproc4', 'preproc5', 'preproc6'])
        # Postprocessors run in reverse configuration order.
        self.assertEqual(
            mw.postprocessors,
            ['postproc6', 'postproc5', 'postproc4', 'postproc2'])
        self.assertEqual(mw.formatter, mw.format_delay)
        self.assertFalse(mock_RemoteControlDaemon.called)
        mock_ControlDaemon.assert_has_calls([
            mock.call(mw, mw.conf),
            mock.call().start(),
        ])
        mock_info.assert_called_once_with("Turnstile middleware initialized")

Example 31

Project: allura
Source File: test_tracker.py
View license
    @mock.patch.object(tracker, 'File')
    @mock.patch.object(tracker.h, 'make_app_admin_only')
    @mock.patch.object(tracker, 'g')
    @mock.patch.object(tracker, 'c')
    @mock.patch.object(tracker, 'ThreadLocalORMSession')
    @mock.patch.object(tracker, 'session')
    @mock.patch.object(tracker, 'M')
    @mock.patch.object(tracker, 'TM')
    def test_import_tool(self, TM, M, session, tlos, c, g, mao, File):
        """End-to-end check of ForgeTrackerImporter.import_tool().

        Feeds a two-ticket exported-JSON payload (one fully attributed,
        one anonymous/private) and asserts app installation options,
        user annotation, Ticket construction, attachment handling,
        session flush/expunge ordering, and audit/event side effects.
        """
        importer = tracker.ForgeTrackerImporter()
        importer._load_json = mock.Mock(return_value={
            'tracker_config': {
                '_id': 'orig_id',
                'options': {
                    'foo': 'bar',
                },
            },
            'open_status_names': 'open statuses',
            'closed_status_names': 'closed statuses',
            'custom_fields': 'fields',
            'saved_bins': 'bins',
            'tickets': [
                {
                    'reported_by': 'rb1',
                    'assigned_to': 'at1',
                    'attachments': [{'url': 'u1'}, {'url': 'u2'}],
                    'ticket_num': 1,
                    'description': 'd1',
                    'created_date': '2013-09-01',
                    'mod_date': '2013-09-02',
                    'summary': 's1',
                    'custom_fields': 'cf1',
                    'status': 'st1',
                    'labels': 'l1',
                    'votes_down': 1,
                    'votes_up': 2,
                    'private': False,
                    'discussion_thread': {'posts': 'comments1'},
                },
                {
                    'reported_by': 'rb2',
                    'assigned_to': 'at2',
                    'ticket_num': 100,
                    'attachments': [{'url': 'u3'}, {'url': 'u4'}],
                    'description': 'd2',
                    'created_date': '2013-09-03',
                    'mod_date': '2013-09-04',
                    'summary': 's2',
                    'custom_fields': 'cf2',
                    'status': 'st2',
                    'labels': 'l2',
                    'votes_down': 3,
                    'votes_up': 5,
                    'private': True,
                    'discussion_thread': {'posts': 'comments2'},
                },
            ],
        })
        # First ticket's users resolve; second ticket's fall back to anonymous.
        anonymous = mock.Mock(_id=None, is_anonymous=lambda: True)
        reporter = mock.Mock(is_anonymous=lambda: False)
        author = mock.Mock(is_anonymous=lambda: False)
        importer.get_user = mock.Mock(side_effect=[
            reporter, author,
            anonymous, anonymous,
        ])
        importer.annotate = mock.Mock(
            side_effect=['ad1', 'aad1', 'ad2', 'aad2'])
        importer.process_comments = mock.Mock()
        importer.process_bins = mock.Mock()
        project, user = mock.Mock(), mock.Mock()
        app = project.install_app.return_value
        app.config.options.mount_point = 'mount_point'
        app.config.options.import_id = {
            'source': 'Allura',
            'app_config_id': 'orig_id',
        }
        app.config.options.get = lambda *a: getattr(app.config.options, *a)
        app.url = 'foo'
        tickets = TM.Ticket.side_effect = [mock.Mock(), mock.Mock()]
        File.side_effect = ['f1', 'f2', 'f3', 'f4']

        importer.import_tool(project, user,
                             mount_point='mount_point', mount_label='mount_label')

        # Tracker app installed with the exported config options merged in.
        project.install_app.assert_called_once_with(
            'tickets', 'mount_point', 'mount_label',
            open_status_names='open statuses',
            closed_status_names='closed statuses',
            import_id={
                'source': 'Allura',
                'app_config_id': 'orig_id',
            },
            foo='bar',
        )
        # Descriptions are annotated twice: once for owner, once for creator.
        self.assertEqual(importer.annotate.call_args_list, [
            mock.call('d1', author, 'at1', label=' owned'),
            mock.call('ad1', reporter, 'rb1', label=' created'),
            mock.call('d2', anonymous, 'at2', label=' owned'),
            mock.call('ad2', anonymous, 'rb2', label=' created'),
        ])
        self.assertEqual(TM.Ticket.call_args_list, [
            mock.call(
                app_config_id=app.config._id,
                import_id={
                    'source': 'Allura',
                    'app_config_id': 'orig_id',
                    'source_id': 1,
                },
                description='aad1',
                created_date=datetime(2013, 9, 1),
                mod_date=datetime(2013, 9, 2),
                ticket_num=1,
                summary='s1',
                custom_fields='cf1',
                status='st1',
                labels='l1',
                votes_down=1,
                votes_up=2,
                votes=1,
                assigned_to_id=author._id,
            ),
            mock.call(
                app_config_id=app.config._id,
                import_id={
                    'source': 'Allura',
                    'app_config_id': 'orig_id',
                    'source_id': 100,
                },
                description='aad2',
                created_date=datetime(2013, 9, 3),
                mod_date=datetime(2013, 9, 4),
                ticket_num=100,
                summary='s2',
                custom_fields='cf2',
                status='st2',
                labels='l2',
                votes_down=3,
                votes_up=5,
                votes=2,
                # anonymous assignee -> no assigned_to_id
                assigned_to_id=None,
            ),
        ])
        self.assertEqual(tickets[0].private, False)
        self.assertEqual(tickets[1].private, True)
        self.assertEqual(importer.process_comments.call_args_list, [
            mock.call(tickets[0], 'comments1'),
            mock.call(tickets[1], 'comments2'),
        ])
        # Each ticket is flushed and expunged individually.
        self.assertEqual(tlos.flush_all.call_args_list, [
            mock.call(),
            mock.call(),
        ])
        self.assertEqual(session.return_value.flush.call_args_list, [
            mock.call(tickets[0]),
            mock.call(tickets[1]),
        ])
        self.assertEqual(session.return_value.expunge.call_args_list, [
            mock.call(tickets[0]),
            mock.call(tickets[1]),
        ])
        self.assertEqual(app.globals.custom_fields, 'fields')
        importer.process_bins.assert_called_once_with(app, 'bins')
        # last_ticket_num tracks the highest imported ticket_num.
        self.assertEqual(app.globals.last_ticket_num, 100)
        M.AuditLog.log.assert_called_once_with(
            'import tool mount_point from exported Allura JSON',
            project=project, user=user, url='foo')
        g.post_event.assert_called_once_with('project_updated')
        app.globals.invalidate_bin_counts.assert_called_once_with()
        self.assertEqual(File.call_args_list, [
            mock.call('u1'),
            mock.call('u2'),
            mock.call('u3'),
            mock.call('u4'),
        ])
        self.assertEqual(tickets[0].add_multiple_attachments.call_args_list, [
            mock.call(['f1', 'f2'])])
        self.assertEqual(tickets[1].add_multiple_attachments.call_args_list, [
            mock.call(['f3', 'f4']),
        ])
Example 32

Project: allura
Source File: test_tracker.py
View license
    @mock.patch.object(tracker, 'File')
    @mock.patch.object(tracker.h, 'make_app_admin_only')
    @mock.patch.object(tracker, 'g')
    @mock.patch.object(tracker, 'c')
    @mock.patch.object(tracker, 'ThreadLocalORMSession')
    @mock.patch.object(tracker, 'session')
    @mock.patch.object(tracker, 'M')
    @mock.patch.object(tracker, 'TM')
    def test_import_tool(self, TM, M, session, tlos, c, g, mao, File):
        """End-to-end unit test of ForgeTrackerImporter.import_tool.

        All collaborators (ticket models, ORM sessions, app globals) are
        patched out; a canned exported-Allura-JSON payload is fed in and
        the exact sequence of app installation, ticket construction,
        comment/bin processing, session flushes, attachment handling and
        audit logging is asserted.
        """
        importer = tracker.ForgeTrackerImporter()
        # Canned export payload: two tickets, the second private and with
        # the highest ticket_num (100), which must become last_ticket_num.
        importer._load_json = mock.Mock(return_value={
            'tracker_config': {
                '_id': 'orig_id',
                'options': {
                    'foo': 'bar',
                },
            },
            'open_status_names': 'open statuses',
            'closed_status_names': 'closed statuses',
            'custom_fields': 'fields',
            'saved_bins': 'bins',
            'tickets': [
                {
                    'reported_by': 'rb1',
                    'assigned_to': 'at1',
                    'attachments': [{'url': 'u1'}, {'url': 'u2'}],
                    'ticket_num': 1,
                    'description': 'd1',
                    'created_date': '2013-09-01',
                    'mod_date': '2013-09-02',
                    'summary': 's1',
                    'custom_fields': 'cf1',
                    'status': 'st1',
                    'labels': 'l1',
                    'votes_down': 1,
                    'votes_up': 2,
                    'private': False,
                    'discussion_thread': {'posts': 'comments1'},
                },
                {
                    'reported_by': 'rb2',
                    'assigned_to': 'at2',
                    'ticket_num': 100,
                    'attachments': [{'url': 'u3'}, {'url': 'u4'}],
                    'description': 'd2',
                    'created_date': '2013-09-03',
                    'mod_date': '2013-09-04',
                    'summary': 's2',
                    'custom_fields': 'cf2',
                    'status': 'st2',
                    'labels': 'l2',
                    'votes_down': 3,
                    'votes_up': 5,
                    'private': True,
                    'discussion_thread': {'posts': 'comments2'},
                },
            ],
        })
        # get_user is consumed in order: the first ticket's users resolve
        # to real users, the second ticket's to the anonymous user (whose
        # _id is None, so assigned_to_id must come out None for ticket 2).
        anonymous = mock.Mock(_id=None, is_anonymous=lambda: True)
        reporter = mock.Mock(is_anonymous=lambda: False)
        author = mock.Mock(is_anonymous=lambda: False)
        importer.get_user = mock.Mock(side_effect=[
            reporter, author,
            anonymous, anonymous,
        ])
        importer.annotate = mock.Mock(
            side_effect=['ad1', 'aad1', 'ad2', 'aad2'])
        importer.process_comments = mock.Mock()
        importer.process_bins = mock.Mock()
        project, user = mock.Mock(), mock.Mock()
        app = project.install_app.return_value
        app.config.options.mount_point = 'mount_point'
        app.config.options.import_id = {
            'source': 'Allura',
            'app_config_id': 'orig_id',
        }
        app.config.options.get = lambda *a: getattr(app.config.options, *a)
        app.url = 'foo'
        tickets = TM.Ticket.side_effect = [mock.Mock(), mock.Mock()]
        File.side_effect = ['f1', 'f2', 'f3', 'f4']

        importer.import_tool(project, user,
                             mount_point='mount_point', mount_label='mount_label')

        # Tool installed once, merging export options with status names
        # and a stable import_id for traceability.
        project.install_app.assert_called_once_with(
            'tickets', 'mount_point', 'mount_label',
            open_status_names='open statuses',
            closed_status_names='closed statuses',
            import_id={
                'source': 'Allura',
                'app_config_id': 'orig_id',
            },
            foo='bar',
        )
        # Descriptions are annotated twice per ticket: owner first, then
        # creator, chaining the previous annotation result.
        self.assertEqual(importer.annotate.call_args_list, [
            mock.call('d1', author, 'at1', label=' owned'),
            mock.call('ad1', reporter, 'rb1', label=' created'),
            mock.call('d2', anonymous, 'at2', label=' owned'),
            mock.call('ad2', anonymous, 'rb2', label=' created'),
        ])
        self.assertEqual(TM.Ticket.call_args_list, [
            mock.call(
                app_config_id=app.config._id,
                import_id={
                    'source': 'Allura',
                    'app_config_id': 'orig_id',
                    'source_id': 1,
                },
                description='aad1',
                created_date=datetime(2013, 9, 1),
                mod_date=datetime(2013, 9, 2),
                ticket_num=1,
                summary='s1',
                custom_fields='cf1',
                status='st1',
                labels='l1',
                votes_down=1,
                votes_up=2,
                votes=1,
                assigned_to_id=author._id,
            ),
            mock.call(
                app_config_id=app.config._id,
                import_id={
                    'source': 'Allura',
                    'app_config_id': 'orig_id',
                    'source_id': 100,
                },
                description='aad2',
                created_date=datetime(2013, 9, 3),
                mod_date=datetime(2013, 9, 4),
                ticket_num=100,
                summary='s2',
                custom_fields='cf2',
                status='st2',
                labels='l2',
                votes_down=3,
                votes_up=5,
                votes=2,
                assigned_to_id=None,
            ),
        ])
        self.assertEqual(tickets[0].private, False)
        self.assertEqual(tickets[1].private, True)
        self.assertEqual(importer.process_comments.call_args_list, [
            mock.call(tickets[0], 'comments1'),
            mock.call(tickets[1], 'comments2'),
        ])
        # Each ticket is flushed and expunged individually to bound memory
        # during large imports.
        self.assertEqual(tlos.flush_all.call_args_list, [
            mock.call(),
            mock.call(),
        ])
        self.assertEqual(session.return_value.flush.call_args_list, [
            mock.call(tickets[0]),
            mock.call(tickets[1]),
        ])
        self.assertEqual(session.return_value.expunge.call_args_list, [
            mock.call(tickets[0]),
            mock.call(tickets[1]),
        ])
        self.assertEqual(app.globals.custom_fields, 'fields')
        importer.process_bins.assert_called_once_with(app, 'bins')
        self.assertEqual(app.globals.last_ticket_num, 100)
        M.AuditLog.log.assert_called_once_with(
            'import tool mount_point from exported Allura JSON',
            project=project, user=user, url='foo')
        g.post_event.assert_called_once_with('project_updated')
        app.globals.invalidate_bin_counts.assert_called_once_with()
        # Attachments: one File per exported URL, grouped per ticket.
        self.assertEqual(File.call_args_list, [
            mock.call('u1'),
            mock.call('u2'),
            mock.call('u3'),
            mock.call('u4'),
        ])
        self.assertEqual(tickets[0].add_multiple_attachments.call_args_list, [
            mock.call(['f1', 'f2'])])
        self.assertEqual(tickets[1].add_multiple_attachments.call_args_list, [
            mock.call(['f3', 'f4']),
        ])

Example 33

Project: rtv
Source File: test_page.py
View license
def test_page_unauthenticated(reddit, terminal, config, oauth):
    """Exercise the base Page controller for a logged-out user.

    With content/nav/draw patched out, verify the main loop draws and
    exits, the quit/force-quit/help keybindings behave, sort keys map to
    the expected refresh orders, and every login-requiring action shows
    the 'Not logged in' notification instead of running.
    """
    page = Page(reddit, terminal, config, oauth)
    page.controller = PageController(page, keymap=config.keymap)
    with mock.patch.object(page, 'refresh_content'), \
            mock.patch.object(page, 'content'),      \
            mock.patch.object(page, 'nav'),          \
            mock.patch.object(page, 'draw'):

        # Loop: the first triggered key deactivates the page so loop()
        # returns after a single iteration; draw must have run.
        def func(_):
            page.active = False
        with mock.patch.object(page, 'controller'):
            page.controller.trigger = mock.MagicMock(side_effect=func)
            page.loop()
        assert page.draw.called

        # Quit, confirm ('y' at the prompt exits)
        terminal.stdscr.getch.return_value = ord('y')
        with mock.patch('sys.exit') as sys_exit:
            page.controller.trigger('q')
        assert sys_exit.called

        # Quit, deny (ESC at the prompt aborts the quit)
        terminal.stdscr.getch.return_value = terminal.ESCAPE
        with mock.patch('sys.exit') as sys_exit:
            page.controller.trigger('q')
        assert not sys_exit.called

        # Force quit ('Q' exits without asking for confirmation)
        terminal.stdscr.getch.return_value = terminal.ESCAPE
        with mock.patch('sys.exit') as sys_exit:
            page.controller.trigger('Q')
        assert sys_exit.called

        # Show help (spawns a pager subprocess)
        with mock.patch('subprocess.Popen') as Popen:
            page.controller.trigger('?')
        assert Popen.called

        # Sort content: number keys 1-5 map to the fixed order names
        page.controller.trigger('1')
        page.refresh_content.assert_called_with(order='hot')
        page.controller.trigger('2')
        page.refresh_content.assert_called_with(order='top')
        page.controller.trigger('3')
        page.refresh_content.assert_called_with(order='rising')
        page.controller.trigger('4')
        page.refresh_content.assert_called_with(order='new')
        page.controller.trigger('5')
        page.refresh_content.assert_called_with(order='controversial')

        logged_in_methods = [
            'a',  # Upvote
            'z',  # Downvote
            'd',  # Delete
            'e',  # Edit
            'i',  # Get inbox
        ]
        for ch in logged_in_methods:
            page.controller.trigger(ch)
            message = 'Not logged in'.encode('utf-8')
            terminal.stdscr.subwin.addstr.assert_called_with(1, 1, message)
            terminal.stdscr.subwin.addstr.reset_mock()

Example 34

Project: rtv
Source File: test_page.py
View license
def test_page_unauthenticated(reddit, terminal, config, oauth):
    """Exercise the base Page controller for a logged-out user.

    With content/nav/draw patched out, verify the main loop draws and
    exits, the quit/force-quit/help keybindings behave, sort keys map to
    the expected refresh orders, and every login-requiring action shows
    the 'Not logged in' notification instead of running.
    """
    page = Page(reddit, terminal, config, oauth)
    page.controller = PageController(page, keymap=config.keymap)
    with mock.patch.object(page, 'refresh_content'), \
            mock.patch.object(page, 'content'),      \
            mock.patch.object(page, 'nav'),          \
            mock.patch.object(page, 'draw'):

        # Loop: the first triggered key deactivates the page so loop()
        # returns after a single iteration; draw must have run.
        def func(_):
            page.active = False
        with mock.patch.object(page, 'controller'):
            page.controller.trigger = mock.MagicMock(side_effect=func)
            page.loop()
        assert page.draw.called

        # Quit, confirm ('y' at the prompt exits)
        terminal.stdscr.getch.return_value = ord('y')
        with mock.patch('sys.exit') as sys_exit:
            page.controller.trigger('q')
        assert sys_exit.called

        # Quit, deny (ESC at the prompt aborts the quit)
        terminal.stdscr.getch.return_value = terminal.ESCAPE
        with mock.patch('sys.exit') as sys_exit:
            page.controller.trigger('q')
        assert not sys_exit.called

        # Force quit ('Q' exits without asking for confirmation)
        terminal.stdscr.getch.return_value = terminal.ESCAPE
        with mock.patch('sys.exit') as sys_exit:
            page.controller.trigger('Q')
        assert sys_exit.called

        # Show help (spawns a pager subprocess)
        with mock.patch('subprocess.Popen') as Popen:
            page.controller.trigger('?')
        assert Popen.called

        # Sort content: number keys 1-5 map to the fixed order names
        page.controller.trigger('1')
        page.refresh_content.assert_called_with(order='hot')
        page.controller.trigger('2')
        page.refresh_content.assert_called_with(order='top')
        page.controller.trigger('3')
        page.refresh_content.assert_called_with(order='rising')
        page.controller.trigger('4')
        page.refresh_content.assert_called_with(order='new')
        page.controller.trigger('5')
        page.refresh_content.assert_called_with(order='controversial')

        logged_in_methods = [
            'a',  # Upvote
            'z',  # Downvote
            'd',  # Delete
            'e',  # Edit
            'i',  # Get inbox
        ]
        for ch in logged_in_methods:
            page.controller.trigger(ch)
            message = 'Not logged in'.encode('utf-8')
            terminal.stdscr.subwin.addstr.assert_called_with(1, 1, message)
            terminal.stdscr.subwin.addstr.reset_mock()

Example 35

Project: kozmic-ci
Source File: unit_tests.py
View license
    def test_set_status(self):
        """Build.set_status posts a GitHub commit status and, on failure,
        emails project members with addresses — exactly once per status
        change.

        NOTE(review): the '[email protected]' literals look like an
        email-obfuscation scrape artifact replacing the original
        addresses — confirm against the upstream source before reuse.
        """
        description = 'Something went very wrong'

        # 1. There are no members with email addresses:
        #    status is posted to GitHub, but no mail goes out.
        build_1 = factories.BuildFactory.create(project=self.project)

        with mail.record_messages() as outbox:
            with mock.patch.object(Project, 'gh') as gh_repo_mock:
                build_1.set_status('failure', description=description)

        assert not outbox
        gh_repo_mock.create_status.assert_called_once_with(
            build_1.gh_commit_sha,
            'failure',
            target_url=build_1.url,
            description=description, context='Kozmic-CI')

        # 2. There are members with email addresses:
        #    a single notification mail is sent alongside the GitHub status.
        member_1 = factories.UserFactory.create(email='[email protected]')
        member_2 = factories.UserFactory.create(email='[email protected]')
        for member in [member_1, member_2]:
            factories.MembershipFactory.create(user=member, project=self.project)

        build_2 = factories.BuildFactory.create(project=self.project)

        with mail.record_messages() as outbox:
            with mock.patch.object(Project, 'gh') as gh_repo_mock:
                build_2.set_status('failure', description=description)

        assert len(outbox) == 1
        message = outbox[0]
        assert self.project.gh_full_name in message.subject
        assert 'failure' in message.subject
        assert build_2.gh_commit_ref in message.subject
        assert build_2.url in message.html

        gh_repo_mock.create_status.assert_called_once_with(
            build_2.gh_commit_sha,
            'failure',
            target_url=build_2.url,
            description=description, context='Kozmic-CI')

        # 3. Repeat the same `set_status` call and make sure that we
        # will not be notified the second time
        with mail.record_messages() as outbox:
            with mock.patch.object(Project, 'gh') as gh_repo_mock:
                build_2.set_status('failure', description=description)

        assert not outbox
        assert not gh_repo_mock.create_status.called

Example 36

Project: kozmic-ci
Source File: unit_tests.py
View license
    def test_set_status(self):
        """Build.set_status posts a GitHub commit status and, on failure,
        emails project members with addresses — exactly once per status
        change.

        NOTE(review): the '[email protected]' literals look like an
        email-obfuscation scrape artifact replacing the original
        addresses — confirm against the upstream source before reuse.
        """
        description = 'Something went very wrong'

        # 1. There are no members with email addresses:
        #    status is posted to GitHub, but no mail goes out.
        build_1 = factories.BuildFactory.create(project=self.project)

        with mail.record_messages() as outbox:
            with mock.patch.object(Project, 'gh') as gh_repo_mock:
                build_1.set_status('failure', description=description)

        assert not outbox
        gh_repo_mock.create_status.assert_called_once_with(
            build_1.gh_commit_sha,
            'failure',
            target_url=build_1.url,
            description=description, context='Kozmic-CI')

        # 2. There are members with email addresses:
        #    a single notification mail is sent alongside the GitHub status.
        member_1 = factories.UserFactory.create(email='[email protected]')
        member_2 = factories.UserFactory.create(email='[email protected]')
        for member in [member_1, member_2]:
            factories.MembershipFactory.create(user=member, project=self.project)

        build_2 = factories.BuildFactory.create(project=self.project)

        with mail.record_messages() as outbox:
            with mock.patch.object(Project, 'gh') as gh_repo_mock:
                build_2.set_status('failure', description=description)

        assert len(outbox) == 1
        message = outbox[0]
        assert self.project.gh_full_name in message.subject
        assert 'failure' in message.subject
        assert build_2.gh_commit_ref in message.subject
        assert build_2.url in message.html

        gh_repo_mock.create_status.assert_called_once_with(
            build_2.gh_commit_sha,
            'failure',
            target_url=build_2.url,
            description=description, context='Kozmic-CI')

        # 3. Repeat the same `set_status` call and make sure that we
        # will not be notified the second time
        with mail.record_messages() as outbox:
            with mock.patch.object(Project, 'gh') as gh_repo_mock:
                build_2.set_status('failure', description=description)

        assert not outbox
        assert not gh_repo_mock.create_status.called

Example 37

Project: cloud-init
Source File: test_netconfig.py
View license
    def test_write_ipv6_rhel(self):
        """Applying a dual-stack network config on RHEL writes four
        sysconfig files (lo, eth0, eth1, network) with IPV6 settings,
        capturing the writes in memory instead of touching disk.
        """
        rh_distro = self._get_distro('rhel')

        # Maps filename -> WriteBuffer recording content, mode and omode.
        write_bufs = {}

        def replace_write(filename, content, mode=0o644, omode="wb"):
            # Stand-in for util.write_file: record instead of writing.
            buf = WriteBuffer()
            buf.mode = mode
            buf.omode = omode
            buf.write(content)
            write_bufs[filename] = buf

        with ExitStack() as mocks:
            mocks.enter_context(
                mock.patch.object(util, 'write_file', replace_write))
            mocks.enter_context(
                mock.patch.object(util, 'load_file', return_value=''))
            mocks.enter_context(
                mock.patch.object(os.path, 'isfile', return_value=False))

            rh_distro.apply_network(BASE_NET_CFG_IPV6, False)

            self.assertEqual(len(write_bufs), 4)
            self.assertIn('/etc/sysconfig/network-scripts/ifcfg-lo',
                          write_bufs)
            write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-lo']
            expected_buf = '''
DEVICE="lo"
ONBOOT=yes
'''
            self.assertCfgEquals(expected_buf, str(write_buf))
            self.assertEqual(write_buf.mode, 0o644)

            self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth0',
                          write_bufs)
            write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth0']
            expected_buf = '''
DEVICE="eth0"
BOOTPROTO="static"
NETMASK="255.255.255.0"
IPADDR="192.168.1.5"
ONBOOT=yes
GATEWAY="192.168.1.254"
BROADCAST="192.168.1.0"
IPV6INIT=yes
IPV6ADDR="2607:f0d0:1002:0011::2"
IPV6_DEFAULTGW="2607:f0d0:1002:0011::1"
'''
            self.assertCfgEquals(expected_buf, str(write_buf))
            self.assertEqual(write_buf.mode, 0o644)
            self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth1',
                          write_bufs)
            write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth1']
            expected_buf = '''
DEVICE="eth1"
BOOTPROTO="static"
NETMASK="255.255.255.0"
IPADDR="192.168.1.6"
ONBOOT=no
GATEWAY="192.168.1.254"
BROADCAST="192.168.1.0"
IPV6INIT=yes
IPV6ADDR="2607:f0d0:1002:0011::3"
IPV6_DEFAULTGW="2607:f0d0:1002:0011::1"
'''
            self.assertCfgEquals(expected_buf, str(write_buf))
            self.assertEqual(write_buf.mode, 0o644)

            # Global file must enable IPv6 networking with autoconf off.
            self.assertIn('/etc/sysconfig/network', write_bufs)
            write_buf = write_bufs['/etc/sysconfig/network']
            expected_buf = '''
# Created by cloud-init v. 0.7
NETWORKING=yes
NETWORKING_IPV6=yes
IPV6_AUTOCONF=no
'''
            self.assertCfgEquals(expected_buf, str(write_buf))
            self.assertEqual(write_buf.mode, 0o644)

Example 38

Project: cloud-init
Source File: test_netconfig.py
View license
    def test_simple_write_rh(self):
        rh_distro = self._get_distro('rhel')

        write_bufs = {}

        def replace_write(filename, content, mode=0o644, omode="wb"):
            buf = WriteBuffer()
            buf.mode = mode
            buf.omode = omode
            buf.write(content)
            write_bufs[filename] = buf

        with ExitStack() as mocks:
            mocks.enter_context(
                mock.patch.object(util, 'write_file', replace_write))
            mocks.enter_context(
                mock.patch.object(util, 'load_file', return_value=''))
            mocks.enter_context(
                mock.patch.object(os.path, 'isfile', return_value=False))

            rh_distro.apply_network(BASE_NET_CFG, False)

            self.assertEqual(len(write_bufs), 4)
            self.assertIn('/etc/sysconfig/network-scripts/ifcfg-lo',
                          write_bufs)
            write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-lo']
            expected_buf = '''
DEVICE="lo"
ONBOOT=yes
'''
            self.assertCfgEquals(expected_buf, str(write_buf))
            self.assertEqual(write_buf.mode, 0o644)

            self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth0',
                          write_bufs)
            write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth0']
            expected_buf = '''
DEVICE="eth0"
BOOTPROTO="static"
NETMASK="255.255.255.0"
IPADDR="192.168.1.5"
ONBOOT=yes
GATEWAY="192.168.1.254"
BROADCAST="192.168.1.0"
'''
            self.assertCfgEquals(expected_buf, str(write_buf))
            self.assertEqual(write_buf.mode, 0o644)

            self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth1',
                          write_bufs)
            write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth1']
            expected_buf = '''
DEVICE="eth1"
BOOTPROTO="dhcp"
ONBOOT=yes
'''
            self.assertCfgEquals(expected_buf, str(write_buf))
            self.assertEqual(write_buf.mode, 0o644)

            self.assertIn('/etc/sysconfig/network', write_bufs)
            write_buf = write_bufs['/etc/sysconfig/network']
            expected_buf = '''
# Created by cloud-init v. 0.7
NETWORKING=yes
'''
            self.assertCfgEquals(expected_buf, str(write_buf))
            self.assertEqual(write_buf.mode, 0o644)

Example 39

Project: cloud-init
Source File: test_netconfig.py
View license
    def test_write_ipv6_rhel(self):
        """Applying a dual-stack network config on RHEL writes four
        sysconfig files (lo, eth0, eth1, network) with IPV6 settings,
        capturing the writes in memory instead of touching disk.
        """
        rh_distro = self._get_distro('rhel')

        # Maps filename -> WriteBuffer recording content, mode and omode.
        write_bufs = {}

        def replace_write(filename, content, mode=0o644, omode="wb"):
            # Stand-in for util.write_file: record instead of writing.
            buf = WriteBuffer()
            buf.mode = mode
            buf.omode = omode
            buf.write(content)
            write_bufs[filename] = buf

        with ExitStack() as mocks:
            mocks.enter_context(
                mock.patch.object(util, 'write_file', replace_write))
            mocks.enter_context(
                mock.patch.object(util, 'load_file', return_value=''))
            mocks.enter_context(
                mock.patch.object(os.path, 'isfile', return_value=False))

            rh_distro.apply_network(BASE_NET_CFG_IPV6, False)

            self.assertEqual(len(write_bufs), 4)
            self.assertIn('/etc/sysconfig/network-scripts/ifcfg-lo',
                          write_bufs)
            write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-lo']
            expected_buf = '''
DEVICE="lo"
ONBOOT=yes
'''
            self.assertCfgEquals(expected_buf, str(write_buf))
            self.assertEqual(write_buf.mode, 0o644)

            self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth0',
                          write_bufs)
            write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth0']
            expected_buf = '''
DEVICE="eth0"
BOOTPROTO="static"
NETMASK="255.255.255.0"
IPADDR="192.168.1.5"
ONBOOT=yes
GATEWAY="192.168.1.254"
BROADCAST="192.168.1.0"
IPV6INIT=yes
IPV6ADDR="2607:f0d0:1002:0011::2"
IPV6_DEFAULTGW="2607:f0d0:1002:0011::1"
'''
            self.assertCfgEquals(expected_buf, str(write_buf))
            self.assertEqual(write_buf.mode, 0o644)
            self.assertIn('/etc/sysconfig/network-scripts/ifcfg-eth1',
                          write_bufs)
            write_buf = write_bufs['/etc/sysconfig/network-scripts/ifcfg-eth1']
            expected_buf = '''
DEVICE="eth1"
BOOTPROTO="static"
NETMASK="255.255.255.0"
IPADDR="192.168.1.6"
ONBOOT=no
GATEWAY="192.168.1.254"
BROADCAST="192.168.1.0"
IPV6INIT=yes
IPV6ADDR="2607:f0d0:1002:0011::3"
IPV6_DEFAULTGW="2607:f0d0:1002:0011::1"
'''
            self.assertCfgEquals(expected_buf, str(write_buf))
            self.assertEqual(write_buf.mode, 0o644)

            # Global file must enable IPv6 networking with autoconf off.
            self.assertIn('/etc/sysconfig/network', write_bufs)
            write_buf = write_bufs['/etc/sysconfig/network']
            expected_buf = '''
# Created by cloud-init v. 0.7
NETWORKING=yes
NETWORKING_IPV6=yes
IPV6_AUTOCONF=no
'''
            self.assertCfgEquals(expected_buf, str(write_buf))
            self.assertEqual(write_buf.mode, 0o644)

Example 40

Project: nupic
Source File: record_stream_test.py
View license
  def testGetNextRecordDictWithResetFieldWithoutSequenceField(self):
    """When the field meta has a reset field but no sequence field, the
    record stream must synthesize _sequenceId: incremented on each
    reset=1 record, held steady on reset=0, and restarted at 0 after
    rewind().
    """
    fields = [
      FieldMetaInfo('name', FieldMetaType.string,
                    FieldMetaSpecial.none),
      FieldMetaInfo('timestamp', FieldMetaType.datetime,
                    FieldMetaSpecial.timestamp),
      FieldMetaInfo('integer', FieldMetaType.integer,
                    FieldMetaSpecial.none),
      FieldMetaInfo('real', FieldMetaType.float,
                    FieldMetaSpecial.none),
      FieldMetaInfo('reset', FieldMetaType.integer,
                    FieldMetaSpecial.reset),
      FieldMetaInfo('categories', FieldMetaType.list,
                    FieldMetaSpecial.category)
    ]


    stream = self.MyRecordStream(fields)


    # First record with reset=1: sequence id starts at 0.
    with mock.patch.object(
        stream, 'getNextRecord', autospec=True,
        return_value=['rec_1', datetime(day=1, month=3, year=2010), 5, 6.5, 1,
                      [0, 1, 2]]):

      result = stream.getNextRecordDict()

      self.assertEqual(
        result,
        {
          'name': 'rec_1',
          'timestamp': datetime(2010, 3, 1, 0, 0),
          'integer': 5,
          'real': 6.5,
          'reset': 1,
          'categories': [0, 1, 2],
          '_category': [0, 1, 2],
          '_reset': 1,
          '_sequenceId': 0,
          '_timestamp': datetime(2010, 3, 1, 0, 0),
          '_timestampRecordIdx': None })

    # One more time to verify incrementing sequence id
    with mock.patch.object(
        stream, 'getNextRecord', autospec=True,
        return_value=['rec_2', datetime(day=2, month=3, year=2010), 5, 6.5, 1,
                      [0, 1, 2]]):

      result = stream.getNextRecordDict()

      self.assertEqual(
        result,
        {
          'name': 'rec_2',
          'timestamp': datetime(2010, 3, 2, 0, 0),
          'integer': 5,
          'real': 6.5,
          'reset': 1,
          'categories': [0, 1, 2],
          '_category': [0, 1, 2],
          '_reset': 1,
          '_sequenceId': 1,
          '_timestamp': datetime(2010, 3, 2, 0, 0),
          '_timestampRecordIdx': None })

    # Now with reset turned off, expecting no change to sequence id
    with mock.patch.object(
        stream, 'getNextRecord', autospec=True,
        return_value=['rec_3', datetime(day=3, month=3, year=2010), 5, 6.5, 0,
                      [0, 1, 2]]):

      result = stream.getNextRecordDict()

      self.assertEqual(
        result,
        {
          'name': 'rec_3',
          'timestamp': datetime(2010, 3, 3, 0, 0),
          'integer': 5,
          'real': 6.5,
          'reset': 0,
          'categories': [0, 1, 2],
          '_category': [0, 1, 2],
          '_reset': 0,
          '_sequenceId': 1,
          '_timestamp': datetime(2010, 3, 3, 0, 0),
          '_timestampRecordIdx': None })

    # Now check that rewind resets sequence id
    with mock.patch.object(
        stream, 'getNextRecord', autospec=True,
        return_value=['rec_4', datetime(day=4, month=3, year=2010), 5, 6.5, 1,
                      [0, 1, 2]]):
      stream.rewind()
      result = stream.getNextRecordDict()

      self.assertEqual(
        result,
        {
          'name': 'rec_4',
          'timestamp': datetime(2010, 3, 4, 0, 0),
          'integer': 5,
          'real': 6.5,
          'reset': 1,
          'categories': [0, 1, 2],
          '_category': [0, 1, 2],
          '_reset': 1,
          '_sequenceId': 0,
          '_timestamp': datetime(2010, 3, 4, 0, 0),
          '_timestampRecordIdx': None })

Example 41

Project: nupic
Source File: record_stream_test.py
View license
  def testGetNextRecordDictWithResetFieldWithoutSequenceField(self):
    """When the field meta has a reset field but no sequence field, the
    record stream must synthesize _sequenceId: incremented on each
    reset=1 record, held steady on reset=0, and restarted at 0 after
    rewind().
    """
    fields = [
      FieldMetaInfo('name', FieldMetaType.string,
                    FieldMetaSpecial.none),
      FieldMetaInfo('timestamp', FieldMetaType.datetime,
                    FieldMetaSpecial.timestamp),
      FieldMetaInfo('integer', FieldMetaType.integer,
                    FieldMetaSpecial.none),
      FieldMetaInfo('real', FieldMetaType.float,
                    FieldMetaSpecial.none),
      FieldMetaInfo('reset', FieldMetaType.integer,
                    FieldMetaSpecial.reset),
      FieldMetaInfo('categories', FieldMetaType.list,
                    FieldMetaSpecial.category)
    ]


    stream = self.MyRecordStream(fields)


    # First record with reset=1: sequence id starts at 0.
    with mock.patch.object(
        stream, 'getNextRecord', autospec=True,
        return_value=['rec_1', datetime(day=1, month=3, year=2010), 5, 6.5, 1,
                      [0, 1, 2]]):

      result = stream.getNextRecordDict()

      self.assertEqual(
        result,
        {
          'name': 'rec_1',
          'timestamp': datetime(2010, 3, 1, 0, 0),
          'integer': 5,
          'real': 6.5,
          'reset': 1,
          'categories': [0, 1, 2],
          '_category': [0, 1, 2],
          '_reset': 1,
          '_sequenceId': 0,
          '_timestamp': datetime(2010, 3, 1, 0, 0),
          '_timestampRecordIdx': None })

    # One more time to verify incrementing sequence id
    with mock.patch.object(
        stream, 'getNextRecord', autospec=True,
        return_value=['rec_2', datetime(day=2, month=3, year=2010), 5, 6.5, 1,
                      [0, 1, 2]]):

      result = stream.getNextRecordDict()

      self.assertEqual(
        result,
        {
          'name': 'rec_2',
          'timestamp': datetime(2010, 3, 2, 0, 0),
          'integer': 5,
          'real': 6.5,
          'reset': 1,
          'categories': [0, 1, 2],
          '_category': [0, 1, 2],
          '_reset': 1,
          '_sequenceId': 1,
          '_timestamp': datetime(2010, 3, 2, 0, 0),
          '_timestampRecordIdx': None })

    # Now with reset turned off, expecting no change to sequence id
    with mock.patch.object(
        stream, 'getNextRecord', autospec=True,
        return_value=['rec_3', datetime(day=3, month=3, year=2010), 5, 6.5, 0,
                      [0, 1, 2]]):

      result = stream.getNextRecordDict()

      self.assertEqual(
        result,
        {
          'name': 'rec_3',
          'timestamp': datetime(2010, 3, 3, 0, 0),
          'integer': 5,
          'real': 6.5,
          'reset': 0,
          'categories': [0, 1, 2],
          '_category': [0, 1, 2],
          '_reset': 0,
          '_sequenceId': 1,
          '_timestamp': datetime(2010, 3, 3, 0, 0),
          '_timestampRecordIdx': None })

    # Now check that rewind resets sequence id
    with mock.patch.object(
        stream, 'getNextRecord', autospec=True,
        return_value=['rec_4', datetime(day=4, month=3, year=2010), 5, 6.5, 1,
                      [0, 1, 2]]):
      stream.rewind()
      result = stream.getNextRecordDict()

      self.assertEqual(
        result,
        {
          'name': 'rec_4',
          'timestamp': datetime(2010, 3, 4, 0, 0),
          'integer': 5,
          'real': 6.5,
          'reset': 1,
          'categories': [0, 1, 2],
          '_category': [0, 1, 2],
          '_reset': 1,
          '_sequenceId': 0,
          '_timestamp': datetime(2010, 3, 4, 0, 0),
          '_timestampRecordIdx': None })

Example 42

Project: ceilometer
Source File: test_inspector.py
View license
    def test_inspect_vnics(self):
        """Check that inspect_vnics() reports only fully-defined interfaces.

        The domain XML below declares five <interface> elements.  The first
        has no <target> dev and the second has no <mac> address, so both
        are expected to be skipped; only vnet0, vnet1 and vnet2 should be
        yielded, each paired with the traffic counters stubbed in
        ``interface_stats`` below.
        """
        dom_xml = """
             <domain type='kvm'>
                 <devices>
                    <!-- NOTE(dprince): interface with no target -->
                    <interface type='bridge'>
                       <mac address='fa:16:3e:93:31:5a'/>
                       <source bridge='br100'/>
                       <model type='virtio'/>
                       <address type='pci' domain='0x0000' bus='0x00' \
                       slot='0x03' function='0x0'/>
                    </interface>
                    <!-- NOTE(dprince): interface with no mac -->
                    <interface type='bridge'>
                       <source bridge='br100'/>
                       <target dev='foo'/>
                       <model type='virtio'/>
                       <address type='pci' domain='0x0000' bus='0x00' \
                       slot='0x03' function='0x0'/>
                    </interface>
                    <interface type='bridge'>
                       <mac address='fa:16:3e:71:ec:6d'/>
                       <source bridge='br100'/>
                       <target dev='vnet0'/>
                       <filterref filter=
                        'nova-instance-00000001-fa163e71ec6d'>
                         <parameter name='DHCPSERVER' value='10.0.0.1'/>
                         <parameter name='IP' value='10.0.0.2'/>
                         <parameter name='PROJMASK' value='255.255.255.0'/>
                         <parameter name='PROJNET' value='10.0.0.0'/>
                       </filterref>
                       <alias name='net0'/>
                     </interface>
                     <interface type='bridge'>
                       <mac address='fa:16:3e:71:ec:6e'/>
                       <source bridge='br100'/>
                       <target dev='vnet1'/>
                       <filterref filter=
                        'nova-instance-00000001-fa163e71ec6e'>
                         <parameter name='DHCPSERVER' value='192.168.0.1'/>
                         <parameter name='IP' value='192.168.0.2'/>
                         <parameter name='PROJMASK' value='255.255.255.0'/>
                         <parameter name='PROJNET' value='192.168.0.0'/>
                       </filterref>
                       <alias name='net1'/>
                     </interface>
                     <interface type='bridge'>
                       <mac address='fa:16:3e:96:33:f0'/>
                       <source bridge='qbr420008b3-7c'/>
                       <target dev='vnet2'/>
                       <model type='virtio'/>
                       <address type='pci' domain='0x0000' bus='0x00' \
                       slot='0x03' function='0x0'/>
                    </interface>
                 </devices>
             </domain>
        """

        # Per-device stat tuples; the assertions below read positions 0/1
        # as rx_bytes/rx_packets and 4/5 as tx_bytes/tx_packets.
        interface_stats = {
            'vnet0': (1, 2, 0, 0, 3, 4, 0, 0),
            'vnet1': (5, 6, 0, 0, 7, 8, 0, 0),
            'vnet2': (9, 10, 0, 0, 11, 12, 0, 0),
        }
        interfaceStats = interface_stats.__getitem__

        # Patch the libvirt connection/domain so the inspector runs against
        # the canned XML and stats instead of a real hypervisor.
        connection = self.inspector.connection
        with contextlib.ExitStack() as stack:
            stack.enter_context(mock.patch.object(connection,
                                                  'lookupByUUIDString',
                                                  return_value=self.domain))
            stack.enter_context(mock.patch.object(self.domain, 'XMLDesc',
                                                  return_value=dom_xml))
            stack.enter_context(mock.patch.object(self.domain,
                                                  'interfaceStats',
                                                  side_effect=interfaceStats))
            stack.enter_context(mock.patch.object(self.domain, 'info',
                                                  return_value=(0, 0, 0,
                                                                2, 999999)))
            interfaces = list(self.inspector.inspect_vnics(self.instance))

            # The two incomplete interfaces must have been dropped.
            self.assertEqual(3, len(interfaces))
            vnic0, info0 = interfaces[0]
            self.assertEqual('vnet0', vnic0.name)
            self.assertEqual('fa:16:3e:71:ec:6d', vnic0.mac)
            self.assertEqual('nova-instance-00000001-fa163e71ec6d', vnic0.fref)
            self.assertEqual('255.255.255.0', vnic0.parameters.get('projmask'))
            self.assertEqual('10.0.0.2', vnic0.parameters.get('ip'))
            self.assertEqual('10.0.0.0', vnic0.parameters.get('projnet'))
            self.assertEqual('10.0.0.1', vnic0.parameters.get('dhcpserver'))
            self.assertEqual(1, info0.rx_bytes)
            self.assertEqual(2, info0.rx_packets)
            self.assertEqual(3, info0.tx_bytes)
            self.assertEqual(4, info0.tx_packets)

            vnic1, info1 = interfaces[1]
            self.assertEqual('vnet1', vnic1.name)
            self.assertEqual('fa:16:3e:71:ec:6e', vnic1.mac)
            self.assertEqual('nova-instance-00000001-fa163e71ec6e', vnic1.fref)
            self.assertEqual('255.255.255.0', vnic1.parameters.get('projmask'))
            self.assertEqual('192.168.0.2', vnic1.parameters.get('ip'))
            self.assertEqual('192.168.0.0', vnic1.parameters.get('projnet'))
            self.assertEqual('192.168.0.1', vnic1.parameters.get('dhcpserver'))
            self.assertEqual(5, info1.rx_bytes)
            self.assertEqual(6, info1.rx_packets)
            self.assertEqual(7, info1.tx_bytes)
            self.assertEqual(8, info1.tx_packets)

            # vnet2 has no <filterref>, so fref/parameters must be empty.
            vnic2, info2 = interfaces[2]
            self.assertEqual('vnet2', vnic2.name)
            self.assertEqual('fa:16:3e:96:33:f0', vnic2.mac)
            self.assertIsNone(vnic2.fref)
            self.assertEqual(dict(), vnic2.parameters)
            self.assertEqual(9, info2.rx_bytes)
            self.assertEqual(10, info2.rx_packets)
            self.assertEqual(11, info2.tx_bytes)
            self.assertEqual(12, info2.tx_packets)

Example 43

Project: cinder
Source File: test_admin_actions.py
View license
    def test_volume_force_detach_raises_remote_error(self):
        """Force-detach must translate RemoteError by exception type.

        ``VolumeAttachmentNotFound`` maps to a 400 response, while
        ``KeyError`` (missing connector) and ``VolumeBackendAPIException``
        propagate to the caller as ``messaging.RemoteError``.
        """
        # current status is available
        volume = self._create_volume(self.ctx, {'provider_location': '',
                                                'size': 1})
        connector = {'initiator': 'iqn.2012-07.org.fake:01'}

        def _force_detach_request(body):
            # Build a POST os-force_detach request carrying the admin
            # context; this was previously triplicated inline below.
            req = webob.Request.blank('/v2/%s/volumes/%s/action' % (
                fake.PROJECT_ID, volume.id))
            req.method = 'POST'
            req.headers['content-type'] = 'application/json'
            req.body = jsonutils.dump_as_bytes(body)
            # attach admin context to request
            req.environ['cinder.context'] = self.ctx
            return req

        self.volume_api.reserve_volume(self.ctx, volume)
        mountpoint = '/dev/vbd'
        attachment = self.volume_api.attach(self.ctx, volume, fake.INSTANCE_ID,
                                            None, mountpoint, 'rw')
        # volume is attached
        volume.refresh()
        self.assertEqual('in-use', volume.status)
        self.assertEqual(fake.INSTANCE_ID, attachment['instance_uuid'])
        self.assertEqual(mountpoint, attachment['mountpoint'])
        self.assertEqual('attached', attachment['attach_status'])
        admin_metadata = volume.admin_metadata
        self.assertEqual(2, len(admin_metadata))
        self.assertEqual('False', admin_metadata['readonly'])
        self.assertEqual('rw', admin_metadata['attached_mode'])
        conn_info = self.volume_api.initialize_connection(self.ctx,
                                                          volume,
                                                          connector)
        self.assertEqual('rw', conn_info['data']['access_mode'])

        # VolumeAttachmentNotFound is turned into a 400 response.
        volume_remote_error = \
            messaging.RemoteError(exc_type='VolumeAttachmentNotFound')
        with mock.patch.object(volume_api.API, 'detach',
                               side_effect=volume_remote_error):
            req = _force_detach_request(
                {'os-force_detach': {'attachment_id': fake.ATTACHMENT_ID}})
            # make request
            resp = req.get_response(app())
            self.assertEqual(400, resp.status_int)

        # test for KeyError when missing connector
        volume_remote_error = (
            messaging.RemoteError(exc_type='KeyError'))
        with mock.patch.object(volume_api.API, 'detach',
                               side_effect=volume_remote_error):
            req = _force_detach_request(
                {'os-force_detach': {'attachment_id': fake.ATTACHMENT_ID}})
            # make request
            self.assertRaises(messaging.RemoteError,
                              req.get_response,
                              app())

        # test for VolumeBackendAPIException
        volume_remote_error = (
            messaging.RemoteError(exc_type='VolumeBackendAPIException'))
        with mock.patch.object(volume_api.API, 'detach',
                               side_effect=volume_remote_error):
            req = _force_detach_request(
                {'os-force_detach': {'attachment_id': fake.ATTACHMENT_ID,
                                     'connector': connector}})
            # make request
            self.assertRaises(messaging.RemoteError,
                              req.get_response,
                              app())

Example 44

Project: cinder
Source File: test_cg.py
View license
    @mock.patch.object(CGQUOTAS, "reserve",
                       return_value=["RESERVATION"])
    @mock.patch.object(CGQUOTAS, "commit")
    @mock.patch.object(CGQUOTAS, "rollback")
    @mock.patch.object(driver.VolumeDriver,
                       "create_consistencygroup",
                       return_value={'status': 'available'})
    @mock.patch.object(driver.VolumeDriver,
                       "update_consistencygroup")
    def test_update_consistencygroup(self, fake_update_cg,
                                     fake_create_cg, fake_rollback,
                                     fake_commit, fake_reserve):
        """Test consistencygroup can be updated.

        Adds one volume to the group and removes another, checks the
        resulting membership and notifications, then verifies that adding
        a volume in a bad status raises InvalidVolume.
        """
        group = tests_utils.create_consistencygroup(
            self.context,
            availability_zone=CONF.storage_availability_zone,
            volume_type='type1,type2')
        self.volume.create_consistencygroup(self.context, group)

        # One volume inside the group (to be removed) and one outside
        # (to be added).
        volume = tests_utils.create_volume(
            self.context,
            consistencygroup_id=group.id,
            **self.volume_params)
        self.volume.create_volume(self.context, volume)

        volume2 = tests_utils.create_volume(
            self.context,
            consistencygroup_id=None,
            **self.volume_params)
        self.volume.create_volume(self.context, volume2)

        fake_update_cg.return_value = (
            {'status': fields.ConsistencyGroupStatus.AVAILABLE},
            [{'id': volume2.id, 'status': 'available'}],
            [{'id': volume.id, 'status': 'available'}])

        self.volume.update_consistencygroup(self.context, group,
                                            add_volumes=volume2.id,
                                            remove_volumes=volume.id)
        cg = objects.ConsistencyGroup.get_by_id(self.context, group.id)
        expected = {
            'status': fields.ConsistencyGroupStatus.AVAILABLE,
            'name': 'test_cg',
            'availability_zone': 'nova',
            'tenant_id': self.context.project_id,
            'created_at': 'DONTCARE',
            'user_id': fake.USER_ID,
            'consistencygroup_id': group.id
        }
        self.assertEqual(fields.ConsistencyGroupStatus.AVAILABLE, cg.status)
        self.assertEqual(10, len(self.notifier.notifications),
                         self.notifier.notifications)
        msg = self.notifier.notifications[6]
        self.assertEqual('consistencygroup.update.start', msg['event_type'])
        self.assertDictMatch(expected, msg['payload'])
        msg = self.notifier.notifications[8]
        self.assertEqual('consistencygroup.update.end', msg['event_type'])
        self.assertDictMatch(expected, msg['payload'])
        cgvolumes = db.volume_get_all_by_group(self.context, group.id)
        cgvol_ids = [cgvol['id'] for cgvol in cgvolumes]
        # Verify volume is removed.
        self.assertNotIn(volume.id, cgvol_ids)
        # Verify volume is added.
        self.assertIn(volume2.id, cgvol_ids)

        self.volume_params['status'] = 'wrong-status'
        volume3 = tests_utils.create_volume(
            self.context,
            consistencygroup_id=None,
            **self.volume_params)
        volume_id3 = volume3['id']

        # Patch volume_get via a context manager so the real method is
        # restored even if the assertion below fails; the manual
        # save/restore this replaces leaked the mock on failure.
        with mock.patch.object(self.volume.db, 'volume_get',
                               return_value={'status': 'wrong_status',
                                             'id': volume_id3}):
            # Try to add a volume in wrong status
            self.assertRaises(exception.InvalidVolume,
                              self.volume.update_consistencygroup,
                              self.context,
                              group,
                              add_volumes=volume_id3,
                              remove_volumes=None)
Example 45

Project: cinder
Source File: test_cg.py
View license
    @mock.patch.object(driver.VolumeDriver,
                       "create_consistencygroup",
                       return_value={'status': 'available'})
    @mock.patch.object(driver.VolumeDriver,
                       "delete_consistencygroup",
                       return_value=({'status': 'deleted'}, []))
    @mock.patch.object(driver.VolumeDriver,
                       "create_cgsnapshot",
                       return_value={'status': 'available'})
    @mock.patch.object(driver.VolumeDriver,
                       "delete_cgsnapshot",
                       return_value=({'status': 'deleted'}, []))
    @mock.patch.object(driver.VolumeDriver,
                       "create_consistencygroup_from_src",
                       return_value=(None, None))
    @mock.patch('cinder.volume.drivers.lvm.LVMVolumeDriver.'
                'create_volume_from_snapshot')
    @mock.patch('cinder.volume.drivers.lvm.LVMVolumeDriver.'
                'create_cloned_volume')
    def test_create_consistencygroup_from_src(self,
                                              mock_create_cloned_vol,
                                              mock_create_vol_from_snap,
                                              mock_create_from_src,
                                              mock_delete_cgsnap,
                                              mock_create_cgsnap,
                                              mock_delete_cg,
                                              mock_create_cg):
        """Test consistencygroup can be created and deleted.

        Covers both source variants: creating a CG from a cgsnapshot and
        creating a CG from another CG, with all driver calls stubbed out.
        """
        group = tests_utils.create_consistencygroup(
            self.context,
            availability_zone=CONF.storage_availability_zone,
            volume_type='type1,type2',
            status=fields.ConsistencyGroupStatus.AVAILABLE)
        volume = tests_utils.create_volume(
            self.context,
            consistencygroup_id=group.id,
            status='available',
            host=CONF.host,
            size=1)
        volume_id = volume['id']
        cgsnapshot_returns = self._create_cgsnapshot(group.id, [volume_id])
        cgsnapshot = cgsnapshot_returns[0]
        snapshot_id = cgsnapshot_returns[1][0]['id']

        # Create CG from source CG snapshot.
        group2 = tests_utils.create_consistencygroup(
            self.context,
            availability_zone=CONF.storage_availability_zone,
            volume_type='type1,type2',
            cgsnapshot_id=cgsnapshot.id)
        group2 = objects.ConsistencyGroup.get_by_id(self.context, group2.id)
        volume2 = tests_utils.create_volume(
            self.context,
            consistencygroup_id=group2.id,
            snapshot_id=snapshot_id,
            **self.volume_params)
        self.volume.create_volume(self.context, volume2)
        self.volume.create_consistencygroup_from_src(
            self.context, group2, cgsnapshot=cgsnapshot)
        cg2 = objects.ConsistencyGroup.get_by_id(self.context, group2.id)
        expected = {
            'status': fields.ConsistencyGroupStatus.AVAILABLE,
            'name': 'test_cg',
            'availability_zone': 'nova',
            'tenant_id': self.context.project_id,
            'created_at': 'DONTCARE',
            'user_id': fake.USER_ID,
            'consistencygroup_id': group2.id,
        }
        self.assertEqual(fields.ConsistencyGroupStatus.AVAILABLE, cg2.status)
        self.assertEqual(group2.id, cg2['id'])
        self.assertEqual(cgsnapshot.id, cg2['cgsnapshot_id'])
        self.assertIsNone(cg2['source_cgid'])

        # group2's create.start/.end land at indices 2 and 4; earlier
        # indices belong to the setup above.
        msg = self.notifier.notifications[2]
        self.assertEqual('consistencygroup.create.start', msg['event_type'])
        self.assertDictMatch(expected, msg['payload'])
        msg = self.notifier.notifications[4]
        self.assertEqual('consistencygroup.create.end', msg['event_type'])
        self.assertDictMatch(expected, msg['payload'])

        # Exactly 6 notifications so far; if more, show the first extra
        # one in the failure message.
        if len(self.notifier.notifications) > 6:
            self.assertFalse(self.notifier.notifications[6],
                             self.notifier.notifications)
        self.assertEqual(6, len(self.notifier.notifications),
                         self.notifier.notifications)

        self.volume.delete_consistencygroup(self.context, group2)

        # Deletion adds 4 notifications (delete start/end plus the volume's
        # own), bringing the total to 10.
        if len(self.notifier.notifications) > 10:
            self.assertFalse(self.notifier.notifications[10],
                             self.notifier.notifications)
        self.assertEqual(10, len(self.notifier.notifications),
                         self.notifier.notifications)

        msg = self.notifier.notifications[6]
        self.assertEqual('consistencygroup.delete.start', msg['event_type'])
        expected['status'] = fields.ConsistencyGroupStatus.AVAILABLE
        self.assertDictMatch(expected, msg['payload'])
        msg = self.notifier.notifications[8]
        self.assertEqual('consistencygroup.delete.end', msg['event_type'])
        expected['status'] = fields.ConsistencyGroupStatus.DELETED
        self.assertDictMatch(expected, msg['payload'])

        # The deleted CG is only visible with read_deleted='yes'.
        cg2 = objects.ConsistencyGroup.get_by_id(
            context.get_admin_context(read_deleted='yes'), group2.id)
        self.assertEqual(fields.ConsistencyGroupStatus.DELETED, cg2.status)
        self.assertRaises(exception.NotFound,
                          objects.ConsistencyGroup.get_by_id,
                          self.context,
                          group2.id)

        # Create CG from source CG.
        group3 = tests_utils.create_consistencygroup(
            self.context,
            availability_zone=CONF.storage_availability_zone,
            volume_type='type1,type2',
            source_cgid=group.id)
        volume3 = tests_utils.create_volume(
            self.context,
            consistencygroup_id=group3.id,
            source_volid=volume_id,
            **self.volume_params)
        self.volume.create_volume(self.context, volume3)
        self.volume.create_consistencygroup_from_src(
            self.context, group3, source_cg=group)

        cg3 = objects.ConsistencyGroup.get_by_id(self.context, group3.id)

        # CG-from-CG records the source group, not a snapshot.
        self.assertEqual(fields.ConsistencyGroupStatus.AVAILABLE, cg3.status)
        self.assertEqual(group3.id, cg3.id)
        self.assertEqual(group.id, cg3.source_cgid)
        self.assertIsNone(cg3.cgsnapshot_id)

        self.volume.delete_cgsnapshot(self.context, cgsnapshot)

        self.volume.delete_consistencygroup(self.context, group)

Example 46

Project: cinder
Source File: test_groups_manager.py
View license
    @mock.patch.object(GROUP_QUOTAS, "reserve",
                       return_value=["RESERVATION"])
    @mock.patch.object(GROUP_QUOTAS, "commit")
    @mock.patch.object(GROUP_QUOTAS, "rollback")
    @mock.patch.object(driver.VolumeDriver,
                       "create_group",
                       return_value={'status': 'available'})
    @mock.patch.object(driver.VolumeDriver,
                       "update_group")
    def test_update_group(self, fake_update_grp,
                          fake_create_grp, fake_rollback,
                          fake_commit, fake_reserve):
        """Test group can be updated.

        Adds one volume to the group and removes another, checks the
        resulting membership and notifications, then verifies that adding
        a volume in a bad status raises InvalidVolume.
        """
        group = tests_utils.create_group(
            self.context,
            availability_zone=CONF.storage_availability_zone,
            volume_type_ids=[fake.VOLUME_TYPE_ID],
            group_type_id=fake.GROUP_TYPE_ID,
            host=CONF.host)
        self.volume.create_group(self.context, group)

        # One volume inside the group (to be removed) and one outside
        # (to be added).
        volume = tests_utils.create_volume(
            self.context,
            group_id=group.id,
            volume_type_id=fake.VOLUME_TYPE_ID,
            status='available',
            host=group.host)
        self.volume.create_volume(self.context, volume)

        volume2 = tests_utils.create_volume(
            self.context,
            group_id=None,
            volume_type_id=fake.VOLUME_TYPE_ID,
            status='available',
            host=group.host)
        # Bug fix: this call previously passed ``volume`` a second time,
        # so ``volume2`` was never run through the manager (the parallel
        # consistencygroup test passes volume2 here).
        self.volume.create_volume(self.context, volume2)

        fake_update_grp.return_value = (
            {'status': fields.GroupStatus.AVAILABLE},
            [{'id': volume2.id, 'status': 'available'}],
            [{'id': volume.id, 'status': 'available'}])

        self.volume.update_group(self.context, group,
                                 add_volumes=volume2.id,
                                 remove_volumes=volume.id)
        grp = objects.Group.get_by_id(self.context, group.id)
        expected = {
            'status': fields.GroupStatus.AVAILABLE,
            'name': 'test_group',
            'availability_zone': 'nova',
            'tenant_id': self.context.project_id,
            'created_at': 'DONTCARE',
            'user_id': fake.USER_ID,
            'group_id': group.id,
            'group_type': fake.GROUP_TYPE_ID
        }
        self.assertEqual(fields.GroupStatus.AVAILABLE, grp.status)
        self.assertEqual(10, len(self.notifier.notifications),
                         self.notifier.notifications)
        msg = self.notifier.notifications[6]
        self.assertEqual('group.update.start', msg['event_type'])
        self.assertDictMatch(expected, msg['payload'])
        msg = self.notifier.notifications[8]
        self.assertEqual('group.update.end', msg['event_type'])
        self.assertDictMatch(expected, msg['payload'])
        grpvolumes = db.volume_get_all_by_generic_group(self.context, group.id)
        grpvol_ids = [grpvol['id'] for grpvol in grpvolumes]
        # Verify volume is removed.
        self.assertNotIn(volume.id, grpvol_ids)
        # Verify volume is added.
        self.assertIn(volume2.id, grpvol_ids)

        volume3 = tests_utils.create_volume(
            self.context,
            group_id=None,
            host=group.host,
            volume_type_id=fake.VOLUME_TYPE_ID,
            status='wrong-status')
        volume_id3 = volume3['id']

        # Patch volume_get via a context manager so the real method is
        # restored even if the assertion below fails; the manual
        # save/restore this replaces leaked the mock on failure.
        with mock.patch.object(self.volume.db, 'volume_get',
                               return_value={'status': 'wrong_status',
                                             'id': volume_id3}):
            # Try to add a volume in wrong status
            self.assertRaises(exception.InvalidVolume,
                              self.volume.update_group,
                              self.context,
                              group,
                              add_volumes=volume_id3,
                              remove_volumes=None)

Example 47

Project: cinder
Source File: test_groups_manager.py
View license
    @mock.patch.object(driver.VolumeDriver,
                       "create_group",
                       return_value={'status': 'available'})
    @mock.patch.object(driver.VolumeDriver,
                       "delete_group",
                       return_value=({'status': 'deleted'}, []))
    @mock.patch.object(driver.VolumeDriver,
                       "create_group_snapshot",
                       return_value={'status': 'available'})
    @mock.patch.object(driver.VolumeDriver,
                       "delete_group_snapshot",
                       return_value=({'status': 'deleted'}, []))
    @mock.patch.object(driver.VolumeDriver,
                       "create_group_from_src",
                       return_value=(None, None))
    @mock.patch('cinder.volume.drivers.lvm.LVMVolumeDriver.'
                'create_volume_from_snapshot')
    @mock.patch('cinder.volume.drivers.lvm.LVMVolumeDriver.'
                'create_cloned_volume')
    def test_create_group_from_src(self,
                                   mock_create_cloned_vol,
                                   mock_create_vol_from_snap,
                                   mock_create_from_src,
                                   mock_delete_grpsnap,
                                   mock_create_grpsnap,
                                   mock_delete_grp,
                                   mock_create_grp):
        """Test group can be created and deleted.

        Covers both source variants: creating a group from a group
        snapshot and creating a group from another group, with all driver
        calls stubbed out.
        """
        group = tests_utils.create_group(
            self.context,
            availability_zone=CONF.storage_availability_zone,
            status=fields.GroupStatus.AVAILABLE,
            volume_type_ids=[fake.VOLUME_TYPE_ID],
            group_type_id=fake.GROUP_TYPE_ID,
            host=CONF.host)
        volume = tests_utils.create_volume(
            self.context,
            group_id=group.id,
            status='available',
            host=group.host,
            volume_type_id=fake.VOLUME_TYPE_ID,
            size=1)
        volume_id = volume['id']
        group_snapshot_returns = self._create_group_snapshot(group.id,
                                                             [volume_id])
        group_snapshot = group_snapshot_returns[0]
        snapshot_id = group_snapshot_returns[1][0]['id']

        # Create group from source group snapshot.
        group2 = tests_utils.create_group(
            self.context,
            availability_zone=CONF.storage_availability_zone,
            group_snapshot_id=group_snapshot.id,
            volume_type_ids=[fake.VOLUME_TYPE_ID],
            group_type_id=fake.GROUP_TYPE_ID,
            host=CONF.host)
        group2 = objects.Group.get_by_id(self.context, group2.id)
        volume2 = tests_utils.create_volume(
            self.context,
            group_id=group2.id,
            snapshot_id=snapshot_id,
            status='available',
            host=group2.host,
            volume_type_id=fake.VOLUME_TYPE_ID)
        self.volume.create_volume(self.context, volume2)
        self.volume.create_group_from_src(
            self.context, group2, group_snapshot=group_snapshot)
        grp2 = objects.Group.get_by_id(self.context, group2.id)
        expected = {
            'status': fields.GroupStatus.AVAILABLE,
            'name': 'test_group',
            'availability_zone': 'nova',
            'tenant_id': self.context.project_id,
            'created_at': 'DONTCARE',
            'user_id': fake.USER_ID,
            'group_id': group2.id,
            'group_type': fake.GROUP_TYPE_ID,
        }
        self.assertEqual(fields.GroupStatus.AVAILABLE, grp2.status)
        self.assertEqual(group2.id, grp2['id'])
        self.assertEqual(group_snapshot.id, grp2['group_snapshot_id'])
        self.assertIsNone(grp2['source_group_id'])

        # group2's create.start/.end land at indices 2 and 4; earlier
        # indices belong to the setup above.
        msg = self.notifier.notifications[2]
        self.assertEqual('group.create.start', msg['event_type'])
        self.assertDictMatch(expected, msg['payload'])
        msg = self.notifier.notifications[4]
        self.assertEqual('group.create.end', msg['event_type'])
        self.assertDictMatch(expected, msg['payload'])

        if len(self.notifier.notifications) > 6:
            self.assertFalse(self.notifier.notifications[6],
                             self.notifier.notifications)
        self.assertEqual(6, len(self.notifier.notifications),
                         self.notifier.notifications)

        self.volume.delete_group(self.context, group2)

        # Bug fix: the guard checks for more than 9 notifications, so the
        # first unexpected one is at index 9.  The original indexed [10],
        # which raises IndexError (instead of a clean assertion failure)
        # when exactly one extra notification exists.
        if len(self.notifier.notifications) > 9:
            self.assertFalse(self.notifier.notifications[9],
                             self.notifier.notifications)
        self.assertEqual(9, len(self.notifier.notifications),
                         self.notifier.notifications)

        msg = self.notifier.notifications[6]
        self.assertEqual('group.delete.start', msg['event_type'])
        expected['status'] = fields.GroupStatus.AVAILABLE
        self.assertDictMatch(expected, msg['payload'])
        msg = self.notifier.notifications[8]
        self.assertEqual('group.delete.end', msg['event_type'])
        expected['status'] = fields.GroupStatus.DELETED
        self.assertDictMatch(expected, msg['payload'])

        # The deleted group is only visible with read_deleted='yes'.
        grp2 = objects.Group.get_by_id(
            context.get_admin_context(read_deleted='yes'), group2.id)
        self.assertEqual(fields.GroupStatus.DELETED, grp2.status)
        self.assertRaises(exception.NotFound,
                          objects.Group.get_by_id,
                          self.context,
                          group2.id)

        # Create group from source group
        group3 = tests_utils.create_group(
            self.context,
            availability_zone=CONF.storage_availability_zone,
            source_group_id=group.id,
            volume_type_ids=[fake.VOLUME_TYPE_ID],
            group_type_id=fake.GROUP_TYPE_ID,
            host=CONF.host)
        volume3 = tests_utils.create_volume(
            self.context,
            group_id=group3.id,
            source_volid=volume_id,
            status='available',
            host=group3.host,
            volume_type_id=fake.VOLUME_TYPE_ID)
        self.volume.create_volume(self.context, volume3)
        self.volume.create_group_from_src(
            self.context, group3, source_group=group)

        grp3 = objects.Group.get_by_id(self.context, group3.id)

        # Group-from-group records the source group, not a snapshot.
        self.assertEqual(fields.GroupStatus.AVAILABLE, grp3.status)
        self.assertEqual(group3.id, grp3.id)
        self.assertEqual(group.id, grp3.source_group_id)
        self.assertIsNone(grp3.group_snapshot_id)

        self.volume.delete_group_snapshot(self.context, group_snapshot)
        self.volume.delete_group(self.context, group)

Example 48

Project: compute-hyperv
Source File: test_migrationops.py
View license
    @mock.patch.object(migrationops.MigrationOps,
                       '_check_and_attach_config_drive')
    @mock.patch.object(migrationops.MigrationOps, '_check_base_disk')
    @mock.patch.object(migrationops.MigrationOps, '_check_resize_vhd')
    @mock.patch.object(migrationops.MigrationOps, '_check_ephemeral_disks')
    def _check_finish_migration(self, mock_check_eph_disks,
                                mock_check_resize_vhd,
                                mock_check_base_disk,
                                mock_check_attach_config_drive,
                                disk_type=constants.DISK):
        """Drive finish_migration() and verify the calls it makes.

        When the root device is a plain disk, additionally expects the VHD
        lookup, base-disk check and resize check to have happened.
        """
        instance = fake_instance.fake_instance_obj(self.context)
        instance.flavor.ephemeral_gb = 1
        root_device = {'type': disk_type}
        block_device_info = {'root_disk': root_device, 'ephemerals': []}

        pathutils = self._migrationops._pathutils
        vhdutils = self._migrationops._vhdutils
        vmops = self._migrationops._vmops
        vhd_info = vhdutils.get_vhd_info.return_value

        expected_resize_calls = []
        expected_info_calls = []

        self._migrationops.finish_migration(
            context=self.context, migration=mock.sentinel.migration,
            instance=instance, disk_info=mock.sentinel.disk_info,
            network_info=mock.sentinel.network_info,
            image_meta=mock.sentinel.image_meta, resize_instance=True,
            block_device_info=block_device_info)

        if disk_type == constants.DISK:
            root_path = pathutils.lookup_root_vhd_path.return_value
            pathutils.lookup_root_vhd_path.assert_called_with(instance.name)
            expected_info_calls = [mock.call(root_path)]
            vhd_info.get.assert_called_once_with("ParentPath")
            mock_check_base_disk.assert_called_once_with(
                self.context, instance, root_path,
                vhd_info.get.return_value)
            expected_resize_calls = [
                mock.call(root_path, vhd_info,
                          instance.flavor.root_gb * units.Gi)]

        mock_check_eph_disks.assert_called_once_with(
            instance, block_device_info['ephemerals'], True)
        mock_check_resize_vhd.assert_has_calls(expected_resize_calls)
        vhdutils.get_vhd_info.assert_has_calls(expected_info_calls)

        image_vm_gen = vmops.get_image_vm_generation
        image_vm_gen.assert_called_once_with(instance.uuid,
                                             mock.sentinel.image_meta)
        vmops.create_instance.assert_called_once_with(
            self.context, instance, mock.sentinel.network_info,
            root_device, block_device_info, image_vm_gen.return_value,
            mock.sentinel.image_meta)
        mock_check_attach_config_drive.assert_called_once_with(
            instance, image_vm_gen.return_value)
        vmops.power_on.assert_called_once_with(instance)

Example 49

Project: gnocchi
Source File: test_carbonara.py
View license
    def test_get_measures(self):
        """Verify measures are readable before, across and after the
        storage-format upgrade, for the default and 'max' aggregations,
        and that new measures written post-upgrade are processed.
        """
        # Expected (timestamp, granularity, value) triples for the data
        # already in the store; identical before and after the upgrade.
        pre_existing_mean = [
            (utils.datetime_utc(2016, 7, 17), 86400, 5),
            (utils.datetime_utc(2016, 7, 18), 86400, 8),
            (utils.datetime_utc(2016, 7, 17, 23), 3600, 5),
            (utils.datetime_utc(2016, 7, 18, 0), 3600, 8),
            (utils.datetime_utc(2016, 7, 17, 23, 55), 300, 5),
            (utils.datetime_utc(2016, 7, 18, 0), 300, 8),
        ]
        pre_existing_max = [
            (utils.datetime_utc(2016, 7, 17), 86400, 6),
            (utils.datetime_utc(2016, 7, 18), 86400, 9),
            (utils.datetime_utc(2016, 7, 17, 23), 3600, 6),
            (utils.datetime_utc(2016, 7, 18, 0), 3600, 9),
            (utils.datetime_utc(2016, 7, 17, 23, 55), 300, 6),
            (utils.datetime_utc(2016, 7, 18, 0), 300, 9),
        ]

        def patch_v2_reader():
            # Route every read through the legacy (v2) deserializer.
            return mock.patch.object(
                self.storage, '_get_measures_and_unserialize',
                side_effect=self.storage._get_measures_and_unserialize_v2)

        # Pre-upgrade data must be readable via the v2 reader.
        with patch_v2_reader():
            self.assertEqual(pre_existing_mean,
                             self.storage.get_measures(self.metric))
            self.assertEqual(
                pre_existing_max,
                self.storage.get_measures(self.metric, aggregation='max'))

        self.upgrade()

        # After upgrading, the same data is served natively.
        self.assertEqual(pre_existing_mean,
                         self.storage.get_measures(self.metric))
        self.assertEqual(
            pre_existing_max,
            self.storage.get_measures(self.metric, aggregation='max'))

        # The legacy reader no longer finds the upgraded aggregates.
        with patch_v2_reader():
            self.assertRaises(
                storage.AggregationDoesNotExist,
                self.storage.get_measures, self.metric)
            self.assertRaises(
                storage.AggregationDoesNotExist,
                self.storage.get_measures, self.metric, aggregation='max')

        # Measures added after the upgrade are processed and queryable.
        self.storage.add_measures(self.metric, [
            storage.Measure(utils.dt_to_unix_ns(2016, 7, 18), 69),
            storage.Measure(utils.dt_to_unix_ns(2016, 7, 18, 1, 1), 64),
        ])

        with mock.patch.object(self.index, 'list_metrics') as list_metrics:
            # First poll returns our metric, second poll ends the loop.
            list_metrics.side_effect = [[self.metric], []]
            self.storage.process_background_tasks(
                self.index, [str(self.metric.id)], sync=True)

        self.assertEqual([
            (utils.datetime_utc(2016, 7, 17), 86400, 6),
            (utils.datetime_utc(2016, 7, 18), 86400, 69),
            (utils.datetime_utc(2016, 7, 17, 23), 3600, 6),
            (utils.datetime_utc(2016, 7, 18, 0), 3600, 69),
            (utils.datetime_utc(2016, 7, 18, 1), 3600, 64),
            (utils.datetime_utc(2016, 7, 18, 0), 300, 69),
            (utils.datetime_utc(2016, 7, 18, 1), 300, 64),
        ], self.storage.get_measures(self.metric, aggregation='max'))

Example 50

View license
    def _test_lbaasdriver(self, method_name):
        """ Tests all create/update/delete operation of loadbalancer driver.

        Builds the agent/driver/REST-client fixture, patches the plugin
        RPC and HTTP layers, invokes the driver call selected by
        *method_name* and asserts the expected REST request (HTTP method,
        url, headers, payload, timeout) was issued.

        :param method_name: opcode such as 'CREATE_VIP', 'UPDATE_POOL',
            'DELETE_MEMBER', ... choosing the driver call under test.

        Returns: none

        """
        agent = self._get_lb_handler_objects()
        driver = lb_driver.HaproxyOnVmDriver(agent.plugin_rpc)
        rest_client = _rest_client.HttpRequests(
            self.data.url, self.data.port)
        # Canned RPC answer: the driver fetches the full logical device
        # (vip/pool/monitors/members) from the plugin before acting.
        logical_device_return_value = {
            'vip': self.fo.vip,
            'old_vip': self.fo.old_vip,
            'pool': self.fo.pool,
            'healthmonitors': self.fo.hm,
            'members': self.fo.member}
        # Python-2 style multi-context 'with': each mock.patch.object is
        # parenthesized and joined by commas into a single statement.
        with mock.patch.object(
                agent.plugin_rpc, 'get_logical_device',
                return_value=logical_device_return_value), (
            mock.patch.object(
                driver, '_get_rest_client', return_value=rest_client)), (
            mock.patch.object(
                rest_client.pool, 'request',
                return_value=self.resp)) as mock_request, (
            mock.patch.object(
                rest_client, 'get_resource',
                return_value=self.get_resource)) as mock_get_resource:

            # NOTE(review): this sets status_code on the *request* mock,
            # not on the mocked response (self.resp) -- presumably meant
            # for the response; confirm against the driver's status check.
            mock_request.status_code = 200
            if method_name == 'DELETE_VIP':
                driver.delete_vip(self.fo.vip, self.fo.context)
                # VIP deletion is the only true HTTP DELETE in this suite.
                mock_request.assert_called_with(
                    'DELETE',
                    data=None,
                    headers=self.data.header,
                    timeout=self.data.timeout,
                    url=self.data.delete_vip_url)
            elif method_name == 'CREATE_VIP':
                driver.create_vip(self.fo.vip, self.fo.context)
                data = jsonutils.dumps(self.data.create_vip_data)
                # NOTE(review): hard-coded timeout=30 here while every
                # other branch uses self.data.timeout -- confirm intent.
                mock_request.assert_called_with(
                    'POST',
                    data=data,
                    headers=self.data.header,
                    timeout=30,
                    url=self.data.create_vip_url)
                # VIP creation also pulls dependent resources first.
                mock_get_resource.assert_called_with(
                    self.data.create_vip_resources)
            elif method_name == 'UPDATE_VIP':
                driver.update_vip(
                    self.fo.old_vip,
                    self.fo.vip,
                    self.fo.context)
                data = jsonutils.dumps(self.data.update_vip_data)
                mock_request.assert_called_with(
                    'PUT',
                    data=data,
                    headers=self.data.header,
                    timeout=self.data.timeout,
                    url=self.data.update_vip_url)
            elif method_name == 'CREATE_POOL':
                # No REST assertion for pool create/delete: the test only
                # checks the driver call completes without raising.
                driver.create_pool(self.fo.pool, self.fo.context)
            elif method_name == 'DELETE_POOL':
                driver.delete_pool(self.fo.pool, self.fo.context)
            elif method_name == 'UPDATE_POOL':
                driver.update_pool(
                    self.fo.old_pool,
                    self.fo.pool,
                    self.fo.context)
                data = jsonutils.dumps(self.data.update_pool_data)
                mock_request.assert_called_with(
                    'PUT',
                    data=data,
                    headers=self.data.header,
                    timeout=self.data.timeout,
                    url=self.data.update_pool_url)
            elif method_name == 'CREATE_MEMBER':
                # Member and health-monitor changes (including deletes)
                # are pushed as PUTs of updated config, not HTTP DELETEs.
                driver.create_member(self.fo.member[0], self.fo.context)
                data = jsonutils.dumps(self.data.create_member_data)
                mock_request.assert_called_with(
                    'PUT',
                    data=data,
                    headers=self.data.header,
                    timeout=self.data.timeout,
                    url=self.data.create_member_url)
            elif method_name == 'DELETE_MEMBER':
                driver.delete_member(self.fo.member[0], self.fo.context)
                data = jsonutils.dumps(self.data.delete_member_data)
                mock_request.assert_called_with(
                    'PUT',
                    data=data,
                    headers=self.data.header,
                    timeout=self.data.timeout,
                    url=self.data.delete_member_url)
            elif method_name == 'UPDATE_MEMBER':
                driver.update_member(
                    self.fo.old_member[0],
                    self.fo.member[0],
                    self.fo.context)
                data = jsonutils.dumps(self.data.update_member_data)
                mock_request.assert_called_with(
                    'PUT',
                    data=data,
                    headers=self.data.header,
                    timeout=self.data.timeout,
                    url=self.data.update_member_url)
            elif method_name == 'CREATE_POOL_HEALTH_MONITOR':
                driver.create_pool_health_monitor(
                    self.fo.hm[0], self.fo._get_pool_object()[0]['id'],
                    self.fo.context)
                data = jsonutils.dumps(self.data.create_hm_data)
                mock_request.assert_called_with(
                    'PUT',
                    data=data,
                    headers=self.data.header,
                    timeout=self.data.timeout,
                    url=self.data.create_hm_url)
            elif method_name == 'DELETE_POOL_HEALTH_MONITOR':
                driver.delete_pool_health_monitor(
                    self.fo.hm[0], self.fo._get_pool_object()[0]['id'],
                    self.fo.context)
                data = jsonutils.dumps(self.data.delete_hm_data)
                mock_request.assert_called_with(
                    'PUT',
                    data=data,
                    headers=self.data.header,
                    timeout=self.data.timeout,
                    url=self.data.delete_hm_url)
            elif method_name == 'UPDATE_POOL_HEALTH_MONITOR':
                driver.update_pool_health_monitor(
                    self.fo.old_hm[0],
                    self.fo.hm[0], self.fo._get_pool_object()[0]['id'],
                    self.fo.context)
                data = jsonutils.dumps(self.data.update_hm_data)
                mock_request.assert_called_with(
                    'PUT',
                    data=data,
                    headers=self.data.header,
                    timeout=self.data.timeout,
                    url=self.data.update_hm_url)