mock.patch

Here are examples of the Python API mock.patch, taken from open source projects.
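
Before the project examples, here is a minimal, self-contained sketch of the two most common ways to apply mock.patch: as a decorator, which passes the replacement MagicMock into the test as an extra argument, and as a context manager, which patches only inside the with block. The helper home_config_path and the file name .myapp.cfg are made up for illustration.

from unittest import mock  # the standalone `mock` package exposes the same API

import os


def home_config_path():
    # Code under test: depends on an environment-specific helper.
    return os.path.expanduser('~') + '/.myapp.cfg'


# As a decorator: the target is replaced for the duration of the test and the
# replacement MagicMock is passed to the test function as an extra argument.
@mock.patch('os.path.expanduser')
def test_home_config_path_decorator(fake_expanduser):
    fake_expanduser.return_value = '/home/someone'
    assert home_config_path() == '/home/someone/.myapp.cfg'
    fake_expanduser.assert_called_once_with('~')


# As a context manager: the patch is active only inside the `with` block.
def test_home_config_path_context_manager():
    with mock.patch('os.path.expanduser', return_value='/tmp') as fake_expanduser:
        assert home_config_path() == '/tmp/.myapp.cfg'
    fake_expanduser.assert_called_once_with('~')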

175 Examples

Example 101

Project: socorro Source File: test_fetch_adi_from_hive.py
    @mock.patch('socorro.cron.jobs.fetch_adi_from_hive.pyhs2')
    def test_mocked_fetch(self, fake_hive):
        config_manager = self._setup_config_manager()

        yesterday = (
            datetime.datetime.utcnow() - datetime.timedelta(days=1)
        ).date()

        def return_test_data(fake):
            yield [
                yesterday,
                'WinterWolf',
                'Ginko',
                '2.3.1',
                '10.0.4',
                'nightly-ww3v20',
                'nightly',
                'a-guid',
                1
            ]
            yield [
                yesterday,
                'NothingMuch',
                'Ginko',
                '3.2.1',
                '10.0.4',
                'release-ww3v20',
                'release-cck-blah',
                '[email protected]',
                1
            ]
            yield [
                '2019-01-01',
                'NothingMuch',
                u'Ginko☢\0',
                '2.3.2',
                '10.0.5a',
                'release',
                'release-cck-\\',
                '%7Ba-guid%7D',
                2
            ]
            yield [
                '2019-01-01',
                'Missing',
                'Ginko',
                '2.3.2',
                '',
                None,
                'release',
                '%7Ba-guid%7D',
                2
            ]
            yield [
                yesterday,
                'FennecAndroid',   # product name
                'Ginko',           # platform?
                '3.1415',          # platform version
                '38.0',            # product version
                '20150427090529',  # build
                'release',         # update channel
                'a-guid',          # product guid
                666                # count
            ]

        fake_hive.connect.return_value \
            .cursor.return_value.__iter__ = return_test_data

        with config_manager.context() as config:
            tab = CronTabber(config)
            tab.run_all()

            information = self._load_structure()
            assert information['fetch-adi-from-hive']
            assert not information['fetch-adi-from-hive']['last_error']

            config.logger.info.assert_called_with(
                'Wrote 4 rows from doing hive query'
            )

        fake_hive.connect.assert_called_with(
            database='default',
            authMechanism='PLAIN',
            host='localhost',
            user='socorro',
            password='ignored',
            port=10000,
            timeout=1800000,
        )

        pgcursor = self.conn.cursor()
        columns = (
            'report_date',
            'product_name',
            'product_os_platform',
            'product_os_version',
            'product_version',
            'build',
            'build_channel',
            'product_guid',
            'count'
        )
        pgcursor.execute(
            "select %s from raw_adi_logs" % ','.join(columns)
        )
        adi_logs = [dict(zip(columns, row)) for row in pgcursor.fetchall()]
        eq_(adi_logs[0], {
            'report_date': yesterday,
            'product_name': 'WinterWolf',
            'product_os_platform': 'Ginko',
            'product_os_version': '2.3.1',
            'product_version': '10.0.4',
            'build': 'nightly-ww3v20',
            'build_channel': 'nightly',
            'product_guid': 'a-guid',
            'count': 1
        })
        eq_(adi_logs[1], {
            'report_date': yesterday,
            'product_name': 'NothingMuch',
            'product_os_platform': 'Ginko',
            'product_os_version': '3.2.1',
            'product_version': '10.0.4',
            'build': 'release-ww3v20',
            'build_channel': 'release-cck-blah',
            'product_guid': '[email protected]',
            'count': 1
        })
        eq_(adi_logs[2], {
            'report_date': datetime.date(2019, 1, 1),
            'product_name': 'NothingMuch',
            'product_os_platform': 'Ginko\xe2\x98\xa2',
            'product_os_version': '2.3.2',
            'product_version': '10.0.5a',
            'build': 'release',
            'build_channel': 'release-cck-\\',
            'product_guid': '{a-guid}',
            'count': 2
        })
        eq_(adi_logs[3], {
            'report_date': yesterday,
            'product_name': 'FennecAndroid',
            'product_os_platform': 'Ginko',
            'product_os_version': '3.1415',
            'product_version': '38.0',
            'build': '20150427090529',
            'build_channel': 'release',
            'product_guid': 'a-guid',
            'count': 666
        })

        columns = (
            'adi_count',
            'date',
            'product_name',
            'product_os_platform',
            'product_os_version',
            'product_version',
            'build',
            'product_guid',
            'update_channel',
        )
        pgcursor.execute(
            """ select %s from raw_adi
                order by update_channel desc""" % ','.join(columns)
        )
        adi = [dict(zip(columns, row)) for row in pgcursor.fetchall()]
        eq_(adi[0], {
            'update_channel': 'release',
            'product_guid': '{[email protected]}',
            'product_version': '10.0.4',
            'adi_count': 1,
            'product_os_platform': 'Ginko',
            'build': 'release-ww3v20',
            'date': yesterday,
            'product_os_version': '3.2.1',
            'product_name': 'NothingMuch'
        })
        eq_(adi[1], {
            'update_channel': 'nightly',
            'product_guid': 'a-guid',
            'product_version': '10.0.4',
            'adi_count': 1,
            'product_os_platform': 'Ginko',
            'build': 'nightly-ww3v20',
            'date': yesterday,
            'product_os_version': '2.3.1',
            'product_name': 'WinterWolf'
        })
        eq_(adi[2], {
            'update_channel': 'beta',
            'product_guid': 'a-guid',
            'product_version': '38.0',
            'adi_count': 666,
            'product_os_platform': 'Ginko',
            'build': '20150427090529',
            'date': yesterday,
            'product_os_version': '3.1415',
            'product_name': 'FennecAndroid'
        })
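
The socorro test above patches an entire module (pyhs2) and wires up fake_hive.connect.return_value.cursor.return_value.__iter__ so that iterating the fake cursor yields canned rows. The key idea is that attribute access on a MagicMock creates child mocks on the fly and return_value is what a call to the mock returns, so the chain models pyhs2.connect(...).cursor(...). Below is a minimal sketch of the same pattern; the count_rows helper and the host argument are hypothetical stand-ins for the hive query code.

from unittest import mock


def count_rows(client):
    # Hypothetical stand-in for the code under test: connect, open a cursor,
    # iterate over the result rows.
    connection = client.connect(host='localhost')
    cursor = connection.cursor()
    return sum(1 for _row in cursor)


def test_count_rows():
    fake_client = mock.MagicMock()
    # Each attribute access creates a child mock, and .return_value is what a
    # call returns, so this chain models client.connect(...).cursor(...).
    fake_client.connect.return_value.cursor.return_value.__iter__ = (
        lambda self: iter([('row-1',), ('row-2',)]))
    assert count_rows(fake_client) == 2
    fake_client.connect.assert_called_once_with(host='localhost')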

Example 102

Project: cloudbase-init Source File: test_x509.py
    @mock.patch('cloudbaseinit.utils.windows.x509.free')
    @mock.patch('cloudbaseinit.utils.windows.x509.malloc')
    @mock.patch('cloudbaseinit.utils.windows.x509.CryptoAPICertManager'
                '._add_system_time_interval')
    @mock.patch('cloudbaseinit.utils.windows.x509.CryptoAPICertManager'
                '._generate_key')
    @mock.patch('cloudbaseinit.utils.windows.x509.CryptoAPICertManager'
                '._get_cert_thumprint')
    @mock.patch('uuid.uuid4')
    @mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
                'CertStrToName')
    @mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
                'CRYPTOAPI_BLOB')
    @mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
                'CRYPT_KEY_PROV_INFO')
    @mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
                'CRYPT_ALGORITHM_IDENTIFIER')
    @mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
                'SYSTEMTIME')
    @mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
                'GetSystemTime')
    @mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
                'CertCreateSelfSignCertificate')
    @mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
                'CertAddEnhancedKeyUsageIdentifier')
    @mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
                'CertOpenStore')
    @mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
                'CertAddCertificateContextToStore')
    @mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
                'CertCloseStore')
    @mock.patch('cloudbaseinit.utils.windows.cryptoapi.'
                'CertFreeCertificateContext')
    def _test_create_self_signed_cert(self, mock_CertFreeCertificateContext,
                                      mock_CertCloseStore,
                                      mock_CertAddCertificateContextToStore,
                                      mock_CertOpenStore,
                                      mock_CertAddEnhancedKeyUsageIdentifier,
                                      mock_CertCreateSelfSignCertificate,
                                      mock_GetSystemTime, mock_SYSTEMTIME,
                                      mock_CRYPT_ALGORITHM_IDENTIFIER,
                                      mock_CRYPT_KEY_PROV_INFO,
                                      mock_CRYPTOAPI_BLOB,
                                      mock_CertStrToName,
                                      mock_uuid4, mock_get_cert_thumprint,
                                      mock_generate_key,
                                      mock_add_system_time_interval,
                                      mock_malloc, mock_free, certstr,
                                      certificate, enhanced_key, store_handle,
                                      context_to_store):

        mock_POINTER = self._ctypes.POINTER
        mock_byref = self._ctypes.byref
        mock_cast = self._ctypes.cast

        mock_uuid4.return_value = 'fake_name'
        mock_CertCreateSelfSignCertificate.return_value = certificate
        mock_CertAddEnhancedKeyUsageIdentifier.return_value = enhanced_key
        mock_CertStrToName.return_value = certstr
        mock_CertOpenStore.return_value = store_handle
        mock_CertAddCertificateContextToStore.return_value = context_to_store
        if (certstr is None or certificate is None or enhanced_key is None
                or store_handle is None or context_to_store is None):
            self.assertRaises(self.x509.cryptoapi.CryptoAPIException,
                              self._x509_manager.create_self_signed_cert,
                              'fake subject', 10, True,
                              self.x509.STORE_NAME_MY)
        else:
            response = self._x509_manager.create_self_signed_cert(
                subject='fake subject')
            mock_cast.assert_called_with(mock_malloc(), mock_POINTER())
            mock_CRYPTOAPI_BLOB.assert_called_once_with()
            mock_CRYPT_KEY_PROV_INFO.assert_called_once_with()
            mock_CRYPT_ALGORITHM_IDENTIFIER.assert_called_once_with()
            mock_SYSTEMTIME.assert_called_once_with()
            mock_GetSystemTime.assert_called_once_with(mock_byref())
            mock_CertCreateSelfSignCertificate.assert_called_once_with(
                None, mock_byref(), 0, mock_byref(),
                mock_byref(), mock_byref(), mock_byref(), None)
            mock_CertAddEnhancedKeyUsageIdentifier.assert_called_with(
                mock_CertCreateSelfSignCertificate(),
                self.x509.cryptoapi.szOID_PKIX_KP_SERVER_AUTH)
            mock_CertOpenStore.assert_called_with(
                self.x509.cryptoapi.CERT_STORE_PROV_SYSTEM, 0, 0,
                self.x509.cryptoapi.CERT_SYSTEM_STORE_LOCAL_MACHINE,
                six.text_type(self.x509.STORE_NAME_MY))
            mock_get_cert_thumprint.assert_called_once_with(
                mock_CertCreateSelfSignCertificate())
            mock_add_system_time_interval.assert_has_calls(
                [mock.call(mock_SYSTEMTIME.return_value,
                           self.x509.X509_END_DATE_INTERVAL),
                 mock.call(mock_SYSTEMTIME.return_value,
                           self.x509.X509_START_DATE_INTERVAL)])
            mock_CertCloseStore.assert_called_once_with(store_handle, 0)
            mock_CertFreeCertificateContext.assert_called_once_with(
                mock_CertCreateSelfSignCertificate())
            mock_free.assert_called_once_with(mock_cast())

            self.assertEqual(mock_get_cert_thumprint.return_value, response)

        mock_generate_key.assert_called_once_with('fake_name', True)
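
A detail worth noting in the cloudbase-init example: when several @mock.patch decorators are stacked, they are applied bottom-up, so the mock created by the bottom-most decorator arrives as the first extra argument (mock_CertFreeCertificateContext above) and the top-most decorator's mock arrives last among the mock arguments (mock_free). Here is a minimal sketch of that ordering rule, using a hypothetical load_report helper.

from unittest import mock

import json
import os


def load_report(path):
    # Hypothetical code under test that touches two collaborators.
    if not os.path.exists(path):
        return None
    with open(path) as fh:
        return json.load(fh)


# Decorators are applied bottom-up: the bottom-most patch produces the first
# mock argument (json.load -> fake_load), the top-most the last (os.path.exists).
@mock.patch('os.path.exists')
@mock.patch('json.load')
def test_load_report(fake_load, fake_exists):
    fake_exists.return_value = True
    fake_load.return_value = {'status': 'ok'}
    with mock.patch('builtins.open', mock.mock_open(read_data='{}')):
        assert load_report('report.json') == {'status': 'ok'}
    fake_exists.assert_called_once_with('report.json')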

Example 103

Project: oauth2client Source File: test_service_account.py
    @mock.patch('oauth2client.client._UTCNOW')
    def test_access_token(self, utcnow):
        # Configure the patch.
        seconds = 11
        NOW = datetime.datetime(1992, 12, 31, second=seconds)
        utcnow.return_value = NOW

        # Create a custom credentials with a mock signer.
        signer = mock.Mock()
        signed_value = b'signed-content'
        signer.sign = mock.Mock(name='sign',
                                return_value=signed_value)
        credentials = service_account.ServiceAccountCredentials(
            self.service_account_email,
            signer,
            private_key_id=self.private_key_id,
            client_id=self.client_id,
        )

        # Begin testing.
        lifetime = 2  # number of seconds in which the token expires
        EXPIRY_TIME = datetime.datetime(1992, 12, 31,
                                        second=seconds + lifetime)

        token1 = u'first_token'
        token_response_first = {
            'access_token': token1,
            'expires_in': lifetime,
        }
        token2 = u'second_token'
        token_response_second = {
            'access_token': token2,
            'expires_in': lifetime,
        }
        http = http_mock.HttpMockSequence([
            ({'status': http_client.OK},
             json.dumps(token_response_first).encode('utf-8')),
            ({'status': http_client.OK},
             json.dumps(token_response_second).encode('utf-8')),
        ])

        # Get Access Token, First attempt.
        self.assertIsNone(credentials.access_token)
        self.assertFalse(credentials.access_token_expired)
        self.assertIsNone(credentials.token_expiry)
        token = credentials.get_access_token(http=http)
        self.assertEqual(credentials.token_expiry, EXPIRY_TIME)
        self.assertEqual(token1, token.access_token)
        self.assertEqual(lifetime, token.expires_in)
        self.assertEqual(token_response_first,
                         credentials.token_response)
        # Two utcnow calls are expected:
        # - get_access_token() -> _do_refresh_request (setting expires in)
        # - get_access_token() -> _expires_in()
        expected_utcnow_calls = [mock.call()] * 2
        self.assertEqual(expected_utcnow_calls, utcnow.mock_calls)
        # One call to sign() expected: Actual refresh was needed.
        self.assertEqual(len(signer.sign.mock_calls), 1)

        # Get Access Token, Second Attempt (not expired)
        self.assertEqual(credentials.access_token, token1)
        self.assertFalse(credentials.access_token_expired)
        token = credentials.get_access_token(http=http)
        # Make sure no refresh occurred since the token was not expired.
        self.assertEqual(token1, token.access_token)
        self.assertEqual(lifetime, token.expires_in)
        self.assertEqual(token_response_first, credentials.token_response)
        # Three more utcnow calls are expected:
        # - access_token_expired
        # - get_access_token() -> access_token_expired
        # - get_access_token -> _expires_in
        expected_utcnow_calls = [mock.call()] * (2 + 3)
        self.assertEqual(expected_utcnow_calls, utcnow.mock_calls)
        # No call to sign() expected: the token was not expired.
        self.assertEqual(len(signer.sign.mock_calls), 1 + 0)

        # Get Access Token, Third Attempt (force expiration)
        self.assertEqual(credentials.access_token, token1)
        credentials.token_expiry = NOW  # Manually force expiry.
        self.assertTrue(credentials.access_token_expired)
        token = credentials.get_access_token(http=http)
        # Make sure refresh occurred since the token was expired.
        self.assertEqual(token2, token.access_token)
        self.assertEqual(lifetime, token.expires_in)
        self.assertFalse(credentials.access_token_expired)
        self.assertEqual(token_response_second,
                         credentials.token_response)
        # Five more utcnow calls are expected:
        # - access_token_expired
        # - get_access_token -> access_token_expired
        # - get_access_token -> _do_refresh_request
        # - get_access_token -> _expires_in
        # - access_token_expired
        expected_utcnow_calls = [mock.call()] * (2 + 3 + 5)
        self.assertEqual(expected_utcnow_calls, utcnow.mock_calls)
        # One more call to sign() expected: Actual refresh was needed.
        self.assertEqual(len(signer.sign.mock_calls), 1 + 0 + 1)

        self.assertEqual(credentials.access_token, token2)
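
The oauth2client test freezes the clock by patching the module-level _UTCNOW helper, configures it with return_value, and then compares utcnow.mock_calls against a list built from mock.call() to count exactly how often the clock was read. A minimal sketch of the same technique follows, with a hypothetical Token class and time.time as the patch target.

from unittest import mock

import time


class Token(object):
    # Hypothetical credential-like object that caches an expiry timestamp.
    def __init__(self, lifetime):
        self.lifetime = lifetime
        self.expiry = None

    def refresh(self):
        self.expiry = time.time() + self.lifetime

    def expired(self):
        return self.expiry is None or time.time() >= self.expiry


@mock.patch('time.time')
def test_token_expiry(fake_time):
    fake_time.return_value = 1000.0      # freeze the clock
    token = Token(lifetime=60)
    token.refresh()
    assert not token.expired()

    fake_time.return_value = 1061.0      # jump past the expiry
    assert token.expired()

    # Every read of the clock was recorded: refresh() plus two expired() checks.
    assert fake_time.mock_calls == [mock.call()] * 3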

Example 104

Project: stackalytics Source File: test_vcs.py
Function: test_git_log
    def test_git_log(self):
        with mock.patch('sh.git') as git_mock:
            git_mock.return_value = '''
commit_id:b5a416ac344160512f95751ae16e6612aefd4a57
date:1369119386
author_name:Akihiro MOTOKI
author_email:[email protected]
subject:Remove class-based import in the code repo
message:Fixes bug 1167901.

This commit also removes backslashes for line break.

Change-Id: Id26fdfd2af4862652d7270aec132d40662efeb96

diff_stat:

 21 files changed, 340 insertions(+), 408 deletions(-)
commit_id:5be031f81f76d68c6e4cbaad2247044aca179843
date:1370975889
author_name:Monty Taylor
author_email:[email protected]
subject:Remove explicit distribute depend.
message:Causes issues with the recent re-merge with setuptools. Advice from
upstream is to stop doing explicit depends.

Change-Id: I70638f239794e78ba049c60d2001190910a89c90

diff_stat:

 1 file changed, 1 deletion(-)
commit_id:2dcb4fa4aa1925ffbd90d1cc7556a13a1bc45d1c
date:1369831203
author_name:Mark McClain
author_email:[email protected]
subject:add readme for 2.2.2
message:Fixes bug: 1234567
Also fixes bug 987654
Change-Id: Id32a4a72ec1d13992b306c4a38e73605758e26c7

diff_stat:

 1 file changed, 8 insertions(+)
commit_id:06d321b6b7681b162cd3231b5bdd92b17eb4f401
date:1369831203
author_name:John Doe
author_email:[email protected]
subject:add readme for 2.2.2
message: implements blueprint fix-me.
Co-Authored-By: Anonymous <wrong@email>
Change-Id: Id32a4a72ec1d13992b306c4a38e73605758e26c7

diff_stat:

 0 files changed
commit_id:913c86a9d5b6a1b74db36266e996cb4d6073f75b
date:1369831203
author_name:Doug Hoffner
author_email:[email protected]
subject:add readme for 2.2.2
message:Change-Id: Id32a4a72ec1d13992b306c4a38e73605758e26c7
Co-Authored-By: some friend of mine

diff_stat:

 0 files changed, 0 insertions(+), 0 deletions(-)
commit_id:2f3103a96c4d234a4fcc0b0211a20308c0d342e7
date:1397687866
author_name:James E. Blair
author_email:[email protected]
subject:Reduce IAD usage by 50%
message:At provider's request.

Change-Id: I976eaff357bf0ad4bce2a7fd5fe6fd81750276c5

diff_stat:
commit_id:12811c76f3a8208b36f81e61451ec17d227b4e58
date:1369831203
author_name:Jimi Hendrix
author_email:[email protected]
subject:adds support off co-authors
message:Change-Id: Id811c762ec1d13992b306c4a38e7360575e61451
Co-Authored-By: Tupac Shakur <[email protected]>
Also-By: Bob Dylan <[email protected]>
Also-By: Anonymous <wrong@email>
Also-By: Winnie the Pooh [email protected]

diff_stat:

 0 files changed, 0 insertions(+), 0 deletions(-)
commit_id:d1af9cbe0187e1a65cf1eb46fb1650cf619a7b3a
date:1369831300
author_name:Vasya Pupkin
author_email:[email protected]
subject:adds new support of co-authors
message:Change-Id: I577dfdf7f65a0c883ddbcfda62daf8c5f9c746c1
Co-Authored-By: Tupac Shakur <[email protected]>
Also: Bob Dylan <[email protected]>
Co-Authored: Anonymous <[email protected]>
Co-Author-By: Anonymous2 <[email protected]>
Co-Author: Winnie the Pooh [email protected]

diff_stat:

 0 files changed, 0 insertions(+), 0 deletions(-)
            '''
            commits = list(self.git.log('dummy', 'dummy'))

        commits_expected = 8
        self.assertEqual(commits_expected, len(commits))

        self.assertEqual(21, commits[0]['files_changed'])
        self.assertEqual(340, commits[0]['lines_added'])
        self.assertEqual(408, commits[0]['lines_deleted'])
        self.assertEqual(['1167901'], commits[0]['bug_id'])

        self.assertEqual(1, commits[1]['files_changed'])
        self.assertEqual(0, commits[1]['lines_added'])
        self.assertEqual(1, commits[1]['lines_deleted'])

        self.assertEqual(1, commits[2]['files_changed'])
        self.assertEqual(8, commits[2]['lines_added'])
        self.assertEqual(0, commits[2]['lines_deleted'])
        self.assertEqual(set(['987654', '1234567']),
                         set(commits[2]['bug_id']))

        self.assertEqual(0, commits[3]['files_changed'])
        self.assertEqual(0, commits[3]['lines_added'])
        self.assertEqual(0, commits[3]['lines_deleted'])
        self.assertEqual(set(['dummy:fix-me']),
                         set(commits[3]['blueprint_id']))
        self.assertFalse('coauthor' in commits[3])

        self.assertEqual(0, commits[4]['files_changed'])
        self.assertEqual(0, commits[4]['lines_added'])
        self.assertEqual(0, commits[4]['lines_deleted'])
        self.assertFalse('coauthor' in commits[4])

        self.assertEqual('[email protected]', commits[5]['author_email'])
        self.assertEqual(0, commits[5]['files_changed'])
        self.assertEqual(0, commits[5]['lines_added'])
        self.assertEqual(0, commits[5]['lines_deleted'])

        self.assertIn(
            {'author_name': 'Tupac Shakur',
             'author_email': '[email protected]'},
            commits[6]['coauthor'])

        self.assertIn(
            {'author_name': 'Bob Dylan',
             'author_email': '[email protected]'},
            commits[6]['coauthor'])

        self.assertIn(
            {'author_name': 'Winnie the Pooh',
             'author_email': '[email protected]'},
            commits[6]['coauthor'])

        self.assertIn(
            {'author_name': 'Tupac Shakur',
             'author_email': '[email protected]'},
            commits[7]['coauthor'])

        self.assertNotIn(
            {'author_name': 'Bob Dylan',
             'author_email': '[email protected]'},
            commits[7]['coauthor'])

        self.assertNotIn(
            {'author_name': 'Anonymous',
             'author_email': '[email protected]'},
            commits[7]['coauthor'])

        self.assertNotIn(
            {'author_name': 'Anonymous2',
             'author_email': '[email protected]'},
            commits[7]['coauthor'])

        self.assertIn(
            {'author_name': 'Winnie the Pooh',
             'author_email': '[email protected]'},
            commits[7]['coauthor'])
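
In the stackalytics test the patched target (sh.git) is itself called like a function, so a plain return_value is enough to feed the parser a canned git log. A minimal sketch of the same shape, with a hypothetical current_branch wrapper and subprocess.check_output standing in for the command-line call:

from unittest import mock

import subprocess


def current_branch():
    # Hypothetical wrapper around a command-line call.
    out = subprocess.check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD'])
    return out.decode('utf-8').strip()


def test_current_branch():
    with mock.patch('subprocess.check_output') as fake_check_output:
        # The patched callable returns canned output instead of running git.
        fake_check_output.return_value = b'feature/mock-docs\n'
        assert current_branch() == 'feature/mock-docs'
    fake_check_output.assert_called_once_with(
        ['git', 'rev-parse', '--abbrev-ref', 'HEAD'])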

Example 105

Project: ANALYSE Source File: test_shib.py
    @unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
    def test_shib_login(self):
        """
        Tests that:
          * shib credentials that match an existing ExternalAuthMap with a linked active user logs the user in
          * shib credentials that match an existing ExternalAuthMap with a linked inactive user shows error page
          * shib credentials that match an existing ExternalAuthMap without a linked user and also match the email
            of an existing user without an existing ExternalAuthMap links the two and log the user in
          * shib credentials that match an existing ExternalAuthMap without a linked user and also match the email
            of an existing user that already has an ExternalAuthMap causes an error (403)
          * shib credentials that do not match an existing ExternalAuthMap causes the registration form to appear
        """

        user_w_map = UserFactory.create(email='[email protected]')
        extauth = ExternalAuthMap(external_id='[email protected]',
                                  external_email='',
                                  external_domain='shib:https://idp.stanford.edu/',
                                  external_credentials="",
                                  user=user_w_map)
        user_wo_map = UserFactory.create(email='[email protected]')
        user_w_map.save()
        user_wo_map.save()
        extauth.save()

        inactive_user = UserFactory.create(email='[email protected]')
        inactive_user.is_active = False
        inactive_extauth = ExternalAuthMap(external_id='[email protected]',
                                           external_email='',
                                           external_domain='shib:https://idp.stanford.edu/',
                                           external_credentials="",
                                           user=inactive_user)
        inactive_user.save()
        inactive_extauth.save()

        idps = ['https://idp.stanford.edu/', 'https://someother.idp.com/']
        remote_users = ['[email protected]', '[email protected]',
                        'testuser2@someother_idp.com', '[email protected]']

        for idp in idps:
            for remote_user in remote_users:
                request = self.request_factory.get('/shib-login')
                request.session = import_module(settings.SESSION_ENGINE).SessionStore()  # empty session
                request.META.update({'Shib-Identity-Provider': idp,
                                     'REMOTE_USER': remote_user,
                                     'mail': remote_user})
                request.user = AnonymousUser()

                mako_middleware_process_request(request)
                with patch('external_auth.views.AUDIT_LOG') as mock_audit_log:
                    response = shib_login(request)
                audit_log_calls = mock_audit_log.method_calls

                if idp == "https://idp.stanford.edu/" and remote_user == '[email protected]':
                    self.assertIsInstance(response, HttpResponseRedirect)
                    self.assertEqual(request.user, user_w_map)
                    self.assertEqual(response['Location'], '/')
                    # verify logging:
                    self.assertEquals(len(audit_log_calls), 2)
                    self._assert_shib_login_is_logged(audit_log_calls[0], remote_user)
                    method_name, args, _kwargs = audit_log_calls[1]
                    self.assertEquals(method_name, 'info')
                    self.assertEquals(len(args), 1)
                    self.assertIn(u'Login success', args[0])
                    self.assertIn(remote_user, args[0])
                elif idp == "https://idp.stanford.edu/" and remote_user == '[email protected]':
                    self.assertEqual(response.status_code, 403)
                    self.assertIn("Account not yet activated: please look for link in your email", response.content)
                    # verify logging:
                    self.assertEquals(len(audit_log_calls), 2)
                    self._assert_shib_login_is_logged(audit_log_calls[0], remote_user)
                    method_name, args, _kwargs = audit_log_calls[1]
                    self.assertEquals(method_name, 'warning')
                    self.assertEquals(len(args), 1)
                    self.assertIn(u'is not active after external login', args[0])
                    # self.assertEquals(remote_user, args[1])
                elif idp == "https://idp.stanford.edu/" and remote_user == '[email protected]':
                    self.assertIsNotNone(ExternalAuthMap.objects.get(user=user_wo_map))
                    self.assertIsInstance(response, HttpResponseRedirect)
                    self.assertEqual(request.user, user_wo_map)
                    self.assertEqual(response['Location'], '/')
                    # verify logging:
                    self.assertEquals(len(audit_log_calls), 2)
                    self._assert_shib_login_is_logged(audit_log_calls[0], remote_user)
                    method_name, args, _kwargs = audit_log_calls[1]
                    self.assertEquals(method_name, 'info')
                    self.assertEquals(len(args), 1)
                    self.assertIn(u'Login success', args[0])
                    self.assertIn(remote_user, args[0])
                elif idp == "https://someother.idp.com/" and remote_user in \
                            ['[email protected]', '[email protected]', '[email protected]']:
                    self.assertEqual(response.status_code, 403)
                    self.assertIn("You have already created an account using an external login", response.content)
                    # no audit logging calls
                    self.assertEquals(len(audit_log_calls), 0)
                else:
                    self.assertEqual(response.status_code, 200)
                    self.assertContains(response,
                                        ("Preferences for {platform_name}"
                                         .format(platform_name=settings.PLATFORM_NAME)))
                    # no audit logging calls
                    self.assertEquals(len(audit_log_calls), 0)
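
The shib test patches the module-level AUDIT_LOG only around the call under test and then inspects mock_audit_log.method_calls, where every entry unpacks into a (method name, args, kwargs) triple. A minimal sketch of that inspection pattern follows; the login helper and the module-level LOG are hypothetical, and in a real project the patch target would be something like 'yourpackage.views.LOG'.

from unittest import mock

import logging

LOG = logging.getLogger('myapp.audit')   # hypothetical module-level logger


def login(user):
    # Hypothetical code under test that emits an audit record.
    LOG.info('Login success for %s', user)
    return True


def test_login_is_audited():
    # Patch the logger only around the call under test.
    with mock.patch(__name__ + '.LOG') as fake_log:
        assert login('alice')

    # method_calls lists every call made on the mock's methods ...
    assert len(fake_log.method_calls) == 1
    # ... and each entry unpacks into (method name, positional args, kwargs).
    name, args, kwargs = fake_log.method_calls[0]
    assert name == 'info'
    assert 'alice' in args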

Example 106

Project: girder Source File: lib_test.py
    def testRestCore(self):
        self.assertTrue(self.user['admin'])

        # Test authentication with bad args
        flag = False
        try:
            self.client.authenticate()
        except Exception:
            flag = True

        self.assertTrue(flag)

        # Test authentication failure
        flag = False
        try:
            self.client.authenticate(username=self.user['login'], password='wrong')
        except girder_client.AuthenticationError:
            flag = True

        self.assertTrue(flag)

        # Interactive login (successfully)
        with mock.patch('six.moves.input', return_value=self.user['login']),\
                mock.patch('getpass.getpass', return_value='password'):
            self.client.authenticate(interactive=True)

        # /user/me should now return our user info
        user = self.client.getResource('user/me')
        self.assertEqual(user['login'], 'mylogin')

        # Test HTTP error case
        flag = False
        try:
            self.client.getResource('user/badId')
        except girder_client.HttpError as e:
            self.assertEqual(e.status, 400)
            self.assertEqual(e.method, 'GET')
            resp = json.loads(e.responseText)
            self.assertEqual(resp['type'], 'validation')
            self.assertEqual(resp['field'], 'id')
            self.assertEqual(resp['message'], 'Invalid ObjectId: badId')
            flag = True

        self.assertTrue(flag)

        # Test some folder routes
        folders = list(self.client.listFolder(
            parentId=user['_id'], parentFolderType='user'))
        self.assertEqual(len(folders), 2)

        privateFolder = publicFolder = None
        for folder in folders:
            if folder['name'] == 'Public':
                publicFolder = folder
            elif folder['name'] == 'Private':
                privateFolder = folder

        self.assertNotEqual(privateFolder, None)
        self.assertNotEqual(publicFolder, None)

        self.assertEqual(self.client.getFolder(privateFolder['_id']), privateFolder)

        acl = self.client.getFolderAccess(privateFolder['_id'])
        self.assertIn('users', acl)
        self.assertIn('groups', acl)

        self.client.setFolderAccess(privateFolder['_id'], json.dumps(acl), public=False)
        self.assertEqual(acl, self.client.getFolderAccess(privateFolder['_id']))

        # Test recursive ACL propagation (not very robust test yet)
        self.client.createFolder(privateFolder['_id'], name='Subfolder')
        self.client.inheritAccessControlRecursive(privateFolder['_id'])

        # Test collection creation and retrieval
        c1 = self.client.createCollection('c1', public=False)
        c2 = self.client.createCollection('c2', public=True)
        collections = list(self.client.listCollection())
        self.assertEqual(len(collections), 2)
        ids = [c['_id'] for c in collections]
        self.assertIn(c1['_id'], ids)
        self.assertIn(c2['_id'], ids)
        c1 = self.client.getCollection(c1['_id'])
        c2 = self.client.getCollection(c2['_id'])
        self.assertEqual(c1['name'], 'c1')
        self.assertEqual(c2['name'], 'c2')
        self.assertFalse(c1['public'])
        self.assertTrue(c2['public'])

        # Test user creation and retrieval
        u1 = self.client.createUser(
            'user1', '[email protected]', 'John', 'Doe', 'password', True)
        u2 = self.client.createUser(
            'user2', '[email protected]', 'John', 'Doe', 'password')
        users = list(self.client.listUser())
        self.assertEqual(len(users), 3)
        ids = [u['_id'] for u in users]
        self.assertIn(u1['_id'], ids)
        self.assertIn(u2['_id'], ids)
        u1 = self.client.getUser(u1['_id'])
        u2 = self.client.getUser(u2['_id'])
        self.assertEqual(u1['login'], 'user1')
        self.assertEqual(u2['login'], 'user2')
        self.assertTrue(u1['admin'])
        self.assertFalse(u2['admin'])
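
The interactive login above combines two mock.patch context managers in a single with statement, giving each a canned return_value so that six.moves.input and getpass.getpass never touch a real terminal. A minimal sketch of the same idea (on Python 3 the input target is builtins.input); the ask_credentials prompt is hypothetical.

from unittest import mock

import getpass


def ask_credentials():
    # Hypothetical interactive prompt, similar to the girder client login.
    username = input('Login: ')
    password = getpass.getpass('Password: ')
    return username, password


def test_ask_credentials():
    # Two patches combined in one with statement, each with a canned
    # return_value, so no real terminal interaction happens.
    with mock.patch('builtins.input', return_value='mylogin'), \
            mock.patch('getpass.getpass', return_value='password'):
        assert ask_credentials() == ('mylogin', 'password')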

Example 107

Project: socorro Source File: test_views.py
    @mock.patch('crashstats.crashstats.models.Bugs.get')
    @mock.patch('requests.post')
    def test_topcrashers(self, rpost, bugs_get):

        def mocked_bugs(**options):
            return {
                "hits": [
                    {"id": 123456789,
                     "signature": "Something"},
                    {"id": 22222,
                     "signature": u"FakeSignature1 \u7684 Japanese"},
                    {"id": 33333,
                     "signature": u"FakeSignature1 \u7684 Japanese"}
                ]
            }
        bugs_get.side_effect = mocked_bugs

        def mocked_signature_first_date_get(**options):
            return {
                "hits": [
                    {
                        "signature": u"FakeSignature1 \u7684 Japanese",
                        "first_date": datetime.datetime(
                            2000, 1, 1, 12, 23, 34,
                            tzinfo=utc,
                        ),
                        "first_build": "20000101122334",
                    },
                    {
                        "signature": u"mozCool()",
                        "first_date": datetime.datetime(
                            2016, 5, 2, 0, 0, 0,
                            tzinfo=utc,
                        ),
                        "first_build": "20160502000000",
                    },
                ],
                "total": 2
            }

        SignatureFirstDate.implementation().get.side_effect = (
            mocked_signature_first_date_get
        )

        def mocked_supersearch_get(**params):
            if '_columns' not in params:
                params['_columns'] = []

            # By default we range by date, so there should be no filter on
            # the build id.
            ok_('build_id' not in params)

            if 'hang_type' not in params['_aggs.signature']:
                # Return results for the previous week.
                results = {
                    'hits': [],
                    'facets': {
                        'signature': [{
                            'term': u'FakeSignature1 \u7684 Japanese',
                            'count': 100,
                            'facets': {
                                'platform': [{
                                    'term': 'WaterWolf',
                                    'count': 50,
                                }],
                            }
                        }]
                    },
                    'total': 250
                }
            else:
                # Return results for the current week.
                results = {
                    'hits': [],
                    'facets': {
                        'signature': [{
                            'term': u'FakeSignature1 \u7684 Japanese',
                            'count': 100,
                            'facets': {
                                'platform': [{
                                    'term': 'WaterWolf',
                                    'count': 50,
                                }],
                                'is_garbage_collecting': [{
                                    'term': 't',
                                    'count': 50,
                                }],
                                'hang_type': [{
                                    'term': 1,
                                    'count': 50,
                                }],
                                'process_type': [{
                                    'term': 'plugin',
                                    'count': 50,
                                }],
                                'startup_crash': [{
                                    'term': 'T',
                                    'count': 100,
                                }],
                                'histogram_uptime': [{
                                    'term': 0,
                                    'count': 60,
                                }],
                                'cardinality_install_time': {
                                    'value': 13,
                                },
                            }
                        }, {
                            'term': u'mozCool()',
                            'count': 80,
                            'facets': {
                                'platform': [{
                                    'term': 'WaterWolf',
                                    'count': 50,
                                }],
                                'is_garbage_collecting': [{
                                    'term': 't',
                                    'count': 50,
                                }],
                                'hang_type': [{
                                    'term': 1,
                                    'count': 50,
                                }],
                                'process_type': [{
                                    'term': 'browser',
                                    'count': 50,
                                }],
                                'startup_crash': [{
                                    'term': 'T',
                                    'count': 50,
                                }],
                                'histogram_uptime': [{
                                    'term': 0,
                                    'count': 40,
                                }],
                                'cardinality_install_time': {
                                    'value': 11,
                                },
                            }
                        }]
                    },
                    'total': 250
                }

            results['hits'] = self.only_certain_columns(
                results['hits'],
                params['_columns']
            )
            return results
        SuperSearchUnredacted.implementation().get.side_effect = (
            mocked_supersearch_get
        )

        url = self.base_url + '?product=WaterWolf&version=19.0'

        response = self.client.get(self.base_url, {'product': 'WaterWolf'})
        ok_(url in response['Location'])

        # Test that several versions do not raise an error.
        response = self.client.get(self.base_url, {
            'product': 'WaterWolf',
            'version': ['19.0', '20.0'],
        })
        eq_(response.status_code, 200)

        response = self.client.get(self.base_url, {
            'product': 'WaterWolf',
            'version': '19.0',
        })
        eq_(response.status_code, 200)
        doc = pyquery.PyQuery(response.content)
        selected_count = doc('.tc-result-count a[class="selected"]')
        eq_(selected_count.text(), '50')

        # there's actually only one such TD
        bug_ids = [x.text for x in doc('td.bug_ids_more > a')]
        # higher bug number first
        eq_(bug_ids, ['33333', '22222'])

        # Check the first appearance date is there.
        ok_('2000-01-01 12:23:34' in response.content)

        response = self.client.get(self.base_url, {
            'product': 'WaterWolf',
            'version': '19.0',
            '_facets_size': '100',
        })
        eq_(response.status_code, 200)
        doc = pyquery.PyQuery(response.content)
        selected_count = doc('.tc-result-count a[class="selected"]')
        eq_(selected_count.text(), '100')

        # Check the startup crash icon is there.
        ok_(
            'Potential Startup Crash, 50 out of 80 crashes happened during '
            'startup' in response.content
        )
        ok_(
            'Startup Crash, all crashes happened during startup'
            in response.content
        )
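
Instead of a fixed return_value, the topcrashers test assigns whole functions to side_effect (mocked_bugs, mocked_supersearch_get), so the fake receives the real call arguments and can branch on them or assert against them. A minimal sketch of a callable side_effect follows, with a hypothetical resolve_or_default helper and socket.gethostbyname as the patch target.

from unittest import mock

import socket


def resolve_or_default(hostname):
    # Hypothetical helper: resolve a hostname, falling back to localhost.
    try:
        return socket.gethostbyname(hostname)
    except socket.gaierror:
        return '127.0.0.1'


@mock.patch('socket.gethostbyname')
def test_resolve_or_default(fake_resolve):
    def fake_lookup(hostname):
        # A callable side_effect receives the real call arguments and can
        # branch on them (or raise), unlike a fixed return_value.
        if hostname == 'db.internal':
            return '10.0.0.5'
        raise socket.gaierror('unknown host')

    fake_resolve.side_effect = fake_lookup
    assert resolve_or_default('db.internal') == '10.0.0.5'
    assert resolve_or_default('nope.invalid') == '127.0.0.1'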

Example 108

Project: barman Source File: test_backup.py
    @patch('barman.backup.BackupManager.get_available_backups')
    def test_delete_backup(self, mock_available_backups, tmpdir, caplog):
        """
        Simple test for the deletion of a backup.
        We want to test the behaviour of the delete_backup method.
        """
        # Setup of the test backup_manager
        backup_manager = build_backup_manager()
        backup_manager.server.config.name = 'TestServer'
        backup_manager.server.config.barman_lock_directory = tmpdir.strpath
        backup_manager.server.config.backup_options = []

        # Create a fake backup directory inside tmpdir (old format)

        base_dir = tmpdir.mkdir('base')
        backup_dir = base_dir.mkdir('fake_backup_id')
        pg_data = backup_dir.mkdir('pgdata')
        pg_data_v2 = backup_dir.mkdir('data')
        wal_dir = tmpdir.mkdir('wals')
        wal_history_file02 = wal_dir.join('00000002.history')
        wal_history_file03 = wal_dir.join('00000003.history')
        wal_history_file04 = wal_dir.join('00000004.history')
        wal_history_file02.write('1\t0/2000028\tat restore point "myrp"\n')
        wal_history_file03.write('1\t0/2000028\tat restore point "myrp"\n')
        wal_history_file04.write('1\t0/2000028\tat restore point "myrp"\n')
        wal_history_file04.write('2\t0/3000028\tunknown\n')
        wal_file = wal_dir.join('0000000100000000/000000010000000000000001')
        wal_file.ensure()
        xlog_db = wal_dir.join('xlog.db')
        xlog_db.write(
            '000000010000000000000001\t42\t43\tNone\n'
            '00000002.history\t42\t43\tNone\n'
            '00000003.history\t42\t43\tNone\n'
            '00000004.history\t42\t43\tNone\n')
        backup_manager.server.xlogdb.return_value.__enter__.return_value = (
            xlog_db.open())
        backup_manager.server.config.basebackups_directory = base_dir.strpath
        backup_manager.server.config.wals_directory = wal_dir.strpath
        # The following tablespaces are defined in the default backup info
        # generated by build_test_backup_info
        b_info = build_test_backup_info(
            backup_id='fake_backup_id',
            server=backup_manager.server,
        )

        # Make sure we are not trying to delete any WAL file,
        # just by having a previous backup
        b_pre_info = build_test_backup_info(
            backup_id='fake_backup',
            server=backup_manager.server,
        )
        mock_available_backups.return_value = {
            "fake_backup": b_pre_info,
            "fake_backup_id": b_info,
        }

        # Test 1: minimum redundancy not satisfied
        caplog_reset(caplog)
        backup_manager.server.config.minimum_redundancy = 2
        b_info.set_attribute('backup_version', 1)
        build_backup_directories(b_info)
        backup_manager.delete_backup(b_info)
        assert 'WARNING  Skipping delete of backup ' in caplog.text
        assert 'ERROR' not in caplog.text
        assert os.path.exists(pg_data.strpath)
        assert not os.path.exists(pg_data_v2.strpath)
        assert os.path.exists(wal_file.strpath)
        assert os.path.exists(wal_history_file02.strpath)
        assert os.path.exists(wal_history_file03.strpath)
        assert os.path.exists(wal_history_file04.strpath)

        # Test 2: normal delete expecting no errors (old format)
        caplog_reset(caplog)
        backup_manager.server.config.minimum_redundancy = 1
        b_info.set_attribute('backup_version', 1)
        build_backup_directories(b_info)
        backup_manager.delete_backup(b_info)
        # the backup must not exist on disk anymore
        assert 'WARNING' not in caplog.text
        assert 'ERROR' not in caplog.text
        assert not os.path.exists(pg_data.strpath)
        assert not os.path.exists(pg_data_v2.strpath)
        assert os.path.exists(wal_file.strpath)
        assert os.path.exists(wal_history_file02.strpath)
        assert os.path.exists(wal_history_file03.strpath)
        assert os.path.exists(wal_history_file04.strpath)

        # Test 3: delete the backup again, expect a failure in log
        caplog_reset(caplog)
        backup_manager.delete_backup(b_info)
        assert 'ERROR    Failure deleting backup fake_backup_id' in caplog.text
        assert not os.path.exists(pg_data.strpath)
        assert not os.path.exists(pg_data_v2.strpath)
        assert os.path.exists(wal_file.strpath)
        assert os.path.exists(wal_history_file02.strpath)
        assert os.path.exists(wal_history_file03.strpath)
        assert os.path.exists(wal_history_file04.strpath)

        # Test 4: normal delete expecting no errors (new format)
        caplog_reset(caplog)
        b_info.set_attribute('backup_version', 2)
        build_backup_directories(b_info)
        backup_manager.delete_backup(b_info)
        assert 'WARNING' not in caplog.text
        assert 'ERROR' not in caplog.text
        assert not os.path.exists(pg_data.strpath)
        assert not os.path.exists(pg_data_v2.strpath)
        assert os.path.exists(wal_file.strpath)
        assert os.path.exists(wal_history_file02.strpath)
        assert os.path.exists(wal_history_file03.strpath)
        assert os.path.exists(wal_history_file04.strpath)

        # Test 5: normal delete of first backup no errors and no skip
        # removing one of the two backups present (new format)
        # and all the previous wal
        caplog_reset(caplog)
        b_pre_info.set_attribute('backup_version', 2)
        build_backup_directories(b_pre_info)
        backup_manager.delete_backup(b_pre_info)
        assert 'WARNING' not in caplog.text
        assert 'ERROR' not in caplog.text
        assert not os.path.exists(pg_data.strpath)
        assert not os.path.exists(pg_data_v2.strpath)
        assert not os.path.exists(wal_file.strpath)
        assert os.path.exists(wal_history_file02.strpath)
        assert os.path.exists(wal_history_file03.strpath)
        assert os.path.exists(wal_history_file04.strpath)

        # Test 6: normal delete of first backup no errors and no skip
        # removing one of the two backups present (new format)
        # the previous wal is retained as it is on a different timeline
        caplog_reset(caplog)
        wal_file.ensure()
        b_pre_info.set_attribute('timeline', 2)
        b_pre_info.set_attribute('backup_version', 2)
        build_backup_directories(b_pre_info)
        backup_manager.delete_backup(b_pre_info)
        assert 'WARNING' not in caplog.text
        assert 'ERROR' not in caplog.text
        assert not os.path.exists(pg_data.strpath)
        assert not os.path.exists(pg_data_v2.strpath)
        assert os.path.exists(wal_file.strpath)
        assert os.path.exists(wal_history_file02.strpath)
        assert os.path.exists(wal_history_file03.strpath)
        assert os.path.exists(wal_history_file04.strpath)

        # Test 7: simulate an error deleting the backup.
        with patch('barman.backup.BackupManager.delete_backup_data')\
                as mock_delete_data:
            caplog_reset(caplog)
            # We force delete_pgdata method to raise an exception.
            mock_delete_data.side_effect = OSError('TestError')
            wal_file.ensure()
            b_pre_info.set_attribute('backup_version', 2)
            build_backup_directories(b_pre_info)
            backup_manager.delete_backup(b_info)
            assert 'TestError' in caplog.text
            assert os.path.exists(wal_file.strpath)
            assert os.path.exists(wal_history_file02.strpath)
            assert os.path.exists(wal_history_file03.strpath)
            assert os.path.exists(wal_history_file04.strpath)
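
Test 7 above layers a second patch inside the test and assigns an exception instance to side_effect, so every call to the patched method raises and the error-handling path can be exercised. A minimal sketch of the same failure-injection pattern; remove_backup is hypothetical and shutil.rmtree stands in for the deletion helper.

from unittest import mock

import shutil


def remove_backup(path):
    # Hypothetical deletion helper that reports failures instead of raising.
    try:
        shutil.rmtree(path)
    except OSError as exc:
        return 'ERROR: %s' % exc
    return 'OK'


def test_remove_backup_failure():
    with mock.patch('shutil.rmtree') as fake_rmtree:
        # An exception assigned to side_effect is raised on every call.
        fake_rmtree.side_effect = OSError('TestError')
        assert remove_backup('/backups/fake_backup_id') == 'ERROR: TestError'
    fake_rmtree.assert_called_once_with('/backups/fake_backup_id')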

Example 109

Project: fuel-plugins Source File: test_validator_v4.py
    @mock.patch('fuel_plugin_builder.validators.validator_v4.utils')
    def test_check_tasks_schema_1_0_validation_failed(self, utils_mock, *args):
        checks = [
            {
                'data': {
                    'id': 'task-id',
                    'type': 'shell',
                    'parameters': {
                        'timeout': 3
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                'errorTextContains': "'cmd' is a required property, "
                                     "value path '0 -> parameters'"
            },
            {
                'data': {
                    'id': 'task-id',
                    'type': 'puppet',
                    'parameters': {
                        'timeout': 3
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                'errorTextContains': "'puppet_manifest' is a required property"
                                     ", value path '0 -> parameters'"
            },
            {
                'data': {
                    'id': 'task-id',
                    'type': 'puppet',
                    'parameters': {
                        'timeout': 3,
                        'cmd': 'xx'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                'errorTextContains': "'puppet_manifest' is a required property"
                                     ", value path '0 -> parameters'"
            },
            {
                'data': {
                    'id': 'task-id',
                    'type': 'shell',
                    'parameters': {
                        'timeout': 3,
                        'puppet_manifest': 'xx',
                        'puppet_modules': 'yy',
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                'errorTextContains': "'cmd' is a required property, value path"
                                     " '0 -> parameters'"
            },
            {
                'data': {
                    'id': 'task-id',
                    'type': 'puppet',
                    'parameters': {
                        'timeout': 3,
                        'puppet_manifest': 'xx',
                        'puppet_modules': 'yy',
                        'retries': 'asd',
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                'errorTextContains': "'asd' is not of type 'integer', value "
                                     "path '0 -> parameters -> retries'"
            },
            {
                'data': {
                    'id': 'task-id',
                    'type': 'puppet',
                    'parameters': {
                        'timeout': 3,
                        'puppet_manifest': 'xx',
                        'puppet_modules': '',
                        'retries': 1,
                    },
                    'stage': 'pre_deployment',
                    'role': '*'
                },
                'errorTextContains': "'' is too short, value path '0 -> "
                                     "parameters -> puppet_modules'"
            },
            {
                'data': {
                    'id': 'task-id',
                    'type': 'puppet',
                    'parameters': {
                        'timeout': 3,
                        'puppet_manifest': '',
                        'puppet_modules': 'yy',
                        'retries': 1,
                    },
                    'stage': 'pre_deployment',
                    'role': '*'
                },
                'errorTextContains': "'' is too short, value path '0 -> "
                                     "parameters -> puppet_manifest'"
            }
        ]

        for check in checks:
            utils_mock.parse_yaml.return_value = [check['data']]
            self.assertRaisesRegexp(
                errors.ValidationError,
                check['errorTextContains'],
                self.validator.check_deployment_tasks)
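
The validator test patches the utils module once and then reassigns utils_mock.parse_yaml.return_value inside the loop, so a single patched collaborator drives a whole table of data-driven cases. A minimal sketch of that loop pattern follows, with a hypothetical count_enabled function reading a JSON config instead of YAML.

from unittest import mock

import json


def count_enabled(path):
    # Hypothetical code under test: parse a config file, count enabled entries.
    with open(path) as fh:
        entries = json.load(fh)
    return sum(1 for entry in entries if entry.get('enabled'))


@mock.patch('builtins.open', mock.mock_open(read_data='ignored'))
@mock.patch('json.load')
def test_count_enabled_data_driven(fake_load):
    checks = [
        ([{'enabled': True}, {'enabled': False}], 1),
        ([{'enabled': True}, {'enabled': True}], 2),
        ([], 0),
    ]
    # return_value can be reassigned between calls, so one patched collaborator
    # drives several data-driven cases, as in the validator test above.
    for parsed, expected in checks:
        fake_load.return_value = parsed
        assert count_enabled('plugins.json') == expected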

Example 110

Project: djep Source File: tests.py
    @mock.patch('pyconde.attendees.utils.generate_invoice_number',
                new_callable=get_next_invoice_number)
    def test_valid_purchase_process(self, mock_gen_inv_nr):
        response = self.client.get(reverse('attendees_purchase'))

        # check we are on the start page
        self.assertContains(response, '<li class="active">Start</li>', html=True)

        # check for ticket type names:
        self.assertContains(response, 'TT:Student (100.00 EUR)')
        self.assertContains(response, 'TT:Standard (200.00 EUR)')
        self.assertContains(response, 'TT:FinAid (0.00 EUR)')
        self.assertContains(response, 'TT:SIM (12.34 EUR)')
        self.assertContains(response, 'TT:Support10 (10.00 EUR)')
        self.assertContains(response, 'TT:Support50 (50.00 EUR)')
        self.assertNotContains(response, 'TT:OnDesk')

        self.assertQuantityForm(response, self.tt_conf_student, 1)
        self.assertQuantityForm(response, self.tt_conf_standard, 7)
        self.assertQuantityForm(response, self.tt_conf_finaid, 1)
        self.assertQuantityForm(response, self.tt_sim, 10)
        self.assertQuantityForm(response, self.tt_support10, 10)
        self.assertQuantityForm(response, self.tt_support50, 10)
        self.assertNotContains(response,
            '<select id="id_tq-%d-quantity" name="tq-%d-quantity">' % (
                self.tt_conf_ondesk.pk, self.tt_conf_ondesk.pk))

        # TODO: Check for purchase form (billing address, etc.)

        # Post the quantity and purchase data
        data = {
            # quantities
            'tq-%d-quantity' % self.tt_conf_student.pk: 1,
            'tq-%d-quantity' % self.tt_conf_standard.pk: 2,
            'tq-%d-quantity' % self.tt_conf_finaid.pk: 1,
            'tq-%d-quantity' % self.tt_sim.pk: 2,
            'tq-%d-quantity' % self.tt_support10.pk: 0,
            'tq-%d-quantity' % self.tt_support50.pk: 4,

            # billing address
            'city': 'P:Berlin',
            'comments': 'P:SomeComment',
            'company_name': 'P:ExCom',
            'country': 'P:Germany',
            'email': '[email protected]',
            'first_name': 'P:FirstName',
            'last_name': 'P:LastName',
            'street': 'P:Street 123',
            'vat_id': 'P:V4TID',
            'zip_code': 'P:Z1P-2345',
        }
        response = self.client.post(reverse('attendees_purchase'), data=data)
        self.assertRedirects(response, reverse('attendees_purchase_names'))

        # check for created tickets
        tickets = self.client.session['purchase_state']['tickets']
        self.assertIsInstance(tickets[0], models.VenueTicket)
        self.assertIsInstance(tickets[1], models.VenueTicket)
        self.assertIsInstance(tickets[2], models.VenueTicket)
        self.assertIsInstance(tickets[3], models.VenueTicket)

        self.assertIsInstance(tickets[4], models.SIMCardTicket)
        self.assertIsInstance(tickets[5], models.SIMCardTicket)

        self.assertIsInstance(tickets[6], models.SupportTicket)
        self.assertIsInstance(tickets[7], models.SupportTicket)
        self.assertIsInstance(tickets[8], models.SupportTicket)
        self.assertIsInstance(tickets[9], models.SupportTicket)
        for i in range(10):
            self.assertEqual(tickets[i].pk, i)  # Check for temp pk

        # TODO: check for created purchase object

        response = self.client.get(reverse('attendees_purchase_names'))

        # check we are on the names page
        self.assertContains(response, '<li class="active">Ticket info</li>', html=True)

        # check for name forms labels
        self.assertContains(response, '<legend>1. Ticket (TT:Student)</legend>', count=1, html=True)
        self.assertContains(response, '<legend>2. Ticket (TT:Standard)</legend>', count=1, html=True)
        self.assertContains(response, '<legend>3. Ticket (TT:Standard)</legend>', count=1, html=True)
        self.assertContains(response, '<legend>4. Ticket (TT:FinAid)</legend>', count=1, html=True)
        self.assertContains(response, '<legend>Voucher</legend>', count=1, html=True)
        self.assertContains(response,
            '<label for="" class="requiredField"> 1. TT:Student <span class="asteriskField">*</span></label>',
            count=1, html=True)
        self.assertContains(response,
            '<label for="" class="requiredField"> 2. TT:FinAid <span class="asteriskField">*</span></label>',
            count=1, html=True)
        self.assertContains(response, '<legend>SIM Card(s)</legend>', count=1, html=True)
        self.assertContains(response, '<legend>1. TT:SIM</legend>', count=1, html=True)
        self.assertContains(response, '<legend>2. TT:SIM</legend>', count=1, html=True)

        # check for form fields
        tickets = self.client.session['purchase_state']['tickets']
        for i in range(4):
            self.assertNameForm(response, tickets[i], models.VenueTicket)
        for i in range(4, 6):
            self.assertNameForm(response, tickets[i], models.SIMCardTicket)

        self.assertVoucherForm(response, tickets[0])  # Student
        self.assertVoucherForm(response, tickets[3])  # FinAid

        # Post the ticket names
        data = {}
        shirtsizes = (self.ts_fm.pk, self.ts_mxl.pk)
        for i in range(4):
            data.update({
                'tn-%d-first_name' % tickets[i].pk: 'TN:%d:FirstName' % i,
                'tn-%d-last_name' % tickets[i].pk: 'TN:%d:LastName' % i,
                'tn-%d-organisation' % tickets[i].pk: 'TN:%d:Organisation' % i,
                'tn-%d-shirtsize' % tickets[i].pk: shirtsizes[i % 2],
            })
        for i in range(4, 6):
            data.update({
                'sc-%d-gender' % tickets[i].pk: (i % 2) and 'male' or 'female',
                'sc-%d-first_name' % tickets[i].pk: 'SC:%d:FirstName' % i,
                'sc-%d-last_name' % tickets[i].pk: 'SC:%d:LastName' % i,
                'sc-%d-date_of_birth' % tickets[i].pk: '2014-0%d-0%d' % (i, i),
                'sc-%d-hotel_name' % tickets[i].pk: 'SC:%d:HotelName' % i,
                'sc-%d-email' % tickets[i].pk: 'sc-%[email protected]' % i,
                'sc-%d-street' % tickets[i].pk: 'SC:%d:Street %d' % (i, i),
                'sc-%d-zip_code' % tickets[i].pk: 'SC:%d:ZIP' % i,
                'sc-%d-city' % tickets[i].pk: 'SC:%d:City' % i,
                'sc-%d-country' % tickets[i].pk: 'SC:%d:Country' % i,
                'sc-%d-phone' % tickets[i].pk: 'SC:%d:Phone' % i,
            })
        data.update({
            'tv-0-code': self.v_student.code,
            'tv-3-code': self.v_fin_aid.code,
        })

        response = self.client.post(reverse('attendees_purchase_names'), data=data)
        self.assertRedirects(response, reverse('attendees_purchase_confirm'))

        # TODO: check ticket data

        response = self.client.get(reverse('attendees_purchase_confirm'))

        # check we are on the confirmation page
        self.assertContains(response, '<li class="active">Overview</li>', html=True)

        # check for ticket list
        self.assertTicketInOverview(response, 'TT:Student', 'TN:0:FirstName',
            'TN:0:LastName', '100.00 EUR')
        self.assertTicketInOverview(response, 'TT:Standard', 'TN:1:FirstName',
            'TN:1:LastName', '200.00 EUR')
        self.assertTicketInOverview(response, 'TT:Standard', 'TN:2:FirstName',
            'TN:2:LastName', '200.00 EUR')
        self.assertTicketInOverview(response, 'TT:FinAid', 'TN:3:FirstName',
            'TN:3:LastName', '0.00 EUR')
        self.assertTicketInOverview(response, 'TT:SIM', 'SC:4:FirstName',
            'SC:4:LastName', '12.34 EUR')
        self.assertTicketInOverview(response, 'TT:SIM', 'SC:5:FirstName',
            'SC:5:LastName', '12.34 EUR')
        self.assertTicketInOverview(response, 'TT:Support50', '', '',
            '50.00 EUR', count=4)

        # check for billing address and total
        self.assertContains(response,
            '<p>'
            'P:ExCom<br />'
            'P:FirstName P:LastName<br />'
            'P:Street 123<br />'
            'P:Z1P-2345 P:Berlin<br />'
            'P:Germany<br />'
            '</p>', count=1, html=True)
        self.assertContains(response, '<td>724.68 EUR</td>', count=1, html=True)

        data = {
            'accept_terms': True,
            'payment_method': 'invoice',
        }

        response = self.client.post(reverse('attendees_purchase_confirm'), data=data, follow=True)
        # TODO: check persisted ticket data

        # check we are on the completion page
        self.assertContains(response, '<li class="active">Complete</li>', html=True)
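
The mock_gen_inv_nr argument above is injected by a mock.patch decorator on the test method (it sits just above this excerpt). A minimal sketch of that injection pattern, patching uuid.uuid4 purely as a stand-in target rather than anything from the original project:

import uuid
import unittest
import mock

def make_invoice_number():
    # Invented stand-in for the project's invoice number generator.
    return 'INV-%s' % uuid.uuid4().hex[:8]

class InvoiceNumberTest(unittest.TestCase):
    # The decorator creates a MagicMock, swaps it in for uuid.uuid4 for the
    # duration of the test, and passes it in as the extra argument -- the same
    # mechanism that supplies mock_gen_inv_nr above.
    @mock.patch('uuid.uuid4')
    def test_invoice_number_is_deterministic(self, mock_uuid4):
        mock_uuid4.return_value = mock.Mock(hex='deadbeefcafebabe')
        self.assertEqual(make_invoice_number(), 'INV-deadbeef')
        mock_uuid4.assert_called_once_with()

if __name__ == '__main__':
    unittest.main()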

Example 111

Project: flack Source File: tests.py
Function: test_message
    def test_message(self):
        # create a user and a token
        r, s, h = self.post('/api/users', data={'nickname': 'foo',
                                                'password': 'bar'})
        self.assertEqual(s, 201)
        r, s, h = self.post('/api/tokens', basic_auth='foo:bar')
        self.assertEqual(s, 200)
        token = r['token']

        # create a message
        r, s, h = self.post('/api/messages', data={'source': 'hello *world*!'},
                            token_auth=token)
        self.assertEqual(s, 201)
        url = h['Location']

        # create incomplete message
        r, s, h = self.post('/api/messages', data={'foo': 'hello *world*!'},
                            token_auth=token)
        self.assertEqual(s, 400)

        # get message
        r, s, h = self.get(url, token_auth=token)
        self.assertEqual(s, 200)
        self.assertEqual(r['source'], 'hello *world*!')
        self.assertEqual(r['html'], 'hello <em>world</em>!')

        # modify message
        r, s, h = self.put(url, data={'source': '*hello* world!'},
                           token_auth=token)
        self.assertEqual(s, 204)

        # check modified message
        r, s, h = self.get(url, token_auth=token)
        self.assertEqual(s, 200)
        self.assertEqual(r['source'], '*hello* world!')
        self.assertEqual(r['html'], '<em>hello</em> world!')

        # create a new message
        with mock.patch('flack.utils.time.time',
                        return_value=int(time.time()) + 5):
            r, s, h = self.post('/api/messages',
                                data={'source': 'bye *world*!'},
                                token_auth=token)
        self.assertEqual(s, 201)

        # get list of messages
        r, s, h = self.get('/api/messages', token_auth=token)
        self.assertEqual(s, 200)
        self.assertEqual(len(r['messages']), 2)
        self.assertEqual(r['messages'][0]['source'], '*hello* world!')
        self.assertEqual(r['messages'][1]['source'], 'bye *world*!')

        # get list of messages since
        r, s, h = self.get(
            '/api/messages?updated_since=' + str(int(time.time())),
            token_auth=token)
        self.assertEqual(s, 200)
        self.assertEqual(len(r['messages']), 1)
        self.assertEqual(r['messages'][0]['source'], 'bye *world*!')

        # create a second user and token
        r, s, h = self.post('/api/users', data={'nickname': 'bar',
                                                'password': 'baz'})
        self.assertEqual(s, 201)
        r, s, h = self.post('/api/tokens', basic_auth='bar:baz')
        self.assertEqual(s, 200)
        token2 = r['token']

        # modify message from first user with second user's token
        r, s, h = self.put(url, data={'source': '*hello* world!'},
                           token_auth=token2)
        self.assertEqual(s, 403)

        def responses():
            rv = requests.Response()
            rv.status_code = 200
            rv.encoding = 'utf-8'
            rv._content = (b'<html><head><title>foo</title>'
                           b'<meta name="blah" content="blah">'
                           b'<meta name="description" content="foo descr">'
                           b'</head></html>')
            yield rv
            rv = requests.Response()
            rv.status_code = 200
            rv.encoding = 'utf-8'
            rv._content = b'<html><head><title>bar</title></head></html>'
            yield rv
            rv = requests.Response()
            rv.status_code = 200
            rv.encoding = 'utf-8'
            rv._content = (b'<html><head>'
                           b'<meta name="description" content="baz descr">'
                           b'</head></html>')
            yield rv
            yield requests.exceptions.ConnectionError()

        with mock.patch('flack.models.requests.get', side_effect=responses()):
            r, s, h = self.post(
                '/api/messages',
                data={'source': 'hello http://foo.com!'},
                token_auth=token)
            self.assertEqual(s, 201)

            self.assertEqual(
                r['html'],
                'hello <a href="http://foo.com" rel="nofollow">'
                'http://foo.com</a>!<blockquote><p><a href="http://foo.com">'
                'foo</a></p><p>foo descr</p></blockquote>')

            r, s, h = self.post(
                '/api/messages',
                data={'source': 'hello http://foo.com!'},
                token_auth=token)
            self.assertEqual(s, 201)

            self.assertEqual(
                r['html'],
                'hello <a href="http://foo.com" rel="nofollow">'
                'http://foo.com</a>!<blockquote><p><a href="http://foo.com">'
                'bar</a></p><p>No description found.</p></blockquote>')

            r, s, h = self.post(
                '/api/messages',
                data={'source': 'hello foo.com!'},
                token_auth=token)
            self.assertEqual(s, 201)

            self.assertEqual(
                r['html'],
                'hello <a href="http://foo.com" rel="nofollow">'
                'foo.com</a>!<blockquote><p><a href="http://foo.com">'
                'http://foo.com</a></p><p>baz descr</p></blockquote>')

            r, s, h = self.post(
                '/api/messages',
                data={'source': 'hello foo.com!'},
                token_auth=token)
            self.assertEqual(s, 201)

            self.assertEqual(
                r['html'],
                'hello <a href="http://foo.com" rel="nofollow">'
                'foo.com</a>!')
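
Two patching idioms do the work in this test: return_value pins time.time to a fixed instant, and side_effect fed with a generator hands requests.get a prepared sequence of responses, finishing with a raised ConnectionError. A small, self-contained sketch of the side_effect idiom; fetch_title is an invented helper, not part of flack:

import mock
import requests

def fetch_title(url):
    # Invented helper: return the body on success, None on connection errors.
    try:
        return requests.get(url).text
    except requests.exceptions.ConnectionError:
        return None

def responses():
    rv = requests.Response()
    rv.status_code = 200
    rv.encoding = 'utf-8'
    rv._content = b'<title>foo</title>'
    yield rv
    # An exception instance in the sequence is raised instead of returned.
    yield requests.exceptions.ConnectionError()

with mock.patch('requests.get', side_effect=responses()):
    assert fetch_title('http://foo.com') == '<title>foo</title>'
    assert fetch_title('http://foo.com') is None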

Example 112

Project: luci-py Source File: testing.py
def mock_datetime_utc(*dec_args, **dec_kwargs):
  """Overrides built-in datetime and date classes to always return a given time.

  Args:
    Same arguments as datetime.datetime accepts to mock UTC time.
  
  Example usage:
    @mock_datetime_utc(2015, 10, 11, 20, 0, 0)
    def my_test(self):
      hour_ago_utc = datetime.datetime.utcnow() - datetime.timedelta(hours=1)
      self.assertEqual(hour_ago_utc, datetime.datetime(2015, 10, 11, 19, 0, 0))

  Note that if you are using the now() and today() methods, you should also use
  the mock_timezone decorator to get consistent test results across timezones:

    @mock_timezone('US/Pacific')
    @mock_datetime_utc(2015, 10, 11, 20, 0, 0)
    def my_test(self):
      local_dt = datetime.datetime.now()
      self.assertEqual(local_dt, datetime.datetime(2015, 10, 11, 12, 0, 0))
  """
  # We record original values currently stored in the datetime.datetime and
  # datetime.date here. Note that they are not necessarily vanilla Python types
  # and can already be mock classes - this can happen if nested mocking is used.
  original_datetime = datetime.datetime
  original_date = datetime.date

  # Our metaclass must be derived from the parent class metaclass, but if the
  # parent class doesn't have one, we use 'type' type.
  class MockDateTimeMeta(original_datetime.__dict__.get('__metaclass__', type)):
    @classmethod
    def __instancecheck__(cls, instance):
      return isinstance(instance, original_datetime)

  class _MockDateTime(original_datetime):
    __metaclass__ = MockDateTimeMeta
    mock_utcnow = original_datetime(*dec_args, **dec_kwargs)
  
    @classmethod
    def utcnow(cls):
      return cls.mock_utcnow
  
    @classmethod
    def now(cls, tz=None):
      if not tz:
        tz = tzlocal.get_localzone()
      tzaware_utcnow = pytz.utc.localize(cls.mock_utcnow)
      return tz.normalize(tzaware_utcnow.astimezone(tz)).replace(tzinfo=None)
  
    @classmethod
    def today(cls):
      return cls.now().date()

    @classmethod
    def fromtimestamp(cls, timestamp, tz=None):
      if not tz:
        # TODO(sergiyb): This may fail for some unclear reason because pytz
        # doesn't find normal timezones such as 'Europe/Berlin'. This seems to
        # happen only in appengine/chromium_try_flakes tests, and not in tests
        # for this module itself.
        tz = tzlocal.get_localzone()
      tzaware_dt = pytz.utc.localize(cls.utcfromtimestamp(timestamp))
      return tz.normalize(tzaware_dt.astimezone(tz)).replace(tzinfo=None)
  
  # Our metaclass must be derived from the parent class metaclass, but if the
  # parent class doesn't have one, we use 'type' type.
  class MockDateMeta(original_date.__dict__.get('__metaclass__', type)):
    @classmethod
    def __instancecheck__(cls, instance):
      return isinstance(instance, original_date)

  class _MockDate(original_date):
    __metaclass__ = MockDateMeta

    @classmethod
    def today(cls):
      return _MockDateTime.today()

    @classmethod
    def fromtimestamp(cls, timestamp, tz=None):
      return _MockDateTime.fromtimestamp(timestamp, tz).date()

  def decorator(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
      with mock.patch('datetime.datetime', _MockDateTime):
        with mock.patch('datetime.date', _MockDate):
          return func(*args, **kwargs)
    return wrapper
  return decorator
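
The pattern worth reusing here is a decorator that wraps the test in nested mock.patch context managers so the whole datetime class is swapped out, not just one method. A much smaller sketch of the same idea, assuming Python 3 and keeping only the utcnow override:

import datetime
import functools
import mock

def mock_utcnow(*dt_args):
    """Freeze datetime.datetime.utcnow() at the given moment (Python 3 sketch)."""
    frozen = datetime.datetime(*dt_args)

    class _FrozenDateTime(datetime.datetime):
        @classmethod
        def utcnow(cls):
            return frozen

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Swap the whole class for the duration of the call, just as the
            # full helper above does with nested mock.patch context managers.
            with mock.patch('datetime.datetime', _FrozenDateTime):
                return func(*args, **kwargs)
        return wrapper
    return decorator

@mock_utcnow(2015, 10, 11, 20, 0, 0)
def hour_ago():
    return datetime.datetime.utcnow() - datetime.timedelta(hours=1)

assert hour_ago() == datetime.datetime(2015, 10, 11, 19, 0, 0)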

Example 113

Project: girder Source File: assetstore_test.py
    @moto.mock_s3bucket_path
    def testS3AssetstoreAdapter(self):
        # Delete the default assetstore
        self.model('assetstore').remove(self.assetstore)
        s3Regex = r'^https://s3.amazonaws.com(:443)?/bucketname/foo/bar'

        params = {
            'name': 'S3 Assetstore',
            'type': AssetstoreType.S3,
            'bucket': '',
            'accessKeyId': 'someKey',
            'secret': 'someSecret',
            'prefix': '/foo/bar/'
        }

        # Validation should fail with empty bucket name
        resp = self.request(path='/assetstore', method='POST', user=self.admin, params=params)
        self.assertStatus(resp, 400)
        self.assertEqual(resp.json, {
            'type': 'validation',
            'field': 'bucket',
            'message': 'Bucket must not be empty.'
        })

        params['bucket'] = 'bucketname'
        # Validation should fail with a missing bucket
        resp = self.request(path='/assetstore', method='POST', user=self.admin, params=params)
        self.assertStatus(resp, 400)
        self.assertEqual(resp.json, {
            'type': 'validation',
            'field': 'bucket',
            'message': 'Unable to write into bucket "bucketname".'
        })

        # Validation should fail with a bogus service name
        params['service'] = 'ftp://nowhere'
        resp = self.request(path='/assetstore', method='POST', user=self.admin, params=params)
        self.assertStatus(resp, 400)
        del params['service']

        # Create a bucket (mocked using moto), so that we can create an
        # assetstore in it
        botoParams = makeBotoConnectParams(params['accessKeyId'],
                                           params['secret'])
        bucket = mock_s3.createBucket(botoParams, 'bucketname')

        # Create an assetstore
        resp = self.request(path='/assetstore', method='POST', user=self.admin, params=params)
        self.assertStatusOk(resp)
        assetstore = self.model('assetstore').load(resp.json['_id'])

        # Set the assetstore to current.  This is really to test the edit
        # assetstore code.
        params['current'] = True
        resp = self.request(path='/assetstore/%s' % assetstore['_id'],
                            method='PUT', user=self.admin, params=params)
        self.assertStatusOk(resp)

        # Test init for a single-chunk upload
        folders = self.model('folder').childFolders(self.admin, 'user')
        parentFolder = six.next(folders)
        params = {
            'parentType': 'folder',
            'parentId': parentFolder['_id'],
            'name': 'My File.txt',
            'size': 1024,
            'mimeType': 'text/plain'
        }
        resp = self.request(path='/file', method='POST', user=self.admin, params=params)
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['received'], 0)
        self.assertEqual(resp.json['size'], 1024)
        self.assertEqual(resp.json['behavior'], 's3')

        singleChunkUpload = resp.json
        s3Info = singleChunkUpload['s3']
        self.assertEqual(s3Info['chunked'], False)
        self.assertIsInstance(s3Info['chunkLength'], int)
        self.assertEqual(s3Info['request']['method'], 'PUT')
        six.assertRegex(self, s3Info['request']['url'], s3Regex)
        self.assertEqual(s3Info['request']['headers']['x-amz-acl'], 'private')

        # Test resume of a single-chunk upload
        resp = self.request(path='/file/offset', method='GET', user=self.admin,
                            params={'uploadId': resp.json['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['method'], 'PUT')
        self.assertTrue('headers' in resp.json)
        six.assertRegex(self, resp.json['url'], s3Regex)

        # Test finalize for a single-chunk upload
        resp = self.request(
            path='/file/completion', method='POST', user=self.admin,
            params={'uploadId': singleChunkUpload['_id']})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['size'], 1024)
        self.assertEqual(resp.json['assetstoreId'], str(assetstore['_id']))
        self.assertFalse('s3Key' in resp.json)
        self.assertFalse('relpath' in resp.json)

        file = self.model('file').load(resp.json['_id'], force=True)
        self.assertTrue('s3Key' in file)
        six.assertRegex(self, file['relpath'], '^/bucketname/foo/bar/')

        # Test init for a multi-chunk upload
        params['size'] = 1024 * 1024 * 1024 * 5
        resp = self.request(path='/file', method='POST', user=self.admin, params=params)
        self.assertStatusOk(resp)

        multiChunkUpload = resp.json
        s3Info = multiChunkUpload['s3']
        self.assertEqual(s3Info['chunked'], True)
        self.assertIsInstance(s3Info['chunkLength'], int)
        self.assertEqual(s3Info['request']['method'], 'POST')
        six.assertRegex(self, s3Info['request']['url'], s3Regex)

        # Test uploading a chunk
        resp = self.request(path='/file/chunk', method='POST',
                            user=self.admin, params={
                                'uploadId': multiChunkUpload['_id'],
                                'offset': 0,
                                'chunk': json.dumps({
                                    'partNumber': 1,
                                    's3UploadId': 'abcd'
                                })
                            })
        self.assertStatusOk(resp)
        six.assertRegex(self, resp.json['s3']['request']['url'], s3Regex)
        self.assertEqual(resp.json['s3']['request']['method'], 'PUT')

        # We should not be able to call file/offset with multi-chunk upload
        resp = self.request(path='/file/offset', method='GET', user=self.admin,
                            params={'uploadId': multiChunkUpload['_id']})
        self.assertStatus(resp, 400)
        self.assertEqual(resp.json, {
            'type': 'validation',
            'message': 'You should not call requestOffset on a chunked '
                       'direct-to-S3 upload.'
        })

        # Test finalize for a multi-chunk upload
        resp = self.request(
            path='/file/completion', method='POST', user=self.admin,
            params={'uploadId': multiChunkUpload['_id']})
        largeFile = resp.json
        self.assertStatusOk(resp)
        six.assertRegex(self, resp.json['s3FinalizeRequest']['url'], s3Regex)
        self.assertEqual(resp.json['s3FinalizeRequest']['method'], 'POST')

        # Test init for an empty file (should be no-op)
        params['size'] = 0
        resp = self.request(path='/file', method='POST', user=self.admin, params=params)
        emptyFile = resp.json
        self.assertStatusOk(resp)
        self.assertFalse('behavior' in resp.json)
        self.assertFalse('s3' in resp.json)

        # Test download for an empty file
        resp = self.request(path='/file/%s/download' % emptyFile['_id'],
                            user=self.admin, method='GET', isJson=False)
        self.assertStatusOk(resp)
        self.assertEqual(self.getBody(resp), '')
        self.assertEqual(resp.headers['Content-Length'], 0)
        self.assertEqual(resp.headers['Content-Disposition'],
                         'attachment; filename="My File.txt"')

        # Test download of a non-empty file
        resp = self.request(path='/file/%s/download' % largeFile['_id'],
                            user=self.admin, method='GET', isJson=False)
        self.assertStatus(resp, 303)
        six.assertRegex(self, resp.headers['Location'], s3Regex)

        # Test download of a non-empty file, with Content-Disposition=inline.
        # Expect the special S3 header response-content-disposition.
        params = {'contentDisposition': 'inline'}
        inlineRegex = r'response-content-disposition=' + \
                      'inline%3B\+filename%3D%22My\+File.txt%22'
        resp = self.request(
            path='/file/%s/download' % largeFile['_id'], user=self.admin, method='GET',
            isJson=False, params=params)
        self.assertStatus(resp, 303)
        six.assertRegex(self, resp.headers['Location'], s3Regex)
        six.assertRegex(self, resp.headers['Location'], inlineRegex)

        # Test download as part of a streaming zip
        @httmock.all_requests
        def s3_pipe_mock(url, request):
            if url.netloc.startswith('s3.amazonaws.com') and url.scheme == 'https':
                return 'dummy file contents'
            else:
                raise Exception('Unexpected url %s' % url)

        with httmock.HTTMock(s3_pipe_mock):
            resp = self.request(
                '/folder/%s/download' % parentFolder['_id'],
                method='GET', user=self.admin, isJson=False)
            self.assertStatusOk(resp)
            zip = zipfile.ZipFile(io.BytesIO(self.getBody(resp, text=False)), 'r')
            self.assertTrue(zip.testzip() is None)

            extracted = zip.read('Public/My File.txt')
            self.assertEqual(extracted, b'dummy file contents')

        # Attempt to import item directly into user; should fail
        resp = self.request(
            '/assetstore/%s/import' % assetstore['_id'], method='POST', params={
                'importPath': '/foo/bar',
                'destinationType': 'user',
                'destinationId': self.admin['_id']
            }, user=self.admin)
        self.assertStatus(resp, 400)
        self.assertEqual(
            resp.json['message'], 'Keys cannot be imported directly underneath a user.')

        # Import existing data from S3
        resp = self.request('/folder', method='POST', params={
            'parentType': 'folder',
            'parentId': parentFolder['_id'],
            'name': 'import destinaton'
        }, user=self.admin)
        self.assertStatusOk(resp)
        importFolder = resp.json

        resp = self.request(
            '/assetstore/%s/import' % assetstore['_id'], method='POST', params={
                'importPath': '',
                'destinationType': 'folder',
                'destinationId': importFolder['_id'],
            }, user=self.admin)
        self.assertStatusOk(resp)

        # Data should now appear in the tree
        resp = self.request('/folder', user=self.admin, params={
            'parentId': importFolder['_id'],
            'parentType': 'folder'
        })
        self.assertStatusOk(resp)
        children = resp.json
        self.assertEqual(len(children), 1)
        self.assertEqual(children[0]['name'], 'foo')

        resp = self.request('/folder', user=self.admin, params={
            'parentId': children[0]['_id'],
            'parentType': 'folder'
        })
        self.assertStatusOk(resp)
        children = resp.json
        self.assertEqual(len(children), 1)
        self.assertEqual(children[0]['name'], 'bar')

        resp = self.request('/item', user=self.admin, params={
            'folderId': children[0]['_id']
        })
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 1)
        item = resp.json[0]
        self.assertEqual(item['name'], 'test')
        self.assertEqual(item['size'], 0)

        resp = self.request('/item/%s/files' % str(item['_id']),
                            user=self.admin)
        self.assertStatusOk(resp)
        self.assertEqual(len(resp.json), 1)
        self.assertFalse('imported' in resp.json[0])
        self.assertFalse('relpath' in resp.json[0])
        file = self.model('file').load(resp.json[0]['_id'], force=True)
        self.assertTrue(file['imported'])
        self.assertFalse('relpath' in file)
        self.assertEqual(file['size'], 0)
        self.assertEqual(file['assetstoreId'], assetstore['_id'])
        self.assertTrue(bucket.get_key('/foo/bar/test') is not None)

        # Deleting an imported file should not delete it from S3
        with mock.patch('girder.events.daemon.trigger') as daemon:
            resp = self.request('/item/%s' % str(item['_id']), method='DELETE', user=self.admin)
            self.assertStatusOk(resp)
            self.assertEqual(len(daemon.mock_calls), 0)

        # Create the file key in the moto s3 store so that we can test that it
        # gets deleted.
        file = self.model('file').load(largeFile['_id'], user=self.admin)
        bucket.initiate_multipart_upload(file['s3Key'])
        key = bucket.new_key(file['s3Key'])
        key.set_contents_from_string("test")

        # Test delete for a non-empty file
        resp = self.request(path='/file/%s' % largeFile['_id'], user=self.admin, method='DELETE')
        self.assertStatusOk(resp)

        # The file should be gone now
        resp = self.request(path='/file/%s/download' % largeFile['_id'],
                            user=self.admin, method='GET', isJson=False)
        self.assertStatus(resp, 400)
        # The actual delete may still be in the event queue, so we want to
        # check the S3 bucket directly.
        startTime = time.time()
        while True:
            if bucket.get_key(file['s3Key']) is None:
                break
            if time.time()-startTime > 15:
                break  # give up and fail
            time.sleep(0.1)
        self.assertIsNone(bucket.get_key(file['s3Key']))

        resp = self.request(
            path='/folder/%s' % parentFolder['_id'], method='DELETE', user=self.admin)
        self.assertStatusOk(resp)

        # Set the assetstore to read only, attempt to delete it
        assetstore['readOnly'] = True
        assetstore = self.model('assetstore').save(assetstore)

        def fn(*args, **kwargs):
            raise Exception('get_all_multipart_uploads should not be called')

        # Must mock globally (too tricky to get a direct mock.patch)
        old = sys.modules['boto.s3.bucket'].Bucket.get_all_multipart_uploads
        sys.modules['boto.s3.bucket'].Bucket.get_all_multipart_uploads = fn

        try:
            resp = self.request(
                path='/assetstore/%s' % assetstore['_id'], method='DELETE', user=self.admin)
            self.assertStatusOk(resp)
        finally:
            sys.modules['boto.s3.bucket'].Bucket.get_all_multipart_uploads = old
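
Two details stand out in this long test: mock.patch used as a context manager to assert that girder.events.daemon.trigger was never called, and the manual sys.modules swap at the end, which mock.patch.object can usually express more safely. A hedged sketch of both idioms against stand-in objects (the Bucket and delete_assetstore below are invented, not the boto or girder APIs):

import mock

class Bucket(object):
    # Stand-in for boto's Bucket with only the method this sketch cares about.
    def get_all_multipart_uploads(self):
        return ['upload-1']

def delete_assetstore(bucket):
    # Pretend delete path; a read-only assetstore must not enumerate uploads.
    return 'deleted'

# Context-manager form: patch, exercise the code, then assert on the mock.
with mock.patch.object(Bucket, 'get_all_multipart_uploads') as listing:
    assert delete_assetstore(Bucket()) == 'deleted'
    listing.assert_not_called()   # same intent as len(daemon.mock_calls) == 0

# A raising side_effect makes any accidental call fail loudly, which is what
# the hand-rolled fn() swapped into sys.modules achieves above.
with mock.patch.object(Bucket, 'get_all_multipart_uploads',
                       side_effect=AssertionError('should not be called')):
    delete_assetstore(Bucket())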

Example 114

Project: treadmill Source File: run_test.py
    @mock.patch('shutil.copy', mock.Mock())
    @mock.patch('treadmill.appmgr.manifest.read', mock.Mock())
    @mock.patch('treadmill.appmgr.run._allocate_network_ports', mock.Mock())
    @mock.patch('treadmill.appmgr.run._create_environ_dir', mock.Mock())
    @mock.patch('treadmill.appmgr.run._create_root_dir', mock.Mock())
    @mock.patch('treadmill.appmgr.run._create_supervision_tree', mock.Mock())
    @mock.patch('treadmill.appmgr.run._prepare_ldpreload', mock.Mock())
    @mock.patch('treadmill.appmgr.run._share_cgroup_info', mock.Mock())
    @mock.patch('treadmill.appmgr.run._unshare_network', mock.Mock())
    @mock.patch('treadmill.fs.mount_bind', mock.Mock())
    @mock.patch('treadmill.supervisor.exec_root_supervisor', mock.Mock())
    @mock.patch('treadmill.subproc.check_call', mock.Mock())
    @mock.patch('treadmill.utils.rootdir',
                mock.Mock(return_value='/treadmill'))
    def test_run(self):
        """Tests appmgr.run sequence, which will result in supervisor exec.
        """
        # access protected module _allocate_network_ports
        # pylint: disable=w0212
        manifest = {
            'shared_network': False,
            'ephemeral_ports': 3,
            'passthrough': [
                'xxx',
                'yyy',
                'zzz'
            ],
            'memory': '100M',
            'host_ip': '172.31.81.67',
            'uniqueid': 'ID1234',
            'services': [
                {
                    'name': 'web_server',
                    'command': '/bin/true',
                    'restart': {
                        'limit': 3,
                        'interval': 60,
                    },
                }
            ],
            'disk': '100G',
            'tickets': True,
            'name': 'proid.myapp#0',
            'system_services': [],
            'environment': 'dev',
            'proid': 'foo',
            'endpoints': [
                {
                    'name': 'http',
                    'port': 8000
                },
                {
                    'name': 'port0',
                    'port': 0
                },
                {
                    'type': 'infra',
                    'name': 'ssh',
                    'port': 0
                }
            ],
            'cpu': '100%'
        }
        treadmill.appmgr.manifest.read.return_value = manifest
        app_unique_name = 'proid.myapp-0-0000000ID1234'
        app_dir = os.path.join(self.root, 'apps', app_unique_name)
        os.makedirs(app_dir)
        mock_nwrk_client = self.app_env.svc_network.make_client.return_value
        network = {
            'vip': '2.2.2.2',
            'gateway': '1.1.1.1',
            'veth': 'testveth.0',
        }
        mock_nwrk_client.wait.return_value = network

        def _fake_allocate_network_ports(_ip, manifest):
            """Mimic in-place manifest modification in _allocate_network_ports.
            """
            manifest['ephemeral_ports'] = ['1', '2', '3']
            return mock.DEFAULT
        treadmill.appmgr.run._allocate_network_ports.side_effect = \
            _fake_allocate_network_ports
        mock_watchdog = mock.Mock()

        treadmill.subproc.EXECUTABLES['treadmill_bind_preload.so'] = (
            '/some/$LIB/treadmill_bind_preload.so')

        app_run.run(
            self.app_env, app_dir, mock_watchdog, terminated=()
        )

        # Check that port allocation is correctly called.
        # XXX(boysson): potential mock bug: assert_call expects the vip since
        #               manifest is modified in place even though the vip are
        #               allocated *after*.
        manifest['vip'] = {
            'ip0': '1.1.1.1',
            'ip1': '2.2.2.2',
        }
        manifest['network'] = network
        manifest['ephemeral_ports'] = ['1', '2', '3']
        treadmill.appmgr.run._allocate_network_ports.assert_called_with(
            '172.31.81.67', manifest,
        )
        # Make sure, post modification, that the manifest is readable by other.
        st = os.stat(os.path.join(app_dir, 'state.yml'))
        self.assertTrue(st.st_mode & stat.S_IRUSR)
        self.assertTrue(st.st_mode & stat.S_IRGRP)
        self.assertTrue(st.st_mode & stat.S_IROTH)
        self.assertTrue(st.st_mode & stat.S_IWUSR)
        self.assertFalse(st.st_mode & stat.S_IWGRP)
        self.assertFalse(st.st_mode & stat.S_IWOTH)
        # State yml is what is copied in the container
        shutil.copy.assert_called_with(
            os.path.join(app_dir, 'state.yml'),
            os.path.join(app_dir, 'root', 'app.yml'),
        )

        # Network unshare
        app = utils.to_obj(manifest)
        treadmill.appmgr.run._unshare_network.assert_called_with(
            self.app_env, app
        )
        # Create root dir
        treadmill.appmgr.run._create_root_dir.assert_called_with(
            self.app_env,
            app_dir,
            os.path.join(app_dir, 'root'),
            app,
        )
        # XXX(boysson): Missing environ_dir/manifest_dir tests
        # Create supervision tree
        treadmill.appmgr.run._create_supervision_tree.assert_called_with(
            app_dir,
            self.app_env.app_events_dir,
            app
        )
        treadmill.appmgr.run._share_cgroup_info.assert_called_with(
            app,
            os.path.join(app_dir, 'root'),
        )
        # Ephemeral LDPRELOAD
        treadmill.appmgr.run._prepare_ldpreload.assert_called_with(
            os.path.join(app_dir, 'root'),
            ['/some/$LIB/treadmill_bind_preload.so']
        )
        # Misc bind mounts
        treadmill.fs.mount_bind.assert_has_calls([
            mock.call(
                os.path.join(app_dir, 'root'),
                '/etc/resolv.conf',
                bind_opt='--bind',
                target=os.path.join(app_dir, 'root/.etc/resolv.conf')
            ),
            mock.call(
                os.path.join(app_dir, 'root'),
                '/etc/ld.so.preload',
                bind_opt='--bind',
                target=os.path.join(app_dir, 'root/.etc/ld.so.preload')
            ),
            mock.call(
                os.path.join(app_dir, 'root'),
                '/etc/pam.d/sshd',
                bind_opt='--bind',
                target=os.path.join(app_dir, 'root/treadmill/etc/pam.d/sshd')
            ),
        ])

        self.assertTrue(mock_watchdog.remove.called)
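
Because each decorator above passes an explicit replacement (mock.Mock()) as the second argument, patch does not add extra arguments to test_run; the mocks are configured and asserted through the patched module attributes instead. A minimal sketch of that variant, using shutil.copy as the target and an invented stage_config helper:

import mock
import shutil

def stage_config(src, dst):
    # Invented code under test: copies a config file into the container root.
    shutil.copy(src, dst)

# Passing an explicit replacement (mock.Mock()) as the second argument means
# patch does not inject the mock into the function signature; it is reached
# through the patched module attribute instead, as with manifest.read above.
@mock.patch('shutil.copy', mock.Mock())
def test_stage_config():
    stage_config('state.yml', 'root/app.yml')   # no real file is copied
    shutil.copy.assert_called_with('state.yml', 'root/app.yml')

test_stage_config()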

Example 115

Project: edx-platform Source File: test_shib.py
    @unittest.skipUnless(settings.FEATURES.get('AUTH_USE_SHIB'), "AUTH_USE_SHIB not set")
    def test_shib_login(self):
        """
        Tests that:
          * shib credentials that match an existing ExternalAuthMap with a linked active user logs the user in
          * shib credentials that match an existing ExternalAuthMap with a linked inactive user shows error page
          * shib credentials that match an existing ExternalAuthMap without a linked user and also match the email
            of an existing user without an existing ExternalAuthMap links the two and log the user in
          * shib credentials that match an existing ExternalAuthMap without a linked user and also match the email
            of an existing user that already has an ExternalAuthMap causes an error (403)
          * shib credentials that do not match an existing ExternalAuthMap causes the registration form to appear
        """
        # pylint: disable=too-many-statements

        user_w_map = UserFactory.create(email='[email protected]')
        extauth = ExternalAuthMap(external_id='[email protected]',
                                  external_email='',
                                  external_domain='shib:https://idp.stanford.edu/',
                                  external_credentials="",
                                  user=user_w_map)
        user_wo_map = UserFactory.create(email='[email protected]')
        user_w_map.save()
        user_wo_map.save()
        extauth.save()

        inactive_user = UserFactory.create(email='[email protected]')
        inactive_user.is_active = False
        inactive_extauth = ExternalAuthMap(external_id='[email protected]',
                                           external_email='',
                                           external_domain='shib:https://idp.stanford.edu/',
                                           external_credentials="",
                                           user=inactive_user)
        inactive_user.save()
        inactive_extauth.save()

        idps = ['https://idp.stanford.edu/', 'https://someother.idp.com/']
        remote_users = ['[email protected]', '[email protected]',
                        'testuser2@someother_idp.com', '[email protected]']

        for idp in idps:
            for remote_user in remote_users:

                self.client.logout()
                with patch('openedx.core.djangoapps.external_auth.views.AUDIT_LOG') as mock_audit_log:
                    response = self.client.get(
                        reverse('shib-login'),
                        **{
                            'Shib-Identity-Provider': idp,
                            'mail': remote_user,
                            'REMOTE_USER': remote_user,
                        }
                    )
                audit_log_calls = mock_audit_log.method_calls

                if idp == "https://idp.stanford.edu/" and remote_user == '[email protected]':
                    self.assertRedirects(response, '/dashboard')
                    self.assertEquals(int(self.client.session['_auth_user_id']), user_w_map.id)
                    # verify logging:
                    self.assertEquals(len(audit_log_calls), 2)
                    self._assert_shib_login_is_logged(audit_log_calls[0], remote_user)
                    method_name, args, _kwargs = audit_log_calls[1]
                    self.assertEquals(method_name, 'info')
                    self.assertEquals(len(args), 1)
                    self.assertIn(u'Login success', args[0])
                    self.assertIn(remote_user, args[0])
                elif idp == "https://idp.stanford.edu/" and remote_user == '[email protected]':
                    self.assertEqual(response.status_code, 403)
                    self.assertIn("Account not yet activated: please look for link in your email", response.content)
                    # verify logging:
                    self.assertEquals(len(audit_log_calls), 2)
                    self._assert_shib_login_is_logged(audit_log_calls[0], remote_user)
                    method_name, args, _kwargs = audit_log_calls[1]
                    self.assertEquals(method_name, 'warning')
                    self.assertEquals(len(args), 1)
                    self.assertIn(u'is not active after external login', args[0])
                    # self.assertEquals(remote_user, args[1])
                elif idp == "https://idp.stanford.edu/" and remote_user == '[email protected]':
                    self.assertIsNotNone(ExternalAuthMap.objects.get(user=user_wo_map))
                    self.assertRedirects(response, '/dashboard')
                    self.assertEquals(int(self.client.session['_auth_user_id']), user_wo_map.id)
                    # verify logging:
                    self.assertEquals(len(audit_log_calls), 2)
                    self._assert_shib_login_is_logged(audit_log_calls[0], remote_user)
                    method_name, args, _kwargs = audit_log_calls[1]
                    self.assertEquals(method_name, 'info')
                    self.assertEquals(len(args), 1)
                    self.assertIn(u'Login success', args[0])
                    self.assertIn(remote_user, args[0])
                elif idp == "https://someother.idp.com/" and remote_user in \
                            ['[email protected]', '[email protected]', '[email protected]']:
                    self.assertEqual(response.status_code, 403)
                    self.assertIn("You have already created an account using an external login", response.content)
                    # no audit logging calls
                    self.assertEquals(len(audit_log_calls), 0)
                else:
                    self.assertEqual(response.status_code, 200)
                    self.assertContains(response,
                                        (u"Preferences for {platform_name}"
                                         .format(platform_name=settings.PLATFORM_NAME)))
                    # no audit logging calls
                    self.assertEquals(len(audit_log_calls), 0)
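
The interesting mock.patch usage here is replacing a module-level logger and then unpacking mock_audit_log.method_calls into (name, args, kwargs) triples. A small sketch of the same inspection pattern; the login helper is invented, and the target is patched via __main__ only because the sketch is a standalone script:

import logging
import mock

AUDIT_LOG = logging.getLogger('audit')

def login(user, ok):
    if ok:
        AUDIT_LOG.info(u'Login success - %s', user)
    else:
        AUDIT_LOG.warning(u'Login failed - %s', user)

with mock.patch('__main__.AUDIT_LOG') as mock_audit_log:
    login('alice', ok=True)
    login('mallory', ok=False)

# method_calls entries unpack into (method name, args, kwargs), just like
# audit_log_calls in the test above.
method_name, args, _kwargs = mock_audit_log.method_calls[0]
assert method_name == 'info'
assert 'alice' in args
assert len(mock_audit_log.method_calls) == 2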

Example 116

Project: socorro Source File: test_legacy_processor.py
    def test_create_basic_processed_crash_normal(self):
        config = setup_config_with_mocks()
        config.collect_addon = False
        config.collect_crash_process = False
        mocked_transform_rules_str = \
            'socorro.processor.legacy_processor.TransformRuleSystem'
        with mock.patch(mocked_transform_rules_str) as m_transform_class:
            m_transform = mock.Mock()
            m_transform_class.return_value = m_transform
            m_transform.attach_mock(mock.Mock(), 'apply_all_rules')
            utc_now_str = 'socorro.processor.legacy_processor.utc_now'
            with mock.patch(utc_now_str) as m_utc_now:
                m_utc_now.return_value = datetime(2012, 5, 4, 15, 11,
                                                  tzinfo=UTC)

                started_timestamp = datetime(2012, 5, 4, 15, 10, tzinfo=UTC)

                raw_crash = canonical_standard_raw_crash
                leg_proc = LegacyCrashProcessor(config, config.mock_quit_fn)
                processor_notes = []

                # test 01
                processed_crash = leg_proc._create_basic_processed_crash(
                  '3bc4bcaa-b61d-4d1f-85ae-30cb32120504',
                  raw_crash,
                  datetimeFromISOdateString(raw_crash.submitted_timestamp),
                  started_timestamp,
                  processor_notes,
                )
                assert 'exploitability' in processed_crash
                eq_(
                  processed_crash,
                  dict(cannonical_basic_processed_crash)
                )

                # test 02
                processor_notes = []
                raw_crash_missing_product = copy.deepcopy(raw_crash)
                del raw_crash_missing_product['ProductName']
                processed_crash = leg_proc._create_basic_processed_crash(
                  '3bc4bcaa-b61d-4d1f-85ae-30cb32120504',
                  raw_crash_missing_product,
                  datetimeFromISOdateString(raw_crash.submitted_timestamp),
                  started_timestamp,
                  processor_notes,
                )
                processed_crash_missing_product = \
                    copy.copy(cannonical_basic_processed_crash)
                processed_crash_missing_product.product = None
                eq_(
                  processed_crash,
                  processed_crash_missing_product
                )
                ok_('WARNING: raw_crash missing ProductName' in
                                processor_notes)
                eq_(len(processor_notes), 1)

                # test 03
                processor_notes = []
                raw_crash_missing_version = copy.deepcopy(raw_crash)
                del raw_crash_missing_version['Version']
                processed_crash = leg_proc._create_basic_processed_crash(
                  '3bc4bcaa-b61d-4d1f-85ae-30cb32120504',
                  raw_crash_missing_version,
                  datetimeFromISOdateString(raw_crash.submitted_timestamp),
                  started_timestamp,
                  processor_notes,
                )
                processed_crash_missing_version = \
                    copy.copy(cannonical_basic_processed_crash)
                processed_crash_missing_version.version = None
                eq_(
                  processed_crash,
                  processed_crash_missing_version
                )
                ok_('WARNING: raw_crash missing Version' in
                                processor_notes)
                eq_(len(processor_notes), 1)

                # test 04
                processor_notes = []
                raw_crash_with_hangid = copy.deepcopy(raw_crash)
                raw_crash_with_hangid.HangID = \
                    '30cb3212-b61d-4d1f-85ae-3bc4bcaa0504'
                processed_crash = leg_proc._create_basic_processed_crash(
                  '3bc4bcaa-b61d-4d1f-85ae-30cb32120504',
                  raw_crash_with_hangid,
                  datetimeFromISOdateString(raw_crash.submitted_timestamp),
                  started_timestamp,
                  processor_notes,
                )
                processed_crash_with_hangid = \
                    copy.copy(cannonical_basic_processed_crash)
                processed_crash_with_hangid.hangid = \
                    raw_crash_with_hangid.HangID
                processed_crash_with_hangid.hang_type = -1
                eq_(
                  processed_crash,
                  processed_crash_with_hangid
                )
                eq_(len(processor_notes), 0)

                # test 05
                processor_notes = []
                raw_crash_with_pluginhang = copy.deepcopy(raw_crash)
                raw_crash_with_pluginhang.PluginHang = '1'
                processed_crash = leg_proc._create_basic_processed_crash(
                  '3bc4bcaa-b61d-4d1f-85ae-30cb32120504',
                  raw_crash_with_pluginhang,
                  datetimeFromISOdateString(raw_crash.submitted_timestamp),
                  started_timestamp,
                  processor_notes,
                )
                processed_crash_with_pluginhang = \
                    copy.copy(cannonical_basic_processed_crash)
                processed_crash_with_pluginhang.hangid = \
                    'fake-3bc4bcaa-b61d-4d1f-85ae-30cb32120504'
                processed_crash_with_pluginhang.hang_type = -1
                eq_(
                  processed_crash,
                  processed_crash_with_pluginhang
                )
                eq_(len(processor_notes), 0)

                # test 06
                processor_notes = []
                raw_crash_with_hang_only = copy.deepcopy(raw_crash)
                raw_crash_with_hang_only.Hang = 16
                processed_crash = leg_proc._create_basic_processed_crash(
                  '3bc4bcaa-b61d-4d1f-85ae-30cb32120504',
                  raw_crash_with_hang_only,
                  datetimeFromISOdateString(raw_crash.submitted_timestamp),
                  started_timestamp,
                  processor_notes,
                )
                processed_crash_with_hang_only = \
                    copy.copy(cannonical_basic_processed_crash)
                processed_crash_with_hang_only.hang_type = 1
                eq_(
                  processed_crash,
                  processed_crash_with_hang_only
                )
                eq_(len(processor_notes), 0)
                leg_proc._statistics.assert_has_calls(
                    [
                        mock.call.incr('restarts'),
                    ],
                    any_order=True
                )

                # test 07
                processor_notes = []
                raw_crash_with_hang_only = copy.deepcopy(raw_crash)
                raw_crash_with_hang_only.Hang = 'bad value'
                processed_crash = leg_proc._create_basic_processed_crash(
                  '3bc4bcaa-b61d-4d1f-85ae-30cb32120504',
                  raw_crash_with_hang_only,
                  datetimeFromISOdateString(raw_crash.submitted_timestamp),
                  started_timestamp,
                  processor_notes,
                )
                processed_crash_with_hang_only = \
                    copy.copy(cannonical_basic_processed_crash)
                processed_crash_with_hang_only.hang_type = 0
                eq_(
                  processed_crash,
                  processed_crash_with_hang_only
                )
                eq_(len(processor_notes), 0)
                leg_proc._statistics.assert_has_calls(
                    [
                        mock.call.incr('restarts'),
                    ],
                    any_order=True
                )

                # test 08
                processor_notes = []
                bad_raw_crash = copy.deepcopy(raw_crash)
                bad_raw_crash['SecondsSinceLastCrash'] = 'badness'
                processed_crash = leg_proc._create_basic_processed_crash(
                  '3bc4bcaa-b61d-4d1f-85ae-30cb32120504',
                  bad_raw_crash,
                  datetimeFromISOdateString(raw_crash.submitted_timestamp),
                  started_timestamp,
                  processor_notes,
                )
                eq_(processed_crash.last_crash, None)
                ok_(
                    'non-integer value of "SecondsSinceLastCrash"' in
                    processor_notes
                )

                # test 09
                processor_notes = []
                bad_raw_crash = copy.deepcopy(raw_crash)
                bad_raw_crash['CrashTime'] = 'badness'
                processed_crash = leg_proc._create_basic_processed_crash(
                  '3bc4bcaa-b61d-4d1f-85ae-30cb32120504',
                  bad_raw_crash,
                  datetimeFromISOdateString(raw_crash.submitted_timestamp),
                  started_timestamp,
                  processor_notes,
                )
                eq_(processed_crash.crash_time, 0)
                ok_(
                    'non-integer value of "CrashTime"' in processor_notes
                )

                # test 10
                processor_notes = []
                bad_raw_crash = copy.deepcopy(raw_crash)
                bad_raw_crash['StartupTime'] = 'badness'
                bad_raw_crash['InstallTime'] = 'more badness'
                bad_raw_crash['CrashTime'] = 'even more badness'
                processed_crash = leg_proc._create_basic_processed_crash(
                  '3bc4bcaa-b61d-4d1f-85ae-30cb32120504',
                  bad_raw_crash,
                  datetimeFromISOdateString(raw_crash.submitted_timestamp),
                  started_timestamp,
                  processor_notes,
                )
                eq_(processed_crash.install_age, 0)
                ok_(
                    'non-integer value of "StartupTime"' in processor_notes
                )
                ok_(
                    'non-integer value of "InstallTime"' in processor_notes
                )
                ok_(
                    'non-integer value of "CrashTime"' in processor_notes
                )
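
The key move in this test is patching a class (TransformRuleSystem) where it is looked up, so the code under test receives m_transform_class.return_value instead of a real instance. A minimal sketch of that factory-patching idiom, using tempfile.NamedTemporaryFile as the target and an invented write_report function:

import mock
import tempfile

def write_report(text):
    # Invented code under test: writes into whatever NamedTemporaryFile() returns.
    handle = tempfile.NamedTemporaryFile()
    handle.write(text)
    return handle

# Patching the factory where it is looked up hands the code under test
# m_factory.return_value instead of a real file, mirroring how the test above
# substitutes TransformRuleSystem with a preconfigured mock.
with mock.patch('tempfile.NamedTemporaryFile') as m_factory:
    fake_handle = mock.Mock()
    m_factory.return_value = fake_handle
    result = write_report(b'crash data')

assert result is fake_handle
fake_handle.write.assert_called_once_with(b'crash data')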

Example 117

Project: grr Source File: email_alerts_test.py
  def testSendEmail(self):
    # This is already patched out in tests but in this specific test we
    # are interested in the results so we just add another patcher.
    self.smtp_patcher = mock.patch("smtplib.SMTP")
    self.mock_smtp = self.smtp_patcher.start()
    try:
      testdomain = "test.com"
      with test_lib.ConfigOverrider({"Logging.domain": testdomain}):
        smtp_conn = self.mock_smtp.return_value

        # Single fully qualified address
        to_address = "[email protected]"
        from_address = "[email protected]"
        subject = "test"
        message = ""
        email_alerts.EMAIL_ALERTER.SendEmail(to_address, from_address, subject,
                                             message)
        c_from, c_to, msg = smtp_conn.sendmail.call_args[0]
        self.assertItemsEqual(from_address, c_from)
        self.assertItemsEqual([to_address], c_to)
        self.assertFalse("CC:" in msg)

        # Single fully qualified address as rdf_standard.DomainEmailAddress
        to_address = rdf_standard.DomainEmailAddress("testto@%s" % testdomain)
        from_address = "[email protected]"
        subject = "test"
        message = ""
        email_alerts.EMAIL_ALERTER.SendEmail(to_address, from_address, subject,
                                             message)
        c_from, c_to, msg = smtp_conn.sendmail.call_args[0]
        self.assertItemsEqual(from_address, c_from)
        self.assertItemsEqual([to_address], c_to)
        self.assertFalse("CC:" in msg)

        # Multiple unqualified to addresses, one cc
        to_address = "testto,abc,def"
        to_address_expected = [
            x + testdomain for x in ["testto@", "abc@", "def@", "testcc@"]
        ]
        cc_address = "testcc"
        email_alerts.EMAIL_ALERTER.SendEmail(
            to_address, from_address, subject, message, cc_addresses=cc_address)
        c_from, c_to, message = smtp_conn.sendmail.call_args[0]
        self.assertItemsEqual(from_address, c_from)
        self.assertItemsEqual(to_address_expected, c_to)
        self.assertTrue("CC: testcc@%s" % testdomain in message)

        # Multiple unqualified to addresses as DomainEmailAddress, one cc
        to_address = [
            rdf_standard.DomainEmailAddress("testto@%s" % testdomain),
            rdf_standard.DomainEmailAddress("abc@%s" % testdomain),
            rdf_standard.DomainEmailAddress("def@%s" % testdomain)
        ]
        to_address_expected = [
            x + testdomain for x in ["testto@", "abc@", "def@", "testcc@"]
        ]
        cc_address = "testcc"
        email_alerts.EMAIL_ALERTER.SendEmail(
            to_address, from_address, subject, message, cc_addresses=cc_address)
        c_from, c_to, message = smtp_conn.sendmail.call_args[0]
        self.assertItemsEqual(from_address, c_from)
        self.assertItemsEqual(to_address_expected, c_to)
        self.assertTrue("CC: testcc@%s" % testdomain in message)

        # Multiple unqualified to addresses, two cc, message_id set
        to_address = "testto,abc,def"
        to_address_expected = [
            x + testdomain
            for x in ["testto@", "abc@", "def@", "testcc@", "testcc2@"]
        ]
        cc_address = "testcc,testcc2"
        email_msg_id = "123123"
        email_alerts.EMAIL_ALERTER.SendEmail(
            to_address,
            from_address,
            subject,
            message,
            cc_addresses=cc_address,
            message_id=email_msg_id)
        c_from, c_to, message = smtp_conn.sendmail.call_args[0]
        self.assertItemsEqual(from_address, c_from)
        self.assertItemsEqual(to_address_expected, c_to)
        self.assertTrue("CC: testcc@%s,testcc2@%s" %
                        (testdomain, testdomain) in message)
        self.assertTrue("Message-ID: %s" % email_msg_id in message)

      # Multiple address types, two cc, no default domain
      with test_lib.ConfigOverrider({"Logging.domain": None}):
        to_address = [
            "testto@localhost", "hij",
            rdf_standard.DomainEmailAddress("klm@localhost")
        ]
        cc_address = "testcc,testcc2@localhost"
        to_address_expected = [
            "testto@localhost", "hij@localhost", "klm@localhost",
            "testcc@localhost", "testcc2@localhost"
        ]
        email_alerts.EMAIL_ALERTER.SendEmail(
            to_address, from_address, subject, message, cc_addresses=cc_address)
        c_from, c_to, message = smtp_conn.sendmail.call_args[0]
        self.assertItemsEqual(from_address, c_from)
        self.assertItemsEqual(to_address_expected, c_to)
        self.assertTrue("CC: testcc@%s,testcc2@%s" %
                        (testdomain, testdomain) in message)
    finally:
      self.smtp_patcher.stop()
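
Instead of a with block, this test builds a patcher with mock.patch("smtplib.SMTP"), starts it, and stops it in a finally clause, then digs the arguments out of sendmail.call_args. A compact sketch of the same start/stop pattern using addCleanup; send_alert is an invented stand-in for EMAIL_ALERTER.SendEmail:

import mock
import smtplib
import unittest

def send_alert(to_address, body):
    # Invented stand-in for the alerter: talks to a local SMTP server.
    conn = smtplib.SMTP('localhost')
    conn.sendmail('alerts@localhost', [to_address], body)
    conn.quit()

class SendAlertTest(unittest.TestCase):
    def setUp(self):
        # start()/stop() keeps the patch alive across helper calls; addCleanup
        # replaces the explicit try/finally used in the test above.
        self.smtp_patcher = mock.patch('smtplib.SMTP')
        self.mock_smtp = self.smtp_patcher.start()
        self.addCleanup(self.smtp_patcher.stop)

    def test_send_alert(self):
        send_alert('admin@localhost', 'disk full')
        smtp_conn = self.mock_smtp.return_value
        c_from, c_to, msg = smtp_conn.sendmail.call_args[0]
        self.assertEqual(c_from, 'alerts@localhost')
        self.assertEqual(c_to, ['admin@localhost'])
        self.assertEqual(msg, 'disk full')

if __name__ == '__main__':
    unittest.main()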

Example 118

Project: ceilometer Source File: test_gnocchi.py
    @mock.patch('ceilometer.dispatcher.gnocchi.LOG')
    @mock.patch('gnocchiclient.v1.client.Client')
    def test_workflow(self, fakeclient_cls, logger):
        self.dispatcher = gnocchi.GnocchiDispatcher(self.conf.conf)

        fakeclient = fakeclient_cls.return_value

        # FIXME(sileht): we don't use urlparse.quote here
        # to ensure / is converted to %2F;
        # temporarily disabled until we find a solution
        # on the gnocchi side. The current gnocchiclient doesn't
        # encode the resource_id
        resource_id = self.sample['resource_id']  # .replace("/", "%2F"),
        metric_name = self.sample['counter_name']
        gnocchi_id = gnocchi_utils.encode_resource_id(resource_id)

        expected_calls = [
            mock.call.capabilities.list(),
            mock.call.metric.batch_resources_metrics_measures(
                {gnocchi_id: {metric_name: self.measures_attributes}})
        ]
        expected_debug = [
            mock.call('gnocchi project found: %s',
                      'a2d42c23-d518-46b6-96ab-3fba2e146859'),
        ]

        measures_posted = False
        batch_side_effect = []
        if self.post_measure_fail:
            batch_side_effect += [Exception('boom!')]
        elif not self.resource_exists or not self.metric_exists:
            batch_side_effect += [
                gnocchi_exc.BadRequest(
                    400, "Unknown metrics: %s/%s" % (gnocchi_id,
                                                     metric_name))]
            attributes = self.postable_attributes.copy()
            attributes.update(self.patchable_attributes)
            attributes['id'] = self.sample['resource_id']
            attributes['metrics'] = dict((metric_name, {})
                                         for metric_name in self.metric_names)
            for k, v in six.iteritems(attributes['metrics']):
                if k == 'disk.root.size':
                    v['unit'] = 'GB'
                    continue
                if k == 'hardware.ipmi.node.power':
                    v['unit'] = 'W'
                    continue
            expected_calls.append(mock.call.resource.create(
                self.resource_type, attributes))

            if self.create_resource_fail:
                fakeclient.resource.create.side_effect = [Exception('boom!')]
            elif self.resource_exists:
                fakeclient.resource.create.side_effect = [
                    gnocchi_exc.ResourceAlreadyExists(409)]

                expected_calls.append(mock.call.metric.create({
                    'name': self.sample['counter_name'],
                    'unit': self.sample['counter_unit'],
                    'resource_id': resource_id}))
                if self.create_metric_fail:
                    fakeclient.metric.create.side_effect = [Exception('boom!')]
                elif self.metric_exists:
                    fakeclient.metric.create.side_effect = [
                        gnocchi_exc.NamedMetricAlreadyExists(409)]
                else:
                    fakeclient.metric.create.side_effect = [None]

            else:  # not resource_exists
                expected_debug.append(mock.call(
                    'Resource %s created', self.sample['resource_id']))

            if not self.create_resource_fail and not self.create_metric_fail:
                expected_calls.append(
                    mock.call.metric.batch_resources_metrics_measures(
                        {gnocchi_id: {metric_name: self.measures_attributes}})
                )

                if self.retry_post_measures_fail:
                    batch_side_effect += [Exception('boom!')]
                else:
                    measures_posted = True

        else:
            measures_posted = True

        if measures_posted:
            batch_side_effect += [None]
            expected_debug.append(
                mock.call("%(measures)d measures posted against %(metrics)d "
                          "metrics through %(resources)d resources", dict(
                              measures=len(self.measures_attributes),
                              metrics=1, resources=1))
            )

        if self.patchable_attributes:
            expected_calls.append(mock.call.resource.update(
                self.resource_type, resource_id,
                self.patchable_attributes))
            if self.update_resource_fail:
                fakeclient.resource.update.side_effect = [Exception('boom!')]
            else:
                expected_debug.append(mock.call(
                    'Resource %s updated', self.sample['resource_id']))

        batch = fakeclient.metric.batch_resources_metrics_measures
        batch.side_effect = batch_side_effect

        self.dispatcher.record_metering_data([self.sample])

        # Check that the last log message is the expected one
        if (self.post_measure_fail or self.create_metric_fail
                or self.create_resource_fail
                or self.retry_post_measures_fail
                or (self.update_resource_fail and self.patchable_attributes)):
            logger.error.assert_called_with('boom!', exc_info=True)
        else:
            self.assertEqual(0, logger.error.call_count)
        self.assertEqual(expected_calls, fakeclient.mock_calls)
        self.assertEqual(expected_debug, logger.debug.mock_calls)
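
Example 118 stacks two @mock.patch decorators, and because decorators apply bottom-up, the mock for the lowest patch is passed first (fakeclient_cls before logger). A minimal sketch of that ordering, using standard-library targets purely for illustration:

import os
import time
import unittest

import mock


class StackingOrderTest(unittest.TestCase):

    @mock.patch('time.time')   # outermost decorator -> last argument
    @mock.patch('os.getcwd')   # innermost decorator -> first argument
    def test_argument_order(self, fake_getcwd, fake_time):
        fake_getcwd.return_value = '/fake/dir'
        fake_time.return_value = 0.0
        self.assertEqual(os.getcwd(), '/fake/dir')
        self.assertEqual(time.time(), 0.0)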

Example 119

Project: kardboard Source File: test_statelog.py
    @mock.patch('kardboard.models.statelog.now')
    def test_card_going_back_and_forth(self, mocked_now):
        TODO = 'Todo'
        PLANNING = 'Planning'
        DOING = 'Doing'
        TESTING = 'Testing'
        DEPLOYING = 'Deploying'
        DONE = 'Done'

        # Created a new card 30 days ago in Todo
        mocked_now.return_value = datetime.now() - relativedelta(days=30)
        card = self.make_card(state=TODO)
        card.save()

        self.assertEnteredStateOnceAt(card, TODO, mocked_now.return_value)

        # Now move it to Planning 29 days ago
        mocked_now.return_value = datetime.now() - relativedelta(days=29)
        card.state = PLANNING
        card.save()
        self.assertExitedStateOnceAt(card, TODO, mocked_now.return_value)
        self.assertEnteredStateOnceAt(card, PLANNING, mocked_now.return_value)

        # Now move it back to TODO 25 days ago
        mocked_now.return_value = datetime.now() - relativedelta(days=25)
        card.state = TODO
        card.save()

        self.assertExitedStateOnceAt(card, PLANNING, mocked_now.return_value)
        self.assertEnteredStateNTimesRecently(2, card, TODO, mocked_now.return_value)

        # Now move it back to PLANNING 24 days ago
        mocked_now.return_value = datetime.now() - relativedelta(days=24)
        card.state = PLANNING
        card.save()

        self.assertExitedStateNTimesRecently(2, card, TODO, mocked_now.return_value)
        self.assertEnteredStateNTimesRecently(2, card, PLANNING, mocked_now.return_value)

        # Now move it to DOING 24 days ago
        mocked_now.return_value = datetime.now() - relativedelta(days=24)
        card.state = DOING
        card.save()

        self.assertExitedStateNTimesRecently(2, card, PLANNING, mocked_now.return_value)
        self.assertEnteredStateOnceAt(card, DOING, mocked_now.return_value)

        # Now move it to TESTING 22 days ago
        mocked_now.return_value = datetime.now() - relativedelta(days=22)
        card.state = TESTING
        card.save()

        self.assertExitedStateOnceAt(card, DOING, mocked_now.return_value)
        self.assertEnteredStateOnceAt(card, TESTING, mocked_now.return_value)

        # Now move it back to DOING 20 days ago
        mocked_now.return_value = datetime.now() - relativedelta(days=20)
        card.state = DOING
        card.save()

        self.assertExitedStateOnceAt(card, TESTING, mocked_now.return_value)
        self.assertEnteredStateNTimesRecently(2, card, DOING, mocked_now.return_value)

        # Now move it to TESTING 19 days ago
        mocked_now.return_value = datetime.now() - relativedelta(days=19)
        card.state = TESTING
        card.save()

        self.assertExitedStateNTimesRecently(2, card, DOING, mocked_now.return_value)
        self.assertEnteredStateNTimesRecently(2, card, TESTING, mocked_now.return_value)

        # Now move it back to DOING yet again, 18 days ago
        mocked_now.return_value = datetime.now() - relativedelta(days=18)
        card.state = DOING
        card.save()
        self.assertExitedStateNTimesRecently(2, card, TESTING, mocked_now.return_value)
        self.assertEnteredStateNTimesRecently(3, card, DOING, mocked_now.return_value)

        # Now move it back to TESTING yet again, 17 days ago
        mocked_now.return_value = datetime.now() - relativedelta(days=17)
        card.state = TESTING
        card.save()
        self.assertExitedStateNTimesRecently(3, card, DOING, mocked_now.return_value)
        self.assertEnteredStateNTimesRecently(3, card, TESTING, mocked_now.return_value)

        # Now move it to DEPLOYING 16 days ago
        mocked_now.return_value = datetime.now() - relativedelta(days=16)
        card.state = DEPLOYING
        card.save()
        self.assertExitedStateNTimesRecently(3, card, TESTING, mocked_now.return_value)
        self.assertEnteredStateOnceAt(card, DEPLOYING, mocked_now.return_value)

        # Now move it to DONE 16 days ago
        mocked_now.return_value = datetime.now() - relativedelta(days=16)
        card.state = DONE
        card.save()
        self.assertExitedStateOnceAt(card, DEPLOYING, mocked_now.return_value)
        self.assertEnteredStateOnceAt(card, DONE, mocked_now.return_value)
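
The kardboard test drives every state change through a patched module-level now() helper, re-pointing mocked_now.return_value before each save so the timestamps are fully controlled. A stripped-down sketch of the same pattern; the now() helper and transition log below are hypothetical stand-ins for kardboard's statelog:

import datetime

import mock


def now():
    # Stand-in for a module-level clock helper the code under test calls.
    return datetime.datetime.utcnow()


transitions = []


def record_transition(state):
    transitions.append((state, now()))


def test_transitions_at_controlled_times():
    with mock.patch(__name__ + '.now') as mocked_now:
        mocked_now.return_value = datetime.datetime(2016, 1, 1)
        record_transition('Todo')

        mocked_now.return_value = datetime.datetime(2016, 1, 2)
        record_transition('Doing')

    assert transitions == [
        ('Todo', datetime.datetime(2016, 1, 1)),
        ('Doing', datetime.datetime(2016, 1, 2)),
    ]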

Example 120

Project: fuel-plugins Source File: test_validator_v3.py
    @mock.patch('fuel_plugin_builder.validators.validator_v3.utils')
    def test_check_tasks_schema_validation_passed(self, utils_mock, *args):
        data_sets = [
            [
                {
                    'type': 'shell',
                    'parameters': {
                        'timeout': 3,
                        'cmd': 'xx'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
            ],
            [
                {
                    'type': 'shell',
                    'parameters': {
                        'timeout': 3,
                        'cmd': 'xx'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                {
                    'type': 'puppet',
                    'parameters': {
                        'timeout': 3,
                        'puppet_manifest': 'xx',
                        'puppet_modules': 'xxx'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
            ],
            [
                {
                    'type': 'shell',
                    'parameters': {
                        'timeout': 3,
                        'cmd': 'reboot'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                {
                    'type': 'shell',
                    'parameters': {
                        'timeout': 3,
                        'cmd': 'xx'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                {
                    'type': 'puppet',
                    'parameters': {
                        'timeout': 3,
                        'puppet_manifest': 'xx',
                        'puppet_modules': 'xxx'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                }
            ],
            [
                {
                    'type': 'shell',
                    'parameters': {
                        'timeout': 3,
                        'cmd': 'reboot'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                {
                    'type': 'shell',
                    'parameters': {
                        'timeout': 3,
                        'puppet_manifest': 'xx',
                        'puppet_modules': 'yy',
                        'cmd': 'reboot'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                {
                    'type': 'puppet',
                    'parameters': {
                        'timeout': 3,
                        'retries': 10,
                        'puppet_manifest': 'xx',
                        'puppet_modules': 'xxx'
                    },
                    'stage': 'post_deployment',
                    'role': '*'
                },
                {
                    'type': 'puppet',
                    'parameters': {
                        'timeout': 3,
                        'retries': 10,
                        'puppet_manifest': 'xx',
                        'puppet_modules': 'xxx'
                    },
                    'stage': 'post_deployment',
                    'role': 'master'
                },
            ]
        ]

        for data in data_sets:
            utils_mock.parse_yaml.return_value = data
            self.validator.check_deployment_tasks()
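
Example 120 patches the whole utils module rather than a single function, so the test only has to pin down the one attribute the validator uses (parse_yaml.return_value); every other attribute access silently returns a MagicMock. A small sketch of that module-level patch, with the json module standing in for utils in a hypothetical loader:

import json
import unittest

import mock


def task_types(raw):
    # Hypothetical code under test: parses a document via the json module.
    return [task['type'] for task in json.loads(raw)]


class ModulePatchTest(unittest.TestCase):

    @mock.patch(__name__ + '.json')
    def test_task_types(self, json_mock):
        # Only the attribute actually used needs a canned value; any other
        # attribute on json_mock would just be another MagicMock.
        json_mock.loads.return_value = [{'type': 'shell'}, {'type': 'puppet'}]
        self.assertEqual(task_types('ignored'), ['shell', 'puppet'])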

Example 121

Project: synnefo Source File: api.py
    @override_settings(CYCLADES_DETACHABLE_DISK_TEMPLATES=("ext_vlmc",))
    def test_create_volume(self, mrapi):
        vm = VirtualMachineFactory(
            operstate="ACTIVE",
            flavor__volume_type__disk_template="ext_vlmc")
        vt = VolumeTypeFactory(disk_template="ext_vlmc")
        user = vm.userid
        _data = {"display_name": "test_vol",
                 "size": 2,
                 "volume_type": vt.id}

        # Test standalone create success
        mrapi().ModifyInstance.return_value = 42
        with mocked_quotaholder():
            r = self.post(VOLUMES_URL, user,
                          json.dumps({"volume": _data}), "json")
        self.assertSuccess(r)

        # Test create and attach success
        _data["server_id"] = vm.id
        mrapi().ModifyInstance.return_value = 42
        with mocked_quotaholder():
            r = self.post(VOLUMES_URL, user,
                          json.dumps({"volume": _data}), "json")
        self.assertSuccess(r)

        # Test create without size, name and server
        for attr in ["display_name", "size", "server_id"]:
            data = deepcopy(_data)
            del data["size"]
            with mocked_quotaholder():
                r = self.post(VOLUMES_URL, user,
                              json.dumps({"volume": data}), "json")
            self.assertBadRequest(r)

        # Test invalid size
        data = deepcopy(_data)
        data["size"] = -2
        with mocked_quotaholder():
            r = self.post(VOLUMES_URL, user,
                          json.dumps({"volume": data}), "json")
        self.assertBadRequest(r)

        # Test deleted server or invalid state
        data = deepcopy(_data)
        vm.deleted = True
        vm.save()
        with mocked_quotaholder():
            r = self.post(VOLUMES_URL, user,
                          json.dumps({"volume": data}), "json")
        self.assertBadRequest(r)

        vm.deleted = False
        vm.operstate = "ERROR"
        vm.save()
        with mocked_quotaholder():
            r = self.post(VOLUMES_URL, user,
                          json.dumps({"volume": data}), "json")
        self.assertBadRequest(r)
        vm.operstate = "ACTIVE"
        vm.save()

        # Test volume type different from VM's flavor or invalid type
        data = deepcopy(_data)
        for disk_type in ["file", "plain", "drbd", "rbd"]:
            vtype = VolumeTypeFactory(disk_template=disk_type)
            data["volume_type"] = vtype.id
            with mocked_quotaholder():
                r = self.post(VOLUMES_URL, user,
                              json.dumps({"volume": data}), "json")
            self.assertBadRequest(r)
        for vtype in [434132421243, "foo"]:
            data["volume_type"] = vtype
            with mocked_quotaholder():
                r = self.post(VOLUMES_URL, user,
                              json.dumps({"volume": data}), "json")
            self.assertBadRequest(r)

        # Test source for invalid disk template
        for disk_type in ["file", "plain", "drbd", "rbd"]:
            temp_vm = VirtualMachineFactory(
                operstate="ACTIVE",
                flavor__volume_type__disk_template=disk_type)
            for attr in ["snapshot_id", "imageRef"]:
                data = deepcopy(_data)
                data["server_id"] = temp_vm.id
                data[attr] = "3214231-413242134123-431242"
                with mocked_quotaholder():
                    r = self.post(VOLUMES_URL, user,
                                  json.dumps({"volume": data}), "json")
                self.assertBadRequest(r)

        # Test snapshot and image together
        data = deepcopy(_data)
        data["snapshot_id"] = "3214231-413242134123-431242"
        data["imageRef"] = "3214231-413242134123-431242"
        with mocked_quotaholder():
            r = self.post(VOLUMES_URL, user,
                          json.dumps({"volume": data}), "json")
        self.assertBadRequest(r)

        # Test with Snapshot source

        # Test unknown snapshot
        data = deepcopy(_data)
        data["snapshot_id"] = "94321904321-432142134214-23142314"
        with mocked_quotaholder():
            r = self.post(VOLUMES_URL, user,
                          json.dumps({"volume": data}), "json")
        self.assertBadRequest(r)

        vm.task = None
        vm.action = None
        vm.save()
        # Test success
        snapshot = Mock()
        snapshot.return_value = {'location': 'pithos://foo',
                                 'mapfile': '1234',
                                 'id': 1,
                                 'name': 'test_image',
                                 'size': 1024,
                                 'is_snapshot': True,
                                 'is_public': True,
                                 'version': 42,
                                 'owner': 'user',
                                 'status': 'AVAILABLE',
                                 'disk_format': 'diskdump'}
        data["snapshot_id"] = 1
        with patch("synnefo.volume.util.get_snapshot", snapshot):
            with mocked_quotaholder():
                r = self.post(VOLUMES_URL, user,
                              json.dumps({"volume": data}), "json")
        self.assertSuccess(r)

        # Test with Image source

        # Test unknown image
        data = deepcopy(_data)
        data["imageRef"] = "94321904321-432142134214-23142314"
        with mocked_quotaholder():
            r = self.post(VOLUMES_URL, user,
                          json.dumps({"volume": data}), "json")
        self.assertBadRequest(r)

        vm.task = None
        vm.action = None
        vm.save()
        data["server_id"] = vm.id
        # Test success
        image = Mock()
        image.return_value = {'location': 'pithos://foo',
                              'mapfile': '1234',
                              'id': 2,
                              'name': 'test_image',
                              'size': 1024,
                              'is_snapshot': False,
                              'is_image': False,
                              'is_public': True,
                              'owner': 'user',
                              'version': 42,
                              'status': 'AVAILABLE',
                              'disk_format': 'diskdump'}
        data["imageRef"] = 2
        with patch("synnefo.api.util.get_image", image):
            with mocked_quotaholder():
                r = self.post(VOLUMES_URL, user,
                              json.dumps({"volume": data}), "json")
        self.assertSuccess(r)
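
Besides the usual decorator form, the synnefo test above uses the two-argument form patch(target, replacement) to hand in a Mock configured up front (the snapshot and image stubs). A compact sketch of that form; describe_snapshot and get_snapshot below are invented names, not synnefo's real API:

import mock


def get_snapshot(snapshot_id):
    # Stand-in for a lookup that would normally hit a real backend.
    raise RuntimeError('real backend call')


def describe_snapshot(snapshot_id):
    snap = get_snapshot(snapshot_id)
    return '%s (%d MB)' % (snap['name'], snap['size'])


def test_describe_snapshot_with_prepared_mock():
    fake = mock.Mock(return_value={'name': 'test_image', 'size': 1024})
    # patch(target, new): the prepared mock replaces the original only for
    # the duration of the with block, then the original is restored.
    with mock.patch(__name__ + '.get_snapshot', fake):
        assert describe_snapshot(1) == 'test_image (1024 MB)'
    fake.assert_called_once_with(1)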

Example 122

Project: PynamoDB Source File: test_table_connection.py
    def test_put_item(self):
        """
        TableConnection.put_item
        """
        conn = TableConnection(self.test_table_name)
        with patch(PATCH_METHOD) as req:
            req.return_value = DESCRIBE_TABLE_DATA
            conn.describe_table()

        with patch(PATCH_METHOD) as req:
            req.return_value = {}
            conn.put_item(
                'foo-key',
                range_key='foo-range-key',
                attributes={'ForumName': 'foo-value'}
            )
            params = {
                'ReturnConsumedCapacity': 'TOTAL',
                'TableName': self.test_table_name,
                'Item': {'ForumName': {'S': 'foo-value'}, 'Subject': {'S': 'foo-range-key'}}
            }
            self.assertEqual(req.call_args[0][1], params)

        with patch(PATCH_METHOD) as req:
            req.return_value = {}
            conn.put_item(
                'foo-key',
                range_key='foo-range-key',
                attributes={'ForumName': 'foo-value'}
            )
            params = {
                'ReturnConsumedCapacity': 'TOTAL',
                'Item': {
                    'ForumName': {
                        'S': 'foo-value'
                    },
                    'Subject': {
                        'S': 'foo-range-key'
                    }
                },
                'TableName': self.test_table_name
            }
            self.assertEqual(req.call_args[0][1], params)

        with patch(PATCH_METHOD) as req:
            req.return_value = HttpOK(), {}
            conn.put_item(
                'foo-key',
                range_key='foo-range-key',
                attributes={'ForumName': 'foo-value'},
                conditional_operator='and',
                expected={
                    'ForumName': {
                        'Exists': False
                    }
                }
            )
            params = {
                'ReturnConsumedCapacity': 'TOTAL',
                'Item': {
                    'ForumName': {
                        'S': 'foo-value'
                    },
                    'Subject': {
                        'S': 'foo-range-key'
                    }
                },
                'TableName': self.test_table_name,
                'ConditionalOperator': 'AND',
                'Expected': {
                    'ForumName': {
                        'Exists': False
                    }
                }
            }
            self.assertEqual(req.call_args[0][1], params)
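
The PynamoDB test never looks at what put_item returns; it patches the low-level request method and inspects req.call_args to check the exact parameters that would have gone over the wire. A minimal sketch of that call_args inspection; ApiClient and send_request are hypothetical names:

import mock


class ApiClient(object):
    # Hypothetical client: the high-level call builds the request parameters.
    def send_request(self, operation, params):
        raise RuntimeError('network call')

    def put_item(self, key, value):
        return self.send_request('PutItem', {'Key': key, 'Value': value})


def test_put_item_builds_expected_params():
    client = ApiClient()
    with mock.patch.object(client, 'send_request') as req:
        req.return_value = {}
        client.put_item('foo-key', 'foo-value')
        # call_args is an (args, kwargs) pair for the most recent call.
        assert req.call_args[0][0] == 'PutItem'
        assert req.call_args[0][1] == {'Key': 'foo-key', 'Value': 'foo-value'}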

Example 123

Project: dd-agent Source File: test_process.py
Function: test_relocated_procfs
    def test_relocated_procfs(self):
        from utils.platform import Platform
        import tempfile
        import shutil
        import uuid

        already_linux = Platform.is_linux()
        unique_process_name = str(uuid.uuid4())
        my_procfs = tempfile.mkdtemp()

        def _fake_procfs(arg, root=my_procfs):
            for key, val in arg.iteritems():
                path = os.path.join(root, key)
                if isinstance(val, dict):
                    os.mkdir(path)
                    _fake_procfs(val, path)
                else:
                    with open(path, "w") as f:
                        f.write(str(val))
        _fake_procfs({
            '1': {
                'status': (
                    "Name:\t%s\nThreads:\t1\n"
                ) % unique_process_name,
                'stat': ('1 (%s) S 0 1 1 ' + ' 0' * 46) % unique_process_name,
                'cmdline': unique_process_name,

            },
            'stat': (
                "cpu  13034 0 18596 380856797 2013 2 2962 0 0 0\n"
                "btime 1448632481\n"
            ),
        })

        config = {
            'init_config': {
                'procfs_path': my_procfs
            },
            'instances': [{
                'name': 'moved_procfs',
                'search_string': [unique_process_name],
                'exact_match': False,
                'ignored_denied_access': True,
                'thresholds': {'warning': [1, 10], 'critical': [1, 100]},
            }]
        }

        version = int(psutil.__version__.replace(".", ""))
        try:
            def import_mock(name, i_globals={}, i_locals={}, fromlist=[], level=-1, orig_import=__import__):
                # _psutil_linux and _psutil_posix are the C bindings; use a mock for those
                if name in ('_psutil_linux', '_psutil_posix') or level >= 1 and ('_psutil_linux' in fromlist or '_psutil_posix' in fromlist):
                    m = MagicMock()
                    # the import system will ask us for our own name
                    m._psutil_linux = m
                    m._psutil_posix = m
                    # there's a version safety check in psutil/__init__.py; this skips it
                    m.version = version
                    return m
                return orig_import(name, i_globals, i_locals, fromlist, level)

            # contextlib.nested is deprecated in favor of `with MGR1, MGR2, ...`,
            # but we have too many mocks to fit on one line, and a backslash line
            # continuation is not flake8 compliant even when semantically required
            # (as here). patch is unlikely to raise errors that would be silently
            # suppressed, so the main downside of contextlib.nested is avoided.
            with contextlib.nested(patch('sys.platform', 'linux'),
                                   patch('socket.AF_PACKET', create=True),
                                   patch('__builtin__.__import__', side_effect=import_mock)):
                if not already_linux:
                    # Reloading psutil fails on linux, but we only need to do so if we didn't start out on a linux platform
                    reload(psutil)
                assert Platform.is_linux()

                self.run_check(config, mocks={'get_pagefault_stats': noop_get_pagefault_stats})
        finally:
            shutil.rmtree(my_procfs)
            if not already_linux:
                # restore the original psutil that doesn't have our mocks
                reload(psutil)
            else:
                psutil.PROCFS_PATH = '/proc'

        expected_tags = self.generate_expected_tags(config['instances'][0])
        self.assertServiceCheckOK('process.up', count=1, tags=['process:moved_procfs'])

        self.assertMetric('system.processes.number', at_least=1, tags=expected_tags)
        self.assertMetric('system.processes.threads', at_least=1, tags=expected_tags)
        self.assertMetric('system.processes.run_time.avg', at_least=1, tags=expected_tags)
        self.assertMetric('system.processes.run_time.max', at_least=1, tags=expected_tags)
        self.assertMetric('system.processes.run_time.min', at_least=1, tags=expected_tags)

        self.coverage_report()
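
The dd-agent test swaps out __import__ itself so that importing psutil's C extensions yields a MagicMock while every other import still goes through the real machinery. A much smaller sketch of the same idea; the module name is made up, and on Python 3 the builtin lives in builtins rather than __builtin__:

import builtins
import unittest

import mock


class ImportHookTest(unittest.TestCase):

    def test_fake_module_import(self):
        real_import = builtins.__import__

        def import_mock(name, *args, **kwargs):
            if name == '_some_c_extension':
                fake = mock.MagicMock()
                fake.version = 42
                return fake
            # Everything else is delegated to the real import machinery.
            return real_import(name, *args, **kwargs)

        with mock.patch('builtins.__import__', side_effect=import_mock):
            fake_module = __import__('_some_c_extension')
            self.assertEqual(fake_module.version, 42)
            import json  # unrelated imports still work
            self.assertTrue(hasattr(json, 'loads'))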

Example 124

Project: LASIF Source File: test_actions_component.py
@mock.patch("lasif.tools.Q_discrete.calculate_Q_model")
def test_adjoint_source_finalization_rotated_domain(patch, comm, capsys):
    """
    Tests the adjoint source finalization with a rotated domain.
    """
    # Speed up this test.
    patch.return_value = (np.array([1.6341, 1.0513, 1.5257]),
                          np.array([0.59496, 3.7119, 22.2171]))

    # Set some rotation angle to actually get some rotated things.
    comm.project.domain.rotation_angle_in_degree = 0.1

    comm.iterations.create_new_iteration(
        "1", "ses3d_4_1", comm.query.get_stations_for_all_events(), 8, 100)

    event_name = "GCMT_event_TURKEY_Mag_5.1_2010-3-24-14-11"
    event = comm.events.get(event_name)
    t = event["origin_time"]

    # Create iteration.
    it = comm.iterations.get("1")

    # Fake preprocessed data by copying the synthetics and perturbing them a
    # bit...
    stations = ["HL.ARG", "HT.SIGR"]
    np.random.seed(123456)
    for station in stations:
        s = comm.waveforms.get_waveforms_synthetic(event_name, station,
                                                   it.long_name)
        # Perturb data a bit.
        for tr in s:
            tr.data += np.random.random(len(tr.data)) * 2E-8
        path = comm.waveforms.get_waveform_folder(event_name, "processed",
                                                  it.processing_tag)
        if not os.path.exists(path):
            os.makedirs(path)
        for tr in s:
            tr.write(os.path.join(path, tr.id), format="mseed")

    window_group_manager = comm.windows.get(event, it)

    # Automatic window selection does not work for the terrible test data...
    # Now add only windows that actually have data and synthetics but the
    # data will be too bad to actually extract an adjoint source from.
    for chan in ["HL.ARG..BHE", "HL.ARG..BHN", "HL.ARG..BHZ"]:
        window_group = window_group_manager.get(chan)
        window_group.add_window(starttime=t + 100, endtime=t + 200)
        window_group.write()

    capsys.readouterr()

    # Make sure the data is rotated, as the domain is rotated here.
    rotate_data = rotations.rotate_data
    with mock.patch("lasif.rotations.rotate_data") as patch:
        patch.side_effect = \
            lambda *args, **kwargs: rotate_data(*args, **kwargs)
        comm.actions.finalize_adjoint_sources(it.name, event_name)
    # Once for each synthetic and once for the adjoint source.
    assert patch.call_count == 4

    out, _ = capsys.readouterr()
    assert "Wrote adjoint sources for 1 station(s)" in out

    # Make sure the adjoint sources were actually written.
    out = os.path.join(comm.project.paths["output"], "adjoint_sources")
    adj_src_dir = os.path.join(out, os.listdir(out)[0])

    assert os.path.exists(adj_src_dir)
    assert sorted(os.listdir(adj_src_dir)) == sorted(["ad_srcfile",
                                                      "ad_src_1"])

Example 125

Project: anitya Source File: test_flask.py
    @mock.patch('anitya.check_release')
    def test_edit_project(self, patched):
        """ Test the edit_project function. """
        create_distro(self.session)
        create_project(self.session)

        output = self.app.get('/project/1/edit', follow_redirects=True)
        self.assertEqual(output.status_code, 200)
        self.assertTrue(
            b'<ul id="flashes" class="list-group">'
            b'<li class="list-group-item list-group-item-warning">'
            b'Login required</li></ul>' in output.data)

        projects = model.Project.all(self.session)
        self.assertEqual(len(projects), 3)
        self.assertEqual(projects[0].name, 'geany')
        self.assertEqual(projects[0].id, 1)
        self.assertEqual(projects[1].name, 'R2spec')
        self.assertEqual(projects[1].id, 3)
        self.assertEqual(projects[2].name, 'subsurface')
        self.assertEqual(projects[2].id, 2)

        with anitya.app.APP.test_client() as c:
            with c.session_transaction() as sess:
                sess['openid'] = 'openid_url'
                sess['fullname'] = 'Pierre-Yves C.'
                sess['nickname'] = 'pingou'
                sess['email'] = '[email protected]'

            output = c.get('/project/10/edit', follow_redirects=True)
            self.assertEqual(output.status_code, 404)

            output = c.get('/project/1/edit', follow_redirects=True)
            self.assertEqual(output.status_code, 200)

            self.assertTrue(b'<h1>Edit project</h1>' in output.data)
            self.assertTrue(
                b'<td><label for="regex">Regex</label></td>' in output.data)

            data = {
                'name': 'repo_manager',
                'homepage': 'https://pypi.python.org/pypi/repo_manager',
                'backend': 'PyPI',
            }

            output = c.post(
                '/project/1/edit', data=data, follow_redirects=True)
            self.assertEqual(output.status_code, 200)
            self.assertTrue(b'<h1>Edit project</h1>' in output.data)
            self.assertTrue(
                b'<td><label for="regex">Regex</label></td>' in output.data)

            # This should work just fine
            csrf_token = output.data.split(
                b'name="csrf_token" type="hidden" value="')[1].split(b'">')[0]

            data['csrf_token'] = csrf_token

            output = c.post(
                '/project/1/edit', data=data, follow_redirects=True)
            self.assertEqual(output.status_code, 200)
            self.assertTrue(
                b'<li class="list-group-item list-group-item-default">'
                b'Project edited</li>' in output.data)
            self.assertTrue(
                b'<h1>Project: repo_manager</h1>' in output.data)

            # This should fail, the R2spec project already exists
            data = {
                'name': 'R2spec',
                'homepage': 'https://fedorahosted.org/r2spec/',
                'backend': 'folder',
                'csrf_token': csrf_token,
            }

            output = c.post(
                '/project/1/edit', data=data, follow_redirects=True)
            self.assertEqual(output.status_code, 200)
            self.assertTrue(
                b'<li class="list-group-item list-group-item-warning">'
                b'Could not edit this project. Is there '
                b'already a project with these name and homepage?</li>'
                in output.data)
            self.assertTrue(
                b'<h1>Project: repo_manager</h1>' in output.data)

            # check_release off
            output = c.post(
                '/project/3/edit', data=data, follow_redirects=True)
            self.assertEqual(output.status_code, 200)
            self.assertTrue(
                b'<li class="list-group-item list-group-item-default">'
                b'Project edited</li>' in output.data)
            patched.assert_not_called()

            # check_release on
            data['check_release'] = 'on'
            output = c.post(
                '/project/3/edit', data=data, follow_redirects=True)
            self.assertEqual(output.status_code, 200)
            self.assertTrue(
                b'<li class="list-group-item list-group-item-default">'
                b'Project edited</li>' in output.data)
            patched.assert_called_once_with(mock.ANY, mock.ANY)

        projects = model.Project.all(self.session)
        self.assertEqual(len(projects), 3)
        self.assertEqual(projects[0].name, 'R2spec')
        self.assertEqual(projects[0].id, 3)
        self.assertEqual(projects[1].name, 'repo_manager')
        self.assertEqual(projects[1].id, 1)
        self.assertEqual(projects[2].name, 'subsurface')
        self.assertEqual(projects[2].id, 2)
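
The anitya test ends by checking that the patched check_release hook is not called for one request and called exactly once for the other, with mock.ANY standing in for arguments the test does not care about. A short sketch of those assertion helpers; edit_project and notify_hook below are invented names:

import mock


def notify_hook(name, payload):
    # Stand-in for a hook that would reach a real backend.
    raise RuntimeError('real notification backend')


def edit_project(name, notify):
    # Hypothetical code under test: only notifies when asked to.
    if notify:
        notify_hook(name, {'changed': True})
    return name


def test_notify_hook_assertions():
    with mock.patch(__name__ + '.notify_hook') as patched:
        edit_project('geany', notify=False)
        patched.assert_not_called()

        edit_project('geany', notify=True)
        # mock.ANY matches anything, so only the call shape is pinned down.
        patched.assert_called_once_with('geany', mock.ANY)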

Example 126

Project: PyEMMA Source File: estimator.py
def estimate_param_scan(estimator, X, param_sets, evaluate=None, evaluate_args=None, failfast=True,
                        return_estimators=False, n_jobs=1, progress_reporter=None, show_progress=True):
    """ Runs multiple estimations using a list of parameter settings

    Parameters
    ----------
    estimator : Estimator object or class
        An estimator object that provides an estimate(X, **params) function.
        If only a class is provided here, the Estimator objects will be
        constructed with default parameter settings, and the parameter settings
        from param_sets for each estimation. If you want to specify other
        parameter settings for those parameters not specified in param_sets,
        construct an Estimator before and pass the object.

    param_sets : iterable over dictionaries
        An iterable that provides parameter settings. Each element defines a
        parameter set, for which an estimation will be run using these
        parameters in estimate(X, **params). All other parameter settings will
        be taken from the default settings in the estimator object.

    evaluate : str or list of str, optional
        The given methods or properties will be called on the estimated
        models, and their results will be returned instead of the full models.
        This may be useful for reducing memory overhead.

    evaluate_args: iterable of iterable, optional
        Arguments to be passed to the evaluated methods. Note that its size has to match the size of evaluate.

    failfast : bool
        If True, an exception is raised when an estimation fails with an
        exception or a method that doesn't exist is requested. If False,
        None is simply returned in these cases.

    return_estimators: bool
        If True, return a list of estimators in addition to the models.

    show_progress: bool
        If the given estimator supports the show_progress interface, the flag
        is set prior to running the estimations.

    Returns
    -------
    models : list of model objects or evaluated function values
        A list of estimated models in the same order as param_sets. If evaluate
        is given, each element will contain the results from these method
        evaluations.

    estimators (optional) : list of estimator objects. These are returned only
        if return_estimators=True

    Examples
    --------

    Estimate a maximum likelihood Markov model at lag times 1, 2, 3.

    >>> from pyemma.msm.estimators import MaximumLikelihoodMSM, BayesianMSM
    >>>
    >>> dtraj = [0,0,1,2,1,0,1,0,1,2,2,0,0,0,1,1,2,1,0,0,1,2,1,0,0,0,1,1,0,1,2]  # mini-trajectory
    >>> param_sets=param_grid({'lag': [1,2,3]})
    >>>
    >>> estimate_param_scan(MaximumLikelihoodMSM, dtraj, param_sets, evaluate='timescales')
    [array([ 1.24113168,  0.77454377]), array([ 2.48226337,  1.54908754]), array([ 3.72339505,  2.32363131])]

    Now we also ask for samples of the timescales, still using the MaximumLikelihoodMSM.
    >>> estimate_param_scan(MaximumLikelihoodMSM, dtraj, param_sets, failfast=False,
    ...     evaluate=['timescales', 'timescales_samples']) # doctest: +SKIP
    [[array([ 1.24113168,  0.77454377]), None], [array([ 2.48226337,  1.54908754]), None], [array([ 3.72339505,  2.32363131]), None]]

    We get Nones because the MaximumLikelihoodMSM estimator doesn't provide timescales_samples. Use for example
    a Bayesian estimator for that.

    Now we also want to get samples of the timescales using the BayesianMSM.
    >>> estimate_param_scan(BayesianMSM, dtraj, param_sets, show_progress=False,
    ...     evaluate=['timescales', 'sample_f'], evaluate_args=((), ('timescales', ))) # doctest: +SKIP
    [[array([ 1.24357685,  0.77609028]), [array([ 1.5963252 ,  0.73877883]), array([ 1.29915847,  0.49004912]), array([ 0.90058583,  0.73841786]), ... ]]

    """
    # make sure we have an estimator object
    estimator = get_estimator(estimator)
    if hasattr(estimator, 'show_progress'):
        estimator.show_progress = show_progress
    # if we want to return estimators, make clones. Otherwise just copy references.
    # For parallel processing we always need clones
    if return_estimators or n_jobs > 1 or n_jobs is None:
        estimators = [clone_estimator(estimator) for _ in param_sets]
    else:
        estimators = [estimator for _ in param_sets]

    # if we evaluate, make sure we have a list of functions to evaluate
    if _types.is_string(evaluate):
        evaluate = [evaluate]
    if _types.is_string(evaluate_args):
        evaluate_args = [evaluate_args]

    if evaluate is not None and evaluate_args is not None and len(evaluate) != len(evaluate_args):
        raise ValueError("length mismatch: evaluate ({}) and evaluate_args ({})".format(len(evaluate), len(evaluate_args)))

    # set call back for joblib
    if progress_reporter is not None and show_progress:
        progress_reporter._progress_register(len(estimators), stage=0,
                                             description="estimating %s" % str(estimator.__class__.__name__))

        if n_jobs > 1:
            try:
                from joblib.parallel import BatchCompletionCallBack
                batch_comp_call_back = True
            except ImportError:
                from joblib.parallel import CallBack as BatchCompletionCallBack
                batch_comp_call_back = False

            class CallBack(BatchCompletionCallBack):
                def __init__(self, *args, **kw):
                    self.reporter = progress_reporter
                    super(CallBack, self).__init__(*args, **kw)

                def __call__(self, *args, **kw):
                    self.reporter._progress_update(1, stage=0)
                    super(CallBack, self).__call__(*args, **kw)

            import joblib.parallel
            if batch_comp_call_back:
                joblib.parallel.BatchCompletionCallBack = CallBack
            else:
                joblib.parallel.CallBack = CallBack
        else:
            def _print(msg, msg_args):
                # NOTE: this is an ugly hack: if we only use one job we do not
                # get the joblib callback interface, so as a workaround we use
                # the Parallel._print function, which is called with
                # msg_args = (done_jobs, total_jobs)
                if len(msg_args) == 2:
                    progress_reporter._progress_update(1, stage=0)

    # iterate over parameter settings
    from joblib import Parallel
    import joblib, mock, six

    if six.PY34:
        from multiprocessing import get_context
        try:
            ctx = get_context(method='forkserver')
        except ValueError:  # forkserver NA
            try:
                # this is slower in creation, but will not use as much memory!
                ctx = get_context(method='spawn')
            except ValueError:
                ctx = get_context(None)
                print("WARNING: using default multiprocessing start method {}. "
                      "This could potentially lead to memory issues.".format(ctx))

        with mock.patch('joblib.parallel.DEFAULT_MP_CONTEXT', ctx):
            pool = Parallel(n_jobs=n_jobs)
    else:
        pool = Parallel(n_jobs=n_jobs)

    if progress_reporter is not None and n_jobs == 1:
        pool._print = _print
        # NOTE: verbose has to be set, otherwise our print hack does not work.
        pool.verbose = 50

    if n_jobs > 1:
        # if n_jobs=1 don't invoke the pool, but directly dispatch the iterator
        task_iter = (joblib.delayed(_estimate_param_scan_worker)(estimators[i],
                                                                 param_sets[i], X,
                                                                 evaluate,
                                                                 evaluate_args,
                                                                 failfast,
                                                                 )
                     for i in range(len(param_sets)))

        # container for model or function evaluations
        res = pool(task_iter)
    else:
        res = []
        for i, param in enumerate(param_sets):
            res.append(_estimate_param_scan_worker(estimators[i], param, X,
                                                   evaluate, evaluate_args, failfast))
            if progress_reporter is not None and show_progress:
                progress_reporter._progress_update(1, stage=0)

    if progress_reporter is not None and show_progress:
        progress_reporter._progress_force_finish(0)

    # done
    if return_estimators:
        return res, estimators
    else:
        return res
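
Just before building the joblib pool, estimate_param_scan uses mock.patch as a plain context manager to swap joblib.parallel.DEFAULT_MP_CONTEXT for the chosen multiprocessing context, without touching joblib's code. A minimal sketch of temporarily overriding a module-level constant this way; DEFAULT_WORKERS and build_pool are made-up names:

import mock

# Hypothetical module-level setting read by the code under test.
DEFAULT_WORKERS = 4


def build_pool():
    # Reads the constant at call time, so a temporary patch is visible here.
    return {'workers': DEFAULT_WORKERS}


def test_pool_built_with_patched_constant():
    with mock.patch(__name__ + '.DEFAULT_WORKERS', 1):
        assert build_pool() == {'workers': 1}
    # Outside the with block the original value is back.
    assert build_pool() == {'workers': 4}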

Example 127

Project: makina-states.pack1 Source File: makina_grains_tests.py
    def test_pg(self):
        with contextlib.nested(
            patch(
                'os.path.exists', MagicMock(return_value=False)
            ),
            patch(
                'os.listdir', MagicMock(return_value=0)
            )
        ):
            fun = self.get_private('makina_grains._pgsql_vers')
            ret = fun()
            self.assertEquals(ret['details'], {})
            self.assertEquals(ret['global'], {})

        def do_(path):
            if path in [
                '/var/lib/postgresql/9.0/main/postmaster.pid'
            ]:
                return True
            return False

        with contextlib.nested(
            patch(
                'os.path.exists', MagicMock(side_effect=do_)
            ),
            patch(
                'os.listdir', MagicMock(return_value=0)
            )
        ):
            fun = self.get_private('makina_grains._pgsql_vers')
            ret = fun()
            self.assertEquals(ret['global'], {'9.0': True})
            self.assertEquals(ret['details'],
                              {'9.0': {'has_data': False, 'running': True}})

        def do_(path):
            if path in [
                '/var/lib/postgresql/9.0/main/postmaster.pid',
                '/var/lib/postgresql/9.0/main/base',
                '/var/lib/postgresql/9.0/main/globalbase'
            ]:
                return True
            return False

        with contextlib.nested(
            patch(
                'os.path.exists', MagicMock(side_effect=do_)
            ),
            patch(
                'os.listdir', MagicMock(return_value=0)
            )
        ):
            fun = self.get_private('makina_grains._pgsql_vers')
            ret = fun()
            self.assertEquals(ret['global'], {'9.0': True})
            self.assertEquals(ret['details'],
                              {'9.0': {'has_data': False, 'running': True}})

        def do_(path):
            if path in [
                '/var/lib/postgresql/9.0/main/postmaster.pid',
                '/var/lib/postgresql/9.0/main/base',
                '/var/lib/postgresql/9.0/main/globalbase'
            ]:
                return True
            return False

        with contextlib.nested(
            patch(
                'os.path.exists', MagicMock(side_effect=do_)
            ),
            patch(
                'os.listdir', MagicMock(return_value=3)
            )
        ):
            fun = self.get_private('makina_grains._pgsql_vers')
            ret = fun()
            self.assertEquals(ret['global'], {'9.0': True})
            self.assertEquals(ret['details'],
                              {'9.0': {'has_data': True, 'running': True}})
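
contextlib.nested, used above to stack several patches, only exists on Python 2; the same multi-patch setup can be written with contextlib.ExitStack on Python 3.3+ (or simply with several context managers in one with statement). A hedged sketch of the ExitStack form, reusing the os.path.exists / os.listdir targets from the example:

import contextlib
import os

import mock


def test_multiple_patches_with_exitstack():
    with contextlib.ExitStack() as stack:
        exists = stack.enter_context(
            mock.patch('os.path.exists', return_value=False))
        listdir = stack.enter_context(
            mock.patch('os.listdir', return_value=[]))

        assert os.path.exists('/var/lib/postgresql') is False
        assert os.listdir('/var/lib/postgresql') == []
        assert exists.call_count == 1
        assert listdir.call_count == 1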

Example 128

Project: sentry Source File: sentry.py
def pytest_configure(config):
    # HACK: Only needed for testing!
    os.environ.setdefault('_SENTRY_SKIP_CONFIGURATION', '1')

    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sentry.conf.server')

    if not settings.configured:
        # only configure the db if its not already done
        test_db = os.environ.get('DB', 'postgres')
        if test_db == 'mysql':
            settings.DATABASES['default'].update({
                'ENGINE': 'django.db.backends.mysql',
                'NAME': 'sentry',
                'USER': 'root',
                'HOST': '127.0.0.1',
            })
            # mysql requires running full migration all the time
            settings.SOUTH_TESTS_MIGRATE = True
        elif test_db == 'postgres':
            settings.DATABASES['default'].update({
                'ENGINE': 'sentry.db.postgres',
                'USER': 'postgres',
                'NAME': 'sentry',
            })
            # postgres requires running full migration all the time
            # since it has to install stored functions which come from
            # an actual migration.
            settings.SOUTH_TESTS_MIGRATE = True
        elif test_db == 'sqlite':
            settings.DATABASES['default'].update({
                'ENGINE': 'django.db.backends.sqlite3',
                'NAME': ':memory:',
            })
            settings.SOUTH_TESTS_MIGRATE = os.environ.get('SENTRY_SOUTH_TESTS_MIGRATE', '1') == '1'
        else:
            raise RuntimeError('oops, wrong database: %r' % test_db)

    settings.TEMPLATE_DEBUG = True

    # Disable static compiling in tests
    settings.STATIC_BUNDLES = {}

    # override a few things with our test specifics
    settings.INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + (
        'tests',
    )
    # Need a predictable key for tests that involve checking signatures
    settings.SENTRY_PUBLIC = False

    if not settings.SENTRY_CACHE:
        settings.SENTRY_CACHE = 'sentry.cache.django.DjangoCache'
        settings.SENTRY_CACHE_OPTIONS = {}

    # This speeds up the tests considerably; pbkdf2 is slow by design.
    settings.PASSWORD_HASHERS = [
        'django.contrib.auth.hashers.MD5PasswordHasher',
    ]

    # Replace real sudo middleware with our mock sudo middleware
    # to assert that the user is always in sudo mode
    middleware = list(settings.MIDDLEWARE_CLASSES)
    sudo = middleware.index('sentry.middleware.sudo.SudoMiddleware')
    middleware[sudo] = 'sentry.testutils.middleware.SudoMiddleware'
    settings.MIDDLEWARE_CLASSES = tuple(middleware)

    # enable draft features
    settings.SENTRY_OPTIONS['mail.enable-replies'] = True

    settings.SENTRY_ALLOW_ORIGIN = '*'

    settings.SENTRY_TSDB = 'sentry.tsdb.inmemory.InMemoryTSDB'
    settings.SENTRY_TSDB_OPTIONS = {}

    settings.BROKER_BACKEND = 'memory'
    settings.BROKER_URL = None
    settings.CELERY_ALWAYS_EAGER = False
    settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True

    settings.DEBUG_VIEWS = True

    settings.DISABLE_RAVEN = True

    settings.CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        }
    }

    if not hasattr(settings, 'SENTRY_OPTIONS'):
        settings.SENTRY_OPTIONS = {}

    settings.SENTRY_OPTIONS.update({
        'redis.clusters': {
            'default': {
                'hosts': {
                    0: {
                        'db': 9,
                    },
                },
            },
        },
        'mail.backend': 'django.core.mail.backends.locmem.EmailBackend',
        'system.url-prefix': 'http://testserver',
    })

    # django mail uses socket.getfqdn which doesn't play nice if our
    # networking isn't stable
    patcher = mock.patch('socket.getfqdn', return_value='localhost')
    patcher.start()

    from sentry.runner.initializer import (
        bootstrap_options, configure_structlog, initialize_receivers, fix_south,
        bind_cache_to_option_store)

    bootstrap_options(settings)
    configure_structlog()
    fix_south(settings)

    bind_cache_to_option_store()

    initialize_receivers()

    from sentry.plugins import plugins
    from sentry.plugins.utils import TestIssuePlugin2

    plugins.register(TestIssuePlugin2)

    from sentry.utils.redis import clusters

    with clusters.get('default').all() as client:
        client.flushdb()

    # force celery registration
    from sentry.celery import app  # NOQA

    # disable DISALLOWED_IPS
    from sentry import http
    http.DISALLOWED_IPS = set()

Example 129

Project: minio-py Source File: list_incomplete_uploads_test.py
    @mock.patch('urllib3.PoolManager')
    def test_list_multipart_uploads_works(self, mock_connection):
        mock_data1 = '''<?xml version="1.0"?>
                        <ListMultipartUploadsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
                          <Bucket>golang</Bucket>
                          <KeyMarker/>
                          <UploadIdMarker/>
                          <NextKeyMarker>keymarker</NextKeyMarker>
                          <NextUploadIdMarker>uploadidmarker</NextUploadIdMarker>
                          <EncodingType/>
                          <MaxUploads>1000</MaxUploads>
                          <IsTruncated>true</IsTruncated>
                          <Upload>
                            <Key>go1.4.2</Key>
                            <UploadId>uploadid</UploadId>
                            <Initiator>
                              <ID/>
                              <DisplayName/>
                            </Initiator>
                            <Owner>
                              <ID/>
                              <DisplayName/>
                            </Owner>
                            <StorageClass/>
                            <Initiated>2015-05-30T14:43:35.349Z</Initiated>
                          </Upload>
                          <Upload>
                            <Key>go1.5.0</Key>
                            <UploadId>uploadid2</UploadId>
                            <Initiator>
                              <ID/>
                              <DisplayName/>
                            </Initiator>
                            <Owner>
                              <ID/>
                              <DisplayName/>
                            </Owner>
                            <StorageClass/>
                            <Initiated>2015-05-30T15:00:07.759Z</Initiated>
                          </Upload>
                          <Prefix/>
                          <Delimiter/>
                        </ListMultipartUploadsResult>
                     '''
        mock_data2 = '''<?xml version="1.0"?>
                        <ListMultipartUploadsResult xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
                          <Bucket>golang</Bucket>
                          <KeyMarker/>
                          <UploadIdMarker/>
                          <NextKeyMarker/>
                          <NextUploadIdMarker/>
                          <EncodingType/>
                          <MaxUploads>1000</MaxUploads>
                          <IsTruncated>false</IsTruncated>
                          <Upload>
                            <Key>go1.4.2</Key>
                            <UploadId>uploadid</UploadId>
                            <Initiator>
                              <ID/>
                              <DisplayName/>
                            </Initiator>
                            <Owner>
                              <ID/>
                              <DisplayName/>
                            </Owner>
                            <StorageClass/>
                            <Initiated>2015-05-30T14:43:35.349Z</Initiated>
                          </Upload>
                          <Upload>
                            <Key>go1.5.0</Key>
                            <UploadId>uploadid2</UploadId>
                            <Initiator>
                              <ID/>
                              <DisplayName/>
                            </Initiator>
                            <Owner>
                              <ID/>
                              <DisplayName/>
                            </Owner>
                            <StorageClass/>
                            <Initiated>2015-05-30T15:00:07.759Z</Initiated>
                          </Upload>
                          <Prefix/>
                          <Delimiter/>
                        </ListMultipartUploadsResult>
                     '''
        mock_server = MockConnection()
        mock_connection.return_value = mock_server
        mock_server.mock_add_request(
            MockResponse('GET',
                         'https://localhost:9000/bucket/?max-uploads=1000&uploads',
                         {'User-Agent': _DEFAULT_USER_AGENT}, 200, content=mock_data1))

        client = Minio('localhost:9000')
        upload_iter = client._list_incomplete_uploads('bucket', '', True, False)
        uploads = []
        for upload in upload_iter:
            mock_server.mock_add_request(MockResponse('GET',
                                                      'https://localhost:9000/bucket/?'
                                                      'key-marker=keymarker&'
                                                      'max-uploads=1000&'
                                                      'upload-id-marker=uploadidmarker&uploads',
                                                      {'User-Agent': _DEFAULT_USER_AGENT}, 200, content=mock_data2))
            uploads.append(upload)

        eq_(4, len(uploads))
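
The minio test replaces urllib3.PoolManager so that the client's HTTP layer is backed by a scripted fake connection whose canned XML responses drive the pagination loop. A reduced sketch of the same "constructor returns my fake" pattern; make_pool, Client, and the endpoint below are illustrative only, not minio's real API:

import mock


def make_pool():
    # Stand-in for urllib3.PoolManager(); the real one would open sockets.
    raise RuntimeError('real connection pool')


class Client(object):
    # Hypothetical client that builds its connection pool on construction.
    def __init__(self, endpoint):
        self._http = make_pool()
        self._endpoint = endpoint

    def list_buckets(self):
        resp = self._http.request('GET', self._endpoint + '/')
        return resp.data.split(',')


def test_list_buckets_uses_fake_pool():
    fake_pool = mock.MagicMock()
    fake_pool.request.return_value.data = 'alpha,beta'
    with mock.patch(__name__ + '.make_pool', return_value=fake_pool):
        client = Client('https://localhost:9000')
        assert client.list_buckets() == ['alpha', 'beta']
    fake_pool.request.assert_called_once_with('GET', 'https://localhost:9000/')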

Example 130

Project: dockercloud-cli Source File: test_parser.py
    @mock.patch('dockercloudcli.cli.commands')
    def test_service_dispatch(self, mock_cmds):
        args = self.parser.parse_args(['service', 'create', 'mysql'])
        dispatch_cmds(args)
        mock_cmds.service_create.assert_called_with(image=args.image, name=args.name, cpu_shares=args.cpushares,
                                                    memory=args.memory,
                                                    target_num_containers=args.target_num_containers,
                                                    privileged=args.privileged,
                                                    run_command=args.run_command,
                                                    entrypoint=args.entrypoint, expose=args.expose,
                                                    publish=args.publish,
                                                    envvars=args.env, envfiles=args.env_file, tag=args.tag,
                                                    linked_to_service=args.link_service,
                                                    autorestart=args.autorestart, autodestroy=args.autodestroy,
                                                    autoredeploy=args.autoredeploy, roles=args.role,
                                                    sequential=args.sequential,
                                                    volume=args.volume, volumes_from=args.volumes_from,
                                                    deployment_strategy=args.deployment_strategy, sync=args.sync,
                                                    net=args.net, pid=args.pid)

        args = self.parser.parse_args(['service', 'inspect', 'id'])
        dispatch_cmds(args)
        mock_cmds.service_inspect.assert_called_with(args.identifier)

        args = self.parser.parse_args(['service', 'logs', 'id'])
        dispatch_cmds(args)
        mock_cmds.service_logs.assert_called_with(args.identifier, None, False)

        args = self.parser.parse_args(['service', 'ps'])
        dispatch_cmds(args)
        mock_cmds.service_ps.assert_called_with(args.quiet, args.status, args.stack)

        args = self.parser.parse_args(['service', 'redeploy', 'mysql'])
        dispatch_cmds(args)
        mock_cmds.service_redeploy.assert_called_with(args.identifier, args.not_reuse_volumes, args.sync)

        args = self.parser.parse_args(['service', 'run', 'mysql'])
        dispatch_cmds(args)
        mock_cmds.service_run.assert_called_with(image=args.image, name=args.name, cpu_shares=args.cpushares,
                                                 memory=args.memory, target_num_containers=args.target_num_containers,
                                                 privileged=args.privileged,
                                                 run_command=args.run_command,
                                                 entrypoint=args.entrypoint, expose=args.expose, publish=args.publish,
                                                 envvars=args.env, envfiles=args.env_file, tag=args.tag,
                                                 linked_to_service=args.link_service,
                                                 autorestart=args.autorestart, autodestroy=args.autodestroy,
                                                 autoredeploy=args.autoredeploy, roles=args.role,
                                                 sequential=args.sequential,
                                                 volume=args.volume, volumes_from=args.volumes_from,
                                                 deployment_strategy=args.deployment_strategy, sync=args.sync,
                                                 net=args.net, pid=args.pid)

        args = self.parser.parse_args(['service', 'scale', 'id', '3'])
        dispatch_cmds(args)
        mock_cmds.service_scale.assert_called_with(args.identifier, args.target_num_containers, args.sync)

        args = self.parser.parse_args(['service', 'set', 'id'])
        dispatch_cmds(args)
        mock_cmds.service_set.assert_called_with(args.identifier, image=args.image, cpu_shares=args.cpushares,
                                                 memory=args.memory, privileged=args.privileged,
                                                 target_num_containers=args.target_num_containers,
                                                 run_command=args.run_command,
                                                 entrypoint=args.entrypoint, expose=args.expose, publish=args.publish,
                                                 envvars=args.env, envfiles=args.env_file,
                                                 tag=args.tag, linked_to_service=args.link_service,
                                                 autorestart=args.autorestart, autodestroy=args.autodestroy,
                                                 autoredeploy=args.autoredeploy, roles=args.role,
                                                 sequential=args.sequential, redeploy=args.redeploy,
                                                 volume=args.volume, volumes_from=args.volumes_from,
                                                 deployment_strategy=args.deployment_strategy, sync=args.sync,
                                                 net=args.net, pid=args.pid)

        args = self.parser.parse_args(['service', 'start', 'id'])
        dispatch_cmds(args)
        mock_cmds.service_start.assert_called_with(args.identifier, args.sync)

        args = self.parser.parse_args(['service', 'stop', 'id'])
        dispatch_cmds(args)
        mock_cmds.service_stop.assert_called_with(args.identifier, args.sync)

        args = self.parser.parse_args(['service', 'terminate', 'id'])
        dispatch_cmds(args)
        mock_cmds.service_terminate.assert_called_with(args.identifier, args.sync)

        args = self.parser.parse_args(['service', 'env', 'add', 'id', '--env', 'abc=abc'])
        dispatch_cmds(args)
        mock_cmds.service_env_add.assert_called_with(args.identifier, envvars=args.env, envfiles=args.env_file,
                                                     redeploy=args.redeploy, sync=args.sync)

        args = self.parser.parse_args(['service', 'env', 'set', 'id', '--env', 'abc=abc'])
        dispatch_cmds(args)
        mock_cmds.service_env_set.assert_called_with(args.identifier, envvars=args.env, envfiles=args.env_file,
                                                     redeploy=args.redeploy, sync=args.sync)

        args = self.parser.parse_args(['service', 'env', 'update', 'id', '--env', 'abc=abc'])
        dispatch_cmds(args)
        mock_cmds.service_env_update.assert_called_with(args.identifier, envvars=args.env, envfiles=args.env_file,
                                                        redeploy=args.redeploy, sync=args.sync)

        args = self.parser.parse_args(['service', 'env', 'rm', 'id', '--name', 'abc'])
        dispatch_cmds(args)
        mock_cmds.service_env_rm.assert_called_with(args.identifier, names=args.name,
                                                    redeploy=args.redeploy, sync=args.sync)

        args = self.parser.parse_args(['service', 'env', 'ls', 'id'])
        dispatch_cmds(args)
        mock_cmds.service_env_ls.assert_called_with(args.identifier, args.quiet, args.user, args.image,
                                                    args.dockercloud)
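
The dispatch test above boils down to one pattern: patch the commands module the CLI delegates to, feed the parser real argv, and assert the exact keyword arguments that reach the command function. Below is a hypothetical, self-contained sketch of that pattern; the parser layout, dispatch() and the cmds stand-in are invented, not dockercloud-cli's actual code.

import argparse
import unittest
from unittest import mock  # the examples above use the standalone "mock" backport


class cmds(object):
    """Hypothetical stand-in for the commands module the CLI dispatches to."""
    @staticmethod
    def service_create(image, name):
        raise RuntimeError('never reached; patched out in the test')


def build_parser():
    parser = argparse.ArgumentParser()
    sub = parser.add_subparsers(dest='cmd')
    create = sub.add_parser('create')
    create.add_argument('image')
    create.add_argument('--name')
    return parser


def dispatch(args):
    # Code under test: route parsed arguments to the commands layer.
    if args.cmd == 'create':
        cmds.service_create(image=args.image, name=args.name)


class DispatchTest(unittest.TestCase):
    @mock.patch(__name__ + '.cmds')
    def test_create_dispatch(self, mock_cmds):
        args = build_parser().parse_args(['create', 'mysql', '--name', 'db'])
        dispatch(args)
        # Assert the exact keyword arguments, as the example above does.
        mock_cmds.service_create.assert_called_with(image='mysql', name='db')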

Example 131

Project: treadmill Source File: finish_test.py
    @mock.patch('kazoo.client.KazooClient.exists', mock.Mock())
    @mock.patch('kazoo.client.KazooClient.get_children', mock.Mock())
    @mock.patch('shutil.copy', mock.Mock())
    @mock.patch('treadmill.appevents.post', mock.Mock())
    @mock.patch('treadmill.utils.datetime_utcnow', mock.Mock(
        return_value=datetime.datetime(2015, 1, 22, 14, 14, 36, 537918)))
    @mock.patch('treadmill.appmgr.manifest.read', mock.Mock())
    @mock.patch('treadmill.appmgr.finish._kill_apps_by_root', mock.Mock())
    @mock.patch('treadmill.appmgr.finish._send_container_archive', mock.Mock())
    @mock.patch('treadmill.sysinfo.hostname',
                mock.Mock(return_value='xxx.xx.com'))
    @mock.patch('treadmill.fs.archive_filesystem',
                mock.Mock(return_value=True))
    @mock.patch('treadmill.iptables.rm_ip_set', mock.Mock())
    @mock.patch('treadmill.subproc.call', mock.Mock(return_value=0))
    @mock.patch('treadmill.subproc.check_call', mock.Mock())
    @mock.patch('treadmill.subproc.invoke', mock.Mock())
    @mock.patch('treadmill.zkutils.connect', mock.Mock())
    @mock.patch('treadmill.zkutils.ensure_deleted', mock.Mock())
    @mock.patch('treadmill.zkutils.get',
                mock.Mock(return_value={
                    'server': 'nonexist',
                    'auth': 'nonexist',
                }))
    @mock.patch('treadmill.zkutils.put', mock.Mock())
    @mock.patch('treadmill.rrdutils.flush_noexc', mock.Mock())
    def test_finish(self):
        """Tests container finish procedure and freeing of the resources.
        """
        # Access the protected member _kill_apps_by_root
        # pylint: disable=W0212
        manifest = {
            'app': 'proid.myapp',
            'cell': 'test',
            'cpu': '100%',
            'disk': '100G',
            'environment': 'dev',
            'host_ip': '172.31.81.67',
            'memory': '100M',
            'name': 'proid.myapp#001',
            'proid': 'foo',
            'shared_network': False,
            'task': '001',
            'uniqueid': '0000000ID1234',
            'archive': [
                '/var/tmp/treadmill'
            ],
            'endpoints': [
                {
                    'port': 8000,
                    'name': 'http',
                    'real_port': 5000
                },
                {
                    'port': 54321,
                    'type': 'infra',
                    'name': 'ssh',
                    'real_port': 54321
                }
            ],
            'ephemeral_ports': [
                45024,
                62422
            ],
            'services': [
                {
                    'name': 'web_server',
                    'command': '/bin/false',
                    'restart': {
                        'limit': 3,
                        'interval': 60,
                    },
                }
            ],
        }
        treadmill.appmgr.manifest.read.return_value = manifest
        app_unique_name = 'proid.myapp-001-0000000ID1234'
        mock_cgroup_client = self.app_env.svc_cgroup.make_client.return_value
        mock_ld_client = self.app_env.svc_localdisk.make_client.return_value
        localdisk = {
            'block_dev': '/dev/foo',
        }
        mock_ld_client.get.return_value = localdisk
        mock_nwrk_client = self.app_env.svc_network.make_client.return_value
        network = {
            'vip': '192.168.0.2',
            'gateway': '192.168.254.254',
            'veth': 'testveth.0',
        }
        mock_nwrk_client.get.return_value = network
        app_dir = os.path.join(self.app_env.apps_dir, app_unique_name)
        # Create content in app root directory, verify that it is archived.
        fs.mkdir_safe(os.path.join(app_dir, 'root', 'xxx'))
        fs.mkdir_safe(os.path.join(app_dir, 'services'))
        # Simulate the daemontools finish script, marking the app as done.
        with open(os.path.join(app_dir, 'exitinfo'), 'w') as f:
            f.write(yaml.dump({'service': 'web_server', 'rc': 0, 'sig': 0}))
        kazoo.client.KazooClient.exists.return_value = True
        kazoo.client.KazooClient.get_children.return_value = []

        zkclient = kazoo.client.KazooClient()
        app_finish.finish(self.app_env, zkclient, app_dir)

        self.app_env.watchdogs.create.assert_called_with(
            'treadmill.appmgr.finish-' + app_unique_name,
            '5m',
            mock.ANY
        )
        treadmill.subproc.check_call.assert_has_calls(
            [
                mock.call(
                    [
                        's6-svc',
                        '-d',
                        app_dir,
                    ]
                ),
                mock.call(
                    [
                        's6-svwait',
                        '-d',
                        app_dir,
                    ]
                ),
            ]
        )
        self.app_env.svc_cgroup.make_client.assert_called_with(
            os.path.join(app_dir, 'cgroups')
        )
        self.app_env.svc_localdisk.make_client.assert_called_with(
            os.path.join(app_dir, 'localdisk')
        )
        treadmill.appmgr.finish._kill_apps_by_root.assert_called_with(
            os.path.join(app_dir, 'root')
        )

        # Verify that the app root volume was archived
        treadmill.fs.archive_filesystem.assert_called_with(
            '/dev/foo',
            os.path.join(app_dir, 'root'),
            os.path.join(app_dir,
                         '001_xxx.xx.com_20150122_141436537918.tar'),
            mock.ANY
        )
        # Verify that the file is uploaded by Uploader
        app = utils.to_obj(manifest)
        treadmill.appmgr.finish._send_container_archive.assert_called_with(
            zkclient,
            app,
            os.path.join(app_dir,
                         '001_xxx.xx.com_20150122_141436537918.tar.gz'),
        )
        # Verify that the app folder was deleted
        self.assertFalse(os.path.exists(app_dir))
        # Cleanup the block device
        mock_ld_client.delete.assert_called_with(app_unique_name)
        # Cleanup the cgroup resource
        mock_cgroup_client.delete.assert_called_with(app_unique_name)
        self.app_env.rules.unlink_rule.assert_has_calls([
            mock.call(rule=firewall.DNATRule('172.31.81.67', 5000,
                                             '192.168.0.2', 8000),
                      owner=app_unique_name),
            mock.call(rule=firewall.DNATRule('172.31.81.67', 54321,
                                             '192.168.0.2', 54321),
                      owner=app_unique_name),
            mock.call(rule=firewall.DNATRule('172.31.81.67', 45024,
                                             '192.168.0.2', 45024),
                      owner=app_unique_name),
            mock.call(rule=firewall.DNATRule('172.31.81.67', 62422,
                                             '192.168.0.2', 62422),
                      owner=app_unique_name),
        ])
        treadmill.iptables.rm_ip_set.assert_has_calls(
            [
                mock.call(treadmill.iptables.SET_INFRA_SVC,
                          '192.168.0.2,tcp:54321'),
                mock.call(treadmill.iptables.SET_INFRA_SVC,
                          '192.168.0.2,tcp:45024'),
                mock.call(treadmill.iptables.SET_INFRA_SVC,
                          '192.168.0.2,tcp:62422'),
            ]
        )
        treadmill.appevents.post.assert_called_with(
            mock.ANY,
            'proid.myapp#001', 'finished', '0.0',
            {'sig': 0,
             'service':
             'web_server',
             'rc': 0}
        )
        treadmill.rrdutils.flush_noexc.assert_called_with(
            os.path.join(self.root, 'metrics', 'apps',
                         app_unique_name + '.rrd')
        )
        shutil.copy.assert_called_with(
            os.path.join(self.root, 'metrics', 'apps',
                         app_unique_name + '.rrd'),
            os.path.join(app_dir, 'metrics.rrd')
        )

Example 132

Project: girder Source File: file_test.py
    @moto.mock_s3bucket_path
    def atestS3Assetstore(self):
        botoParams = makeBotoConnectParams('access', 'secret')
        mock_s3.createBucket(botoParams, 'b')

        self.model('assetstore').remove(self.model('assetstore').getCurrent())
        assetstore = self.model('assetstore').createS3Assetstore(
            name='test', bucket='b', accessKeyId='access', secret='secret',
            prefix='test')
        self.assetstore = assetstore

        # Initialize the upload
        resp = self.request(
            path='/file', method='POST', user=self.user, params={
                'parentType': 'folder',
                'parentId': self.privateFolder['_id'],
                'name': 'hello.txt',
                'size': len(chunk1) + len(chunk2),
                'mimeType': 'text/plain'
            })
        self.assertStatusOk(resp)

        self.assertFalse(resp.json['s3']['chunked'])
        uploadId = resp.json['_id']
        fields = [('offset', 0), ('uploadId', uploadId)]
        files = [('chunk', 'hello.txt', chunk1)]

        # Send the first chunk, we should get a 400
        resp = self.multipartRequest(
            path='/file/chunk', user=self.user, fields=fields, files=files)
        self.assertStatus(resp, 400)
        self.assertEqual(
            resp.json['message'],
            'Uploads of this length must be sent in a single chunk.')

        # Attempting to send second chunk with incorrect offset should fail
        fields = [('offset', 100), ('uploadId', uploadId)]
        files = [('chunk', 'hello.txt', chunk2)]
        resp = self.multipartRequest(
            path='/file/chunk', user=self.user, fields=fields, files=files)
        self.assertStatus(resp, 400)
        self.assertEqual(
            resp.json['message'],
            'Server has received 0 bytes, but client sent offset 100.')

        # Request offset from server (simulate a resume event)
        resp = self.request(path='/file/offset', method='GET', user=self.user,
                            params={'uploadId': uploadId})
        self.assertStatusOk(resp)

        # Trying to send too many bytes should fail
        currentOffset = resp.json['offset']
        fields = [('offset', resp.json['offset']), ('uploadId', uploadId)]
        files = [('chunk', 'hello.txt', "extra_"+chunk2+"_bytes")]
        resp = self.multipartRequest(
            path='/file/chunk', user=self.user, fields=fields, files=files)
        self.assertStatus(resp, 400)
        self.assertEqual(resp.json, {
            'type': 'validation',
            'message': 'Received too many bytes.'
        })

        # The offset should not have changed
        resp = self.request(path='/file/offset', method='GET', user=self.user,
                            params={'uploadId': uploadId})
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['offset'], currentOffset)

        # Send all in one chunk
        files = [('chunk', 'hello.txt', chunk1 + chunk2)]
        fields = [('offset', 0), ('uploadId', uploadId)]
        resp = self.multipartRequest(
            path='/file/chunk', user=self.user, fields=fields, files=files)
        self.assertStatusOk(resp)

        file = self.model('file').load(resp.json['_id'], force=True)

        self.assertHasKeys(file, ['itemId'])
        self.assertEqual(file['assetstoreId'], self.assetstore['_id'])
        self.assertEqual(file['name'], 'hello.txt')
        self.assertEqual(file['size'], len(chunk1 + chunk2))

        # Make sure metadata is updated in S3 when file info changes
        # (moto API doesn't cover this at all, so we manually mock.)
        with mock.patch('boto.s3.key.Key.set_remote_metadata') as m:
            resp = self.request(
                '/file/%s' % str(file['_id']), method='PUT', params={
                    'mimeType': 'application/csv',
                    'name': 'new name'
                }, user=self.user)
            self.assertEqual(len(m.mock_calls), 1)
            self.assertEqual(m.mock_calls[0][2], {
                'metadata_plus': {
                    'Content-Type': 'application/csv',
                    'Content-Disposition': b'attachment; filename="new name"'
                },
                'metadata_minus': [],
                'preserve_acl': True
            })

        # Enable testing of multi-chunk proxied upload
        S3AssetstoreAdapter.CHUNK_LEN = 5

        resp = self.request(
            path='/file', method='POST', user=self.user, params={
                'parentType': 'folder',
                'parentId': self.privateFolder['_id'],
                'name': 'hello.txt',
                'size': len(chunk1) + len(chunk2),
                'mimeType': 'text/plain'
            })
        self.assertStatusOk(resp)
        self.assertTrue(resp.json['s3']['chunked'])

        uploadId = resp.json['_id']
        fields = [('offset', 0), ('uploadId', uploadId)]
        files = [('chunk', 'hello.txt', chunk1)]

        # Send the first chunk, should now work
        resp = self.multipartRequest(
            path='/file/chunk', user=self.user, fields=fields, files=files)
        self.assertStatusOk(resp)

        resp = self.request(path='/file/offset', user=self.user, params={
            'uploadId': uploadId
        })
        self.assertStatusOk(resp)
        self.assertEqual(resp.json['offset'], len(chunk1))

        # Hack: make moto accept our too-small chunks
        moto.s3.models.UPLOAD_PART_MIN_SIZE = 5

        # Send the second chunk
        files = [('chunk', 'hello.txt', chunk2)]
        fields = [('offset', resp.json['offset']), ('uploadId', uploadId)]
        resp = self.multipartRequest(
            path='/file/chunk', user=self.user, fields=fields, files=files)
        self.assertStatusOk(resp)

        file = resp.json

        self.assertHasKeys(file, ['itemId'])
        self.assertEqual(file['assetstoreId'], str(self.assetstore['_id']))
        self.assertEqual(file['name'], 'hello.txt')
        self.assertEqual(file['size'], len(chunk1 + chunk2))

        # Test copying a file (we don't assert on the content in this case
        # because the S3 download will fail)
        self._testCopyFile(file, assertContent=False)
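
One detail in the example above deserves a closer look: mock.patch used as a context manager, plus reading keyword arguments back out of a recorded call via m.mock_calls[0][2] (each call record is a (name, args, kwargs) triple). Below is a hypothetical minimal sketch, with an invented Key class and rename_file helper standing in for the boto pieces.

import unittest
from unittest import mock  # the examples above use the standalone "mock" backport


class Key(object):
    """Hypothetical stand-in for boto.s3.key.Key."""
    def set_remote_metadata(self, metadata_plus, metadata_minus, preserve_acl):
        raise RuntimeError('never reached; patched out in the test')


def rename_file(key, new_name):
    """Code under test: push updated metadata for a renamed file."""
    key.set_remote_metadata(
        metadata_plus={
            'Content-Disposition': 'attachment; filename="%s"' % new_name},
        metadata_minus=[],
        preserve_acl=True)


class RenameTest(unittest.TestCase):
    def test_metadata_updated(self):
        with mock.patch.object(Key, 'set_remote_metadata') as m:
            rename_file(Key(), 'new name')
            self.assertEqual(len(m.mock_calls), 1)
            # A call record is a (name, args, kwargs) triple, so [2] is kwargs.
            self.assertEqual(m.mock_calls[0][2], {
                'metadata_plus': {
                    'Content-Disposition': 'attachment; filename="new name"'},
                'metadata_minus': [],
                'preserve_acl': True,
            })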

Example 133

Project: fjord Source File: test_gengo.py
    @override_settings(
        ADMINS=(('Jimmy Discotheque', '[email protected]'),),
        GENGO_ACCOUNT_BALANCE_THRESHOLD=20.0
    )
    def test_gengo_push_translations_not_enough_balance(self):
        """Tests enough balance for one order, but not both"""
        # Add recipients to mailing list
        ml = MailingList.objects.get(name='gengo_balance')
        ml.members = u'[email protected]'
        ml.save()

        ght = GengoHumanTranslator()

        # Create a few jobs covering multiple languages
        descs = [
            ('es', u'Facebook no se puede enlazar con peru'),
            ('de', u'Absturze und langsam unter Android'),
        ]
        for lang, desc in descs:
            obj = SuperModel(locale=lang, desc=desc)
            obj.save()

            job = GengoJob(
                content_object=obj,
                tier='standard',
                src_field='desc',
                dst_field='trans_desc',
                src_lang=lang,
                dst_lang='en'
            )
            job.save()

        with patch('fjord.translations.gengo_utils.Gengo') as GengoMock:
            # FIXME: This returns the same thing both times, but to
            # make the test "more kosher" we'd have this return two
            # different order_id values.
            mocker = GengoMock.return_value
            mocker.getAccountBalance.return_value = {
                u'opstat': u'ok',
                u'response': {
                    # Enough for one order, but dips below threshold
                    # for the second one.
                    u'credits': '20.30',
                    u'currency': u'USD'
                }
            }
            mocker.postTranslationJobs.return_value = {
                u'opstat': u'ok',
                u'response': {
                    u'order_id': u'1337',
                    u'job_count': 2,
                    u'credits_used': u'0.35',
                    u'currency': u'USD'
                }
            }

            ght.push_translations()

            assert GengoOrder.objects.count() == 1
            # The "it's too low" email only.
            assert len(mail.outbox) == 1

        with patch('fjord.translations.gengo_utils.Gengo') as GengoMock:
            # FIXME: This returns the same thing both times, but to
            # make the test "more kosher" we'd have this return two
            # different order_id values.
            mocker = GengoMock.return_value
            mocker.getAccountBalance.return_value = {
                u'opstat': u'ok',
                u'response': {
                    # This is the balance after one order.
                    u'credits': '19.95',
                    u'currency': u'USD'
                }
            }
            mocker.postTranslationJobs.return_value = {
                u'opstat': u'ok',
                u'response': {
                    u'order_id': u'1337',
                    u'job_count': 2,
                    u'credits_used': u'0.35',
                    u'currency': u'USD'
                }
            }

            # The next time push_translations runs, it shouldn't
            # create any new jobs, but should send an email.
            ght.push_translations()

            assert GengoOrder.objects.count() == 1
            # This generates one more email.
            assert len(mail.outbox) == 2
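
The Gengo test above uses a common shape: patch the client class itself, then program methods on GengoMock.return_value, i.e. on the instance the code under test will construct. Below is a hypothetical minimal version of that shape; GengoClient and push_if_affordable are invented names, not fjord's real helpers.

import unittest
from unittest import mock  # the examples above use the standalone "mock" backport


class GengoClient(object):
    """Hypothetical stand-in for the real Gengo API client."""
    def getAccountBalance(self):
        raise RuntimeError('never reached; patched out in the test')

    def postTranslationJobs(self, jobs):
        raise RuntimeError('never reached; patched out in the test')


def push_if_affordable(jobs, threshold):
    """Code under test: only order translations while the balance holds up."""
    client = GengoClient()
    balance = float(client.getAccountBalance()['response']['credits'])
    if balance < threshold:
        return None
    return client.postTranslationJobs(jobs)['response']['order_id']


class PushTest(unittest.TestCase):
    @mock.patch(__name__ + '.GengoClient')
    def test_balance_too_low(self, client_cls):
        instance = client_cls.return_value
        instance.getAccountBalance.return_value = {
            'opstat': 'ok',
            'response': {'credits': '19.95', 'currency': 'USD'},
        }
        self.assertIsNone(push_if_affordable(['job'], threshold=20.0))
        # Below the threshold, no order is ever placed.
        instance.postTranslationJobs.assert_not_called()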

Example 134

Project: cti-toolkit Source File: test_misp_submission.py
@httpretty.activate
@mock.patch('certau.transform.misp.time.sleep')
def test_misp_publishing(_):
    """Test that the stixtrans module can submit to a MISP server."""
    # STIX file to test against. Place in a StringIO instance so we can
    # close the file.
    with open('tests/CA-TEST-STIX.xml', 'rb') as stix_f:
        stix_io = StringIO.StringIO(stix_f.read())

    # Create a transformer - select 'text' output format and flag MISP
    # publishing (with appropriate settings).
    package = stix.core.STIXPackage.from_xml(stix_io)
    misp_args = {
        'misp_url': 'http://misp.host.tld/',
        'misp_key': '111111111111111111111111111',
    }
    misp_event_args = {
        'distribution': 1,
        'threat_level': 4,
        'analysis': 0,
    }

    # Ensures that non-registered paths fail
    httpretty.HTTPretty.allow_net_connect = False

    # Mock the MISP version retrieval.
    httpretty.register_uri(
        httpretty.GET,
        'http://misp.host.tld/servers/getVersion',
        body=json.dumps({}),
        content_type='application/json',
    )

    # Mock the creation of an event
    httpretty.register_uri(
        httpretty.POST,
        'http://misp.host.tld/events',
        body=json.dumps({'Event': {
            'id': '0',
            'distribution': misp_event_args['distribution'],
        }}),
        content_type='application/json',
    )

    # Mock the adding of a tag to an event
    httpretty.register_uri(
        httpretty.POST,
        'http://misp.host.tld/events/addTag',
        body=json.dumps({'Event': {
            'id': '0',
            'tag': 4,
        }}),
        content_type='application/json',
    )

    # Mock editing of a created event.
    httpretty.register_uri(
        httpretty.POST,
        'http://misp.host.tld/events/0',
        body=json.dumps({}),
        content_type='application/json',
    )

    # Perform the processing and the misp publishing.
    misp = certau.transform.StixMispTransform.get_misp_object(
        **misp_args
    )
    transformer = certau.transform.StixMispTransform(
        package=package,
        misp=misp,
        **misp_event_args
    )
    transformer.publish()

    # Test the correct requests were made
    reqs = list(httpretty.httpretty.latest_requests)

    # The "get version" request includes the MISP key.
    r_get_version = reqs[0]
    assert r_get_version.path == '/servers/getVersion'
    assert r_get_version.headers.dict['authorization'] == misp_args['misp_key']

    # The event creation request includes basic information.
    r_create_event = reqs[1]
    assert r_create_event.path == '/events'
    assert json.loads(r_create_event.body) == {
        u'Event': {
            u'analysis': misp_event_args['analysis'],
            u'published': False,
            u'threat_level_id': misp_event_args['threat_level'],
            u'distribution': misp_event_args['distribution'],
            u'date': '2015-12-23',
            u'info': 'CA-TEST-STIX | Test STIX data'
        }
    }

    # The TLP tag is added to the event.
    r_add_tag = reqs[2]
    assert r_add_tag.path == '/events/addTag'
    assert json.loads(r_add_tag.body) == {
        u'request': {
            u'Event': {
                u'id': '0',
                u'tag': 4,
            }
        }
    }

    # The event is then updated with the observables, over multiple
    # requests. We're only interested in the 'Attribute' key here as that
    # contains the data extracted from the observable.
    obs_attributes = sorted([json.loads(request.body)['Event']['Attribute'][0]
                             for request
                             in reqs[3:]])

    assert obs_attributes == sorted([
        {
            u'category': u'Artifacts dropped',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'md5',
            u'value': u'11111111111111112977fa0588bd504a',
        },
        {
            u'category': u'Artifacts dropped',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'md5',
            u'value': u'ccccccccccccccc33574c79829dc1ccf',
        },
        {
            u'category': u'Artifacts dropped',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'md5',
            u'value': u'11111111111111133574c79829dc1ccf',
        },
        {
            u'category': u'Artifacts dropped',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'md5',
            u'value': u'11111111111111111f2601b4d21660fb',
        },
        {
            u'category': u'Artifacts dropped',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'md5',
            u'value': u'1111111111b42b57f518197d930471d9',
        },
        {
            u'category': u'Artifacts dropped',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'mutex',
            u'value': u'\\BaseNamedObjects\\MUTEX_0001',
        },
        {
            u'category': u'Artifacts dropped',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'mutex',
            u'value': u'\\BaseNamedObjects\\WIN_ABCDEF',
        },
        {
            u'category': u'Artifacts dropped',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'mutex',
            u'value': u'\\BaseNamedObjects\\iurlkjashdk',
        },
        {
            u'category': u'Artifacts dropped',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'regkey|value',
            u'value': u'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\Run|hotkey\\%APPDATA%\\malware.exe -st',
        },
        {
            u'category': u'Artifacts dropped',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'sha1',
            u'value': u'893fb19ac24eabf9b1fe1ddd1111111111111111',
        },
        {
            u'category': u'Artifacts dropped',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'sha256',
            u'value': u'11111111111111119f167683e164e795896be3be94de7f7103f67c6fde667bdf',
        },
        {
            u'category': u'Network activity',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'domain',
            u'value': u'bad.domain.org',
        },
        {
            u'category': u'Network activity',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'domain',
            u'value': u'dnsupdate.dyn.net',
        },
        {
            u'category': u'Network activity',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'domain',
            u'value': u'free.stuff.com',
        },
        {
            u'category': u'Network activity',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'ip-dst',
            u'value': u'183.82.180.95',
        },
        {
            u'category': u'Network activity',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'ip-dst',
            u'value': u'111.222.33.44',
        },
        {
            u'category': u'Network activity',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'ip-dst',
            u'value': u'158.164.39.51',
        },
        {
            u'category': u'Network activity',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'url',
            u'value': u'http://host.domain.tld/path/file',
        },
        {
            u'category': u'Network activity',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'user-agent',
            u'value': u'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.2309.372 Safari/537.36',
        },
        {
            u'category': u'Payload delivery',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'email-src',
            u'value': u'[email protected]',
        },
        {
            u'category': u'Payload delivery',
            u'distribution': 1,
            u'to_ids': True,
            u'type': u'email-subject',
            u'value': u'Important project details',
        },
    ])
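
A small but useful detail in the example above: @mock.patch('certau.transform.misp.time.sleep') keeps the test from actually sleeping, and since the test body never needs the injected mock, it is simply named _. Below is a hypothetical minimal sketch of the same trick around an invented wait_until helper.

import time
import unittest
from unittest import mock  # the examples above use the standalone "mock" backport


def wait_until(predicate, attempts=5, delay=1.0):
    """Code under test: poll a predicate with a fixed delay between tries."""
    for _ in range(attempts):
        if predicate():
            return True
        time.sleep(delay)
    return False


class WaitTest(unittest.TestCase):
    @mock.patch('time.sleep')
    def test_eventually_true(self, _):
        # The patched sleep makes the polling loop run instantly.
        results = iter([False, False, True])
        self.assertTrue(wait_until(lambda: next(results)))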

Example 135

Project: socorro Source File: test_legacy_processor.py
    def test_process_crash_basic(self):
        config = setup_config_with_mocks()
        mocked_transform_rules_str = \
            'socorro.processor.legacy_processor.TransformRuleSystem'
        with mock.patch(mocked_transform_rules_str) as m_transform_class:
            m_transform = mock.Mock()
            m_transform_class.return_value = m_transform
            m_transform.attach_mock(mock.Mock(), 'apply_all_rules')
            utc_now_str = 'socorro.processor.legacy_processor.utc_now'
            with mock.patch(utc_now_str) as m_utc_now:
                m_utc_now.return_value = datetime(2012, 5, 4, 15, 11,
                                                  tzinfo=UTC)

                raw_crash = DotDict()
                raw_crash.uuid = '3bc4bcaa-b61d-4d1f-85ae-30cb32120504'
                raw_crash.submitted_timestamp = '2012-05-04T15:33:33'
                raw_dump = {'upload_file_minidump':
                                '/some/path/%s.dump' % raw_crash.uuid,
                            'aux_dump_001':
                            '/some/path/aux_001.%s.dump' % raw_crash.uuid,
                            }
                leg_proc = LegacyCrashProcessor(config, config.mock_quit_fn)

                started_timestamp = datetime(2012, 5, 4, 15, 10, tzinfo=UTC)
                leg_proc._log_job_start = mock.Mock(
                  return_value=started_timestamp
                )

                basic_processed_crash = DotDict()
                basic_processed_crash.uuid = raw_crash.uuid
                basic_processed_crash.hang_type = 0
                basic_processed_crash.java_stack_trace = None
                leg_proc._create_basic_processed_crash = mock.Mock(
                  return_value=basic_processed_crash)

                leg_proc._log_job_end = mock.Mock()

                processed_crash_update_dict = DotDict()
                processed_crash_update_dict.success = True
                leg_proc._do_breakpad_stack_dump_analysis = mock.Mock(
                  return_value=processed_crash_update_dict
                )

                leg_proc._cleanup_temp_file = mock.Mock()

                # Here's the call being tested
                processed_crash = \
                    leg_proc.process_crash(
                      raw_crash,
                      raw_dump,
                      {}
                    )

                # test the result
                eq_(1, leg_proc._log_job_start.call_count)
                leg_proc._log_job_start.assert_called_with(raw_crash.uuid)

                eq_(1, m_transform.apply_all_rules.call_count)
                m_transform.apply_all_rules.has_calls(
                    mock.call(raw_crash, leg_proc),
                )
                eq_(
                    1,
                    m_transform.apply_until_action_succeeds.call_count
                )
                m_transform.apply_all_rules.has_calls(
                    mock.call(raw_crash, processed_crash, leg_proc)
                )

                eq_(
                  1,
                  leg_proc._create_basic_processed_crash.call_count
                )
                leg_proc._create_basic_processed_crash.assert_called_with(
                  raw_crash.uuid,
                  raw_crash,
                  datetime(2012, 5, 4, 15, 33, 33, tzinfo=UTC),
                  started_timestamp,
                  [
                      'testing_processor:2012',
                      'LegacyCrashProcessor',
                      "Pipe dump missing from 'upload_file_minidump'",
                      "Pipe dump missing from 'aux_dump_001'"
                  ]
                )

                eq_(
                  2,
                  leg_proc._do_breakpad_stack_dump_analysis.call_count
                )
                first_call, second_call = \
                    leg_proc._do_breakpad_stack_dump_analysis.call_args_list
                eq_(
                  first_call,
                  ((raw_crash.uuid, '/some/path/%s.dump' % raw_crash.uuid,
                   0, None, datetime(2012, 5, 4, 15, 33, 33, tzinfo=UTC),
                   [
                      'testing_processor:2012',
                      'LegacyCrashProcessor',
                      "Pipe dump missing from 'upload_file_minidump'",
                      "Pipe dump missing from 'aux_dump_001'"
                   ]),)
                )
                eq_(
                  second_call,
                  ((raw_crash.uuid,
                   '/some/path/aux_001.%s.dump' % raw_crash.uuid,
                   0, None, datetime(2012, 5, 4, 15, 33, 33, tzinfo=UTC),
                   [
                      'testing_processor:2012',
                      'LegacyCrashProcessor',
                      "Pipe dump missing from 'upload_file_minidump'",
                      "Pipe dump missing from 'aux_dump_001'"
                   ]),)
                )

                eq_(1, leg_proc._log_job_end.call_count)
                leg_proc._log_job_end.assert_called_with(
                  datetime(2012, 5, 4, 15, 11, tzinfo=UTC),
                  True,
                  raw_crash.uuid
                )

                epc = DotDict()
                epc.uuid = raw_crash.uuid
                epc.topmost_filenames = ''
                epc.processor_notes = \
                    "testing_processor:2012; LegacyCrashProcessor; " \
                    "Pipe dump missing from " \
                    "'upload_file_minidump'; Pipe dump missing from " \
                    "'aux_dump_001'"

                epc.success = True
                epc.completeddatetime = datetime(2012, 5, 4, 15, 11,
                                                 tzinfo=UTC)
                epc.hang_type = 0
                epc.java_stack_trace = None
                epc.Winsock_LSP = None
                epc.additional_minidumps = ['aux_dump_001']
                epc.aux_dump_001 = {'success': True}
                eq_(
                  processed_crash,
                  dict(epc)
                )

                leg_proc._statistics.assert_has_calls(
                    [
                        mock.call.incr('jobs'),
                        mock.call.incr('restarts')
                    ],
                    any_order=True
                )
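
The example above nests two with mock.patch(...) blocks: one replaces a collaborator class (so its return_value stands in for every instance the processor builds), the other pins the clock; the assertions then read call_args_list directly. Below is a hypothetical, much smaller sketch of the same structure, with invented RuleSystem/utc_now/process names.

import datetime
import unittest
from unittest import mock  # the examples above use the standalone "mock" backport


def utc_now():
    return datetime.datetime.utcnow()


class RuleSystem(object):
    """Hypothetical collaborator built by the code under test."""
    def apply_all_rules(self, crash):
        raise RuntimeError('never reached; patched out in the test')


def process(crash):
    """Code under test: stamp the crash and run it through the rules."""
    rules = RuleSystem()
    crash['processed_at'] = utc_now()
    rules.apply_all_rules(crash)
    return crash


class ProcessTest(unittest.TestCase):
    def test_process(self):
        frozen = datetime.datetime(2012, 5, 4, 15, 11)
        with mock.patch(__name__ + '.RuleSystem') as rule_cls:
            rule_instance = rule_cls.return_value
            with mock.patch(__name__ + '.utc_now') as m_utc_now:
                m_utc_now.return_value = frozen

                crash = process({'uuid': 'abc123'})

                self.assertEqual(crash['processed_at'], frozen)
                self.assertEqual(rule_instance.apply_all_rules.call_count, 1)
                # call_args_list holds every recorded call, in order.
                (args, kwargs), = rule_instance.apply_all_rules.call_args_list
                self.assertEqual(args, (crash,))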

Example 136

Project: girder_worker Source File: docker_test.py
    @mock.patch('subprocess.check_output')
    @mock.patch('subprocess.Popen')
    def testDockerMode(self, mockPopen, checkOutput):
        mockPopen.return_value = processMock
        checkOutput.return_value = inspectOutput

        task = {
            'mode': 'docker',
            'docker_image': 'test/test:latest',
            'container_args': [
                '-f', '$input{foo}', '--temp-dir=$input{_tempdir}'],
            'pull_image': True,
            'inputs': [{
                'id': 'foo',
                'name': 'A variable',
                'format': 'string',
                'type': 'string',
                'target': 'filepath'
            }],
            'outputs': [{
                'id': '_stderr',
                'format': 'string',
                'type': 'string'
            }]
        }

        inputs = {
            'foo': {
                'mode': 'http',
                'url': 'https://foo.com/file.txt'
            }
        }

        @httmock.all_requests
        def fetchMock(url, request):
            if url.netloc == 'foo.com' and url.scheme == 'https':
                return 'dummy file contents'
            else:
                raise Exception('Unexpected url ' + repr(url))

        with httmock.HTTMock(fetchMock):
            # Use user-specified filename
            _old = sys.stdout
            mockedStdOut = six.StringIO()
            sys.stdout = mockedStdOut
            out = run(
                task, inputs=inputs, cleanup=False, validate=False,
                auto_convert=False)
            sys.stdout = _old

            # We didn't specify _stdout as an output, so it should just get
            # printed to sys.stdout (which we mocked)
            lines = mockedStdOut.getvalue().splitlines()
            self.assertEqual(lines[0],
                             'Pulling Docker image: test/test:latest')
            self.assertEqual(lines[-2], 'output message')
            self.assertEqual(
                lines[-1], 'Garbage collecting old containers and images.')

            # We bound _stderr as a task output, so it should be in the output
            self.assertEqual(out, {
                '_stderr': {
                    'data': 'error message\n',
                    'format': 'string'
                }
            })

            self.assertEqual(mockPopen.call_count, 3)
            cmd1, cmd2, cmd3 = [x[1]['args'] for x in mockPopen.call_args_list]

            self.assertEqual(cmd1, ('docker', 'pull', 'test/test:latest'))
            self.assertEqual(cmd2[:3],
                             ['docker', 'run', '-v'])
            six.assertRegex(self, cmd2[3], _tmp + '/.*:%s' % DATA_VOLUME)
            self.assertEqual(cmd2[4], '-v')
            six.assertRegex(self, cmd2[5],
                            '%s:%s:ro' % (SCRIPTS_DIR, SCRIPTS_VOLUME))
            self.assertEqual(cmd2[6:9], [
                '--entrypoint',
                '%s/entrypoint.sh' % SCRIPTS_VOLUME,
                'test/test:latest'
            ])
            self.assertEqual(cmd2[9:15], [
                str(os.getuid()), str(os.getgid()),
                '/usr/bin/foo', '--flag', '-f', '%s/file.txt' % DATA_VOLUME])
            self.assertEqual(cmd2[-1], '--temp-dir=%s' % DATA_VOLUME)
            self.assertEqual(len(cmd2), 16)

            self.assertEqual(len(cmd3), 1)
            six.assertRegex(self, cmd3[0], 'docker-gc$')

            # Make sure we can specify a custom entrypoint to the container
            mockPopen.reset_mock()
            task['entrypoint'] = '/bin/bash'

            # Make sure additional docker run args work
            task['docker_run_args'] = ['--net', 'none']

            inputs['foo'] = {
                'mode': 'http',
                'url': 'https://foo.com/file.txt'
            }
            out = run(task, inputs=inputs, validate=False,
                      auto_convert=False)
            self.assertEqual(mockPopen.call_count, 3)
            cmd2 = mockPopen.call_args_list[1][1]['args']
            self.assertEqual(cmd2[6:11], [
                '--entrypoint',
                '%s/entrypoint.sh' % SCRIPTS_VOLUME,
                '--net',
                'none',
                'test/test:latest'
            ])
            self.assertEqual(cmd2[11:16], [
                str(os.getuid()), str(os.getgid()),
                '/bin/bash', '-f', '%s/file.txt' % DATA_VOLUME])

            mockPopen.reset_mock()
            # Make sure custom config settings are respected
            girder_worker.config.set('docker', 'cache_timeout', '123456')
            girder_worker.config.set(
                'docker', 'exclude_images', 'test/test:latest')

            # Make sure we can skip pulling the image
            task['pull_image'] = False
            inputs['foo'] = {
                'mode': 'http',
                'url': 'https://foo.com/file.txt'
            }
            out = run(task, inputs=inputs, validate=False,
                      auto_convert=False)
            self.assertEqual(mockPopen.call_count, 2)
            cmd1, cmd2 = [x[1]['args'] for x in mockPopen.call_args_list]
            self.assertEqual(tuple(cmd1[:2]), ('docker', 'run'))
            self.assertEqual(cmd1[8:10], ['--net', 'none'])
            six.assertRegex(self, cmd2[0], 'docker-gc$')
            env = mockPopen.call_args_list[1][1]['env']
            self.assertEqual(env['GRACE_PERIOD_SECONDS'], '123456')
            six.assertRegex(self, env['EXCLUDE_FROM_GC'],
                            'docker_gc_scratch/.docker-gc-exclude$')
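
Stripped of the girder_worker specifics, the example above is the standard recipe for testing code that shells out: patch subprocess.Popen and subprocess.check_output, then unpack call_args_list to inspect the exact commands. Below is a hypothetical minimal sketch with an invented run_container function.

import subprocess
import unittest
from unittest import mock  # the examples above use the standalone "mock" backport


def run_container(image, pull=True):
    """Code under test: optionally pull an image, then run it."""
    if pull:
        subprocess.check_output(['docker', 'pull', image])
    proc = subprocess.Popen(args=['docker', 'run', image],
                            stdout=subprocess.PIPE)
    return proc


class DockerTest(unittest.TestCase):
    @mock.patch('subprocess.check_output')
    @mock.patch('subprocess.Popen')
    def test_run(self, mock_popen, mock_check_output):
        # Decorators apply bottom-up, so Popen's mock is the first argument.
        run_container('test/test:latest')

        mock_check_output.assert_called_once_with(
            ['docker', 'pull', 'test/test:latest'])
        # Each call record is (args, kwargs); Popen was called with kwargs only.
        cmd = mock_popen.call_args_list[0][1]['args']
        self.assertEqual(cmd, ['docker', 'run', 'test/test:latest'])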

Example 137

Project: socorro Source File: test_views.py
    @mock.patch('crashstats.crashstats.models.Bugs.get')
    def test_search_results(self, cpost):
        def mocked_post(**options):
            return {
                "hits": [
                    {
                        "id": "123456",
                        "signature": u"nsASDOMWindowEnumerator::GetNext()"
                    }
                ],
                "total": 1
            }

        cpost.side_effect = mocked_post

        def mocked_supersearch_get(**params):
            assert '_columns' in params

            if 'product' in params and 'WaterWolf' in params['product']:
                results = {
                    "hits": [
                        {
                            "signature": "nsASDOMWindowEnumerator::GetNext()",
                            "date": "2017-01-31T23:12:57",
                            "uuid": "aaaaaaaaaaaaa1",
                            "product": "WaterWolf",
                            "version": "1.0",
                            "platform": "Linux",
                            "build_id": 888981
                        },
                        {
                            "signature": "mySignatureIsCool",
                            "date": "2017-01-31T23:12:57",
                            "uuid": "aaaaaaaaaaaaa2",
                            "product": "WaterWolf",
                            "version": "1.0",
                            "platform": "Linux",
                            "build_id": 888981
                        },
                        {
                            "signature": "mineIsCoolerThanYours",
                            "date": "2017-01-31T23:12:57",
                            "uuid": "aaaaaaaaaaaaa3",
                            "product": "WaterWolf",
                            "version": "1.0",
                            "platform": "Linux",
                            "build_id": None
                        },
                        {
                            "signature": "EMPTY",
                            "date": "2017-01-31T23:12:57",
                            "uuid": "aaaaaaaaaaaaa4",
                            "product": "WaterWolf",
                            "version": "1.0",
                            "platform": "Linux",
                            "build_id": None
                        }
                    ],
                    "facets": {
                        "signature": [
                            {
                                "term": "nsASDOMWindowEnumerator::GetNext()",
                                "count": 1
                            },
                            {
                                "term": "mySignatureIsCool",
                                "count": 1
                            },
                            {
                                "term": "mineIsCoolerThanYours",
                                "count": 1
                            },
                            {
                                "term": "EMPTY",
                                "count": 1
                            }
                        ]
                    },
                    "total": 4
                }
                results['hits'] = self.only_certain_columns(
                    results['hits'],
                    params['_columns']
                )
                return results
            elif 'product' in params and 'SeaMonkey' in params['product']:
                results = {
                    "hits": [
                        {
                            "signature": "nsASDOMWindowEnumerator::GetNext()",
                            "date": "2017-01-31T23:12:57",
                            "uuid": "aaaaaaaaaaaaa",
                            "product": "WaterWolf",
                            "version": "1.0",
                            "platform": "Linux",
                            "build_id": 888981
                        },
                        {
                            "signature": "mySignatureIsCool",
                            "date": "2017-01-31T23:12:57",
                            "uuid": "aaaaaaaaaaaaa",
                            "product": "WaterWolf",
                            "version": "1.0",
                            "platform": "Linux",
                            "build_id": 888981
                        }
                    ],
                    "facets": {
                        "build_id": [
                            {
                                "term": "888981",
                                "count": 2
                            }
                        ]
                    },
                    "total": 2
                }
                results['hits'] = self.only_certain_columns(
                    results['hits'],
                    params['_columns']
                )
                return results
            elif (
                'signature' in params and
                '~nsASDOMWindowEnumerator' in params['signature']
            ):
                results = {
                    "hits": [
                        {
                            "signature": "nsASDOMWindowEnumerator::GetNext()",
                            "date": "2017-01-31T23:12:57",
                            "uuid": "aaaaaaaaaaaaa",
                            "product": "WaterWolf",
                            "version": "1.0",
                            "platform": "Linux",
                            "build_id": 12345678
                        }
                    ],
                    "facets": {
                        "signature": [
                            {
                                "term": "nsASDOMWindowEnumerator::GetNext()",
                                "count": 1
                            }
                        ]
                    },
                    "total": 1
                }
                results['hits'] = self.only_certain_columns(
                    results['hits'],
                    params['_columns']
                )
                return results
            else:
                return {"hits": [], "facets": [], "total": 0}

        SuperSearchUnredacted.implementation().get.side_effect = (
            mocked_supersearch_get
        )

        url = reverse('supersearch.search_results')
        response = self.client.get(
            url,
            {'product': 'WaterWolf'}
        )
        eq_(response.status_code, 200)
        # Test results are existing
        ok_('table id="reports-list"' in response.content)
        ok_('nsASDOMWindowEnumerator::GetNext()' in response.content)
        ok_('mySignatureIsCool' in response.content)
        ok_('mineIsCoolerThanYours' in response.content)
        ok_('EMPTY' in response.content)
        ok_('aaaaaaaaaaaaa1' in response.content)
        ok_('888981' in response.content)
        ok_('Linux' in response.content)
        ok_('2017-01-31 23:12:57' in response.content)
        # Test facets are existing
        ok_('table id="facets-list-' in response.content)
        # Test bugs are existing
        ok_('<th scope="col">Bugs</th>' in response.content)
        ok_('123456' in response.content)
        # Test links on terms are existing
        ok_('product=%3DWaterWolf' in response.content)

        # Test with empty results
        response = self.client.get(url, {
            'product': 'NightTrain',
            'date': '2012-01-01'
        })
        eq_(response.status_code, 200)
        ok_('table id="reports-list"' not in response.content)
        ok_('No results were found' in response.content)

        # Test with a signature param
        response = self.client.get(
            url,
            {'signature': '~nsASDOMWindowEnumerator'}
        )
        eq_(response.status_code, 200)
        ok_('table id="reports-list"' in response.content)
        ok_('nsASDOMWindowEnumerator::GetNext()' in response.content)
        ok_('123456' in response.content)

        # Test with a different facet
        response = self.client.get(
            url,
            {'_facets': 'build_id', 'product': 'SeaMonkey'}
        )
        eq_(response.status_code, 200)
        ok_('table id="reports-list"' in response.content)
        ok_('table id="facets-list-' in response.content)
        ok_('888981' in response.content)
        # Bugs should not be there; they appear only in the signature facet
        ok_('<th>Bugs</th>' not in response.content)
        ok_('123456' not in response.content)

        # Test with a different columns list
        response = self.client.get(
            url,
            {'_columns': ['build_id', 'platform'], 'product': 'WaterWolf'}
        )
        eq_(response.status_code, 200)
        ok_('table id="reports-list"' in response.content)
        ok_('table id="facets-list-' in response.content)
        # The build and platform appear
        ok_('888981' in response.content)
        ok_('Linux' in response.content)
        # The crash id is always shown
        ok_('aaaaaaaaaaaaa1' in response.content)
        # The version and date do not appear
        ok_('1.0' not in response.content)
        ok_('2017' not in response.content)

        # Test missing parameters don't raise an exception.
        response = self.client.get(
            url,
            {'product': 'WaterWolf', 'date': '', 'build_id': ''}
        )
        eq_(response.status_code, 200)
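
The mocked_supersearch_get function above shows side_effect used as a full fake implementation: the replacement inspects its parameters and returns different canned payloads per scenario. Below is a hypothetical minimal sketch of the same idea, with invented search_backend/count_for_product names.

import unittest
from unittest import mock  # the examples above use the standalone "mock" backport


def search_backend(**params):
    raise RuntimeError('never reached; patched out in the test')  # hypothetical


def count_for_product(product):
    """Code under test: ask the backend and report the total hit count."""
    return search_backend(product=product)['total']


class SearchTest(unittest.TestCase):
    @mock.patch(__name__ + '.search_backend')
    def test_counts_per_product(self, backend):
        def fake_search(**params):
            # The fake implementation branches on its parameters, just like
            # mocked_supersearch_get above.
            if params.get('product') == 'WaterWolf':
                return {'hits': ['a', 'b', 'c', 'd'], 'total': 4}
            return {'hits': [], 'total': 0}

        backend.side_effect = fake_search

        self.assertEqual(count_for_product('WaterWolf'), 4)
        self.assertEqual(count_for_product('NightTrain'), 0)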

Example 138

Project: edx-platform Source File: test_utils.py
    @mock.patch(UTILS_MODULE + '.get_completed_courses')
    def test_simulate_progress(self, mock_get_completed_courses):
        """Simulate the entirety of a user's progress through a program."""
        first_course_id, second_course_id = 'org/first-course/run', 'org/second-course/run'
        data = [
            factories.Program(
                organizations=[factories.Organization()],
                course_codes=[
                    factories.CourseCode(run_modes=[
                        factories.RunMode(course_key=first_course_id),
                    ]),
                    factories.CourseCode(run_modes=[
                        factories.RunMode(course_key=second_course_id),
                    ]),
                ]
            ),
            factories.Program(
                organizations=[factories.Organization()],
                course_codes=[
                    factories.CourseCode(run_modes=[factories.RunMode()]),
                ]
            ),
        ]
        self._mock_programs_api(data)

        # No enrollments, no program engaged.
        meter = utils.ProgramProgressMeter(self.user)
        self._assert_progress(meter)
        self.assertEqual(meter.completed_programs, [])

        # One enrollment, program engaged.
        self._create_enrollments(first_course_id)
        meter = utils.ProgramProgressMeter(self.user)
        program, program_id = data[0], data[0]['id']
        self._assert_progress(
            meter,
            factories.Progress(
                id=program_id,
                in_progress=self._extract_names(program, 0),
                not_started=self._extract_names(program, 1)
            )
        )
        self.assertEqual(meter.completed_programs, [])

        # Two enrollments, program in progress.
        self._create_enrollments(second_course_id)
        meter = utils.ProgramProgressMeter(self.user)
        self._assert_progress(
            meter,
            factories.Progress(
                id=program_id,
                in_progress=self._extract_names(program, 0, 1)
            )
        )
        self.assertEqual(meter.completed_programs, [])

        # One valid certificate earned, one course code complete.
        mock_get_completed_courses.return_value = [
            {'course_id': first_course_id, 'mode': MODES.verified},
        ]
        meter = utils.ProgramProgressMeter(self.user)
        self._assert_progress(
            meter,
            factories.Progress(
                id=program_id,
                completed=self._extract_names(program, 0),
                in_progress=self._extract_names(program, 1)
            )
        )
        self.assertEqual(meter.completed_programs, [])

        # Invalid certificate earned, still one course code to complete.
        mock_get_completed_courses.return_value = [
            {'course_id': first_course_id, 'mode': MODES.verified},
            {'course_id': second_course_id, 'mode': MODES.honor},
        ]
        meter = utils.ProgramProgressMeter(self.user)
        self._assert_progress(
            meter,
            factories.Progress(
                id=program_id,
                completed=self._extract_names(program, 0),
                in_progress=self._extract_names(program, 1)
            )
        )
        self.assertEqual(meter.completed_programs, [])

        # Second valid certificate obtained, all course codes complete.
        mock_get_completed_courses.return_value = [
            {'course_id': first_course_id, 'mode': MODES.verified},
            {'course_id': second_course_id, 'mode': MODES.verified},
        ]
        meter = utils.ProgramProgressMeter(self.user)
        self._assert_progress(
            meter,
            factories.Progress(
                id=program_id,
                completed=self._extract_names(program, 0, 1)
            )
        )
        self.assertEqual(meter.completed_programs, [program_id])
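
A minimal, self-contained sketch of the core pattern above: patch a collaborator by its dotted import path with the @mock.patch decorator, then steer the code under test by reassigning return_value between calls. The get_completed_courses/count_completed helpers below are stand-ins invented for this sketch.

import unittest
from unittest import mock


def get_completed_courses(user):
    # stand-in for an expensive remote call
    raise RuntimeError("should not be called in tests")


def count_completed(user):
    return len(get_completed_courses(user))


class ProgressTest(unittest.TestCase):
    @mock.patch(__name__ + '.get_completed_courses')
    def test_progress(self, mock_get_completed):
        # No certificates yet.
        mock_get_completed.return_value = []
        self.assertEqual(count_completed('alice'), 0)

        # One certificate earned; same mock, new canned answer.
        mock_get_completed.return_value = [{'course_id': 'org/course/run'}]
        self.assertEqual(count_completed('alice'), 1)


if __name__ == '__main__':
    unittest.main()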

Example 139

Project: pyon Source File: pycc.py
def main(opts, *args, **kwargs):
    """
    Processes arguments and starts the capability container.
    """
    def prepare_logging():
        # Load logging override config if provided. Supports variants literal and path.
        logging_config_override = None
        if opts.logcfg:
            if '{' in opts.logcfg:
                # Variant 1: Value is dict of config values
                try:
                    eval_value = ast.literal_eval(opts.logcfg)
                    logging_config_override = eval_value
                except ValueError:
                    raise Exception("Value error in logcfg arg '%s'" % opts.logcfg)
            else:
                # Variant 2: Value is path to YAML file containing config values
                logutil.DEFAULT_LOGGING_PATHS.append(opts.logcfg)
        logutil.configure_logging(logutil.DEFAULT_LOGGING_PATHS, logging_config_override=logging_config_override)

    def prepare_container():
        """
        Walks through pyon initialization in a deterministic way and initializes Container.
        In particular, it makes sure configuration is loaded in the correct order and
        that pycc startup arguments are considered.
        """
        import threading
        threading.current_thread().name = "CC-Main"

        # SIDE EFFECT: The import triggers static initializers: Monkey patching, setting pyon defaults
        import pyon

        from pyon.core import bootstrap, config

        # Set the global testing flag to False. We are running as a capability
        # container because we started through the pycc program.
        bootstrap.testing = False

        # Set sysname if provided in startup argument
        if opts.sysname:
            bootstrap.set_sys_name(opts.sysname)
        # Trigger any initializing default logic in get_sys_name
        bootstrap.get_sys_name()

        command_line_config = kwargs

        # This holds the minimal configuration used to bootstrap pycc and pyon and connect to datastores.
        bootstrap_config = None

        # This holds the new CFG object for pyon. Build it up in proper sequence and conditions.
        pyon_config = config.read_standard_configuration()      # Initial pyon.yml + pyon.local.yml

        # Load config override if provided. Supports variants literal and list of paths
        config_override = None
        if opts.config:
            if '{' in opts.config:
                # Variant 1: Dict of config values
                try:
                    eval_value = ast.literal_eval(opts.config)
                    config_override = eval_value
                except ValueError:
                    raise Exception("Value error in config arg '%s'" % opts.config)
            else:
                # Variant 2: List of paths
                from pyon.util.config import Config
                config_override = Config([opts.config]).data

        # Determine bootstrap_config
        if opts.config_from_directory:
            # Load minimal bootstrap config if option "config_from_directory"
            bootstrap_config = config.read_local_configuration(['res/config/pyon_min_boot.yml'])
            config.apply_local_configuration(bootstrap_config, pyon.DEFAULT_LOCAL_CONFIG_PATHS)
            config.apply_configuration(bootstrap_config, config_override)
            config.apply_configuration(bootstrap_config, command_line_config)
            print "pycc: config_from_directory=True. Minimal bootstrap configuration:", bootstrap_config
        else:
            # Otherwise: Set to standard set of local config files plus command line overrides
            bootstrap_config = deepcopy(pyon_config)
            config.apply_configuration(bootstrap_config, config_override)
            config.apply_configuration(bootstrap_config, command_line_config)

        # Override sysname from config file or command line
        if not opts.sysname and bootstrap_config.get_safe("system.name", None):
            new_sysname = bootstrap_config.get_safe("system.name")
            bootstrap.set_sys_name(new_sysname)

        # Delete sysname datastores if option "force_clean" is set
        if opts.force_clean:
            from pyon.datastore import clear_couch_util
            print "pycc: force_clean=True. DROP DATASTORES for sysname=%s" % bootstrap.get_sys_name()
            clear_couch_util.clear_couch(bootstrap_config, prefix=bootstrap.get_sys_name(), sysname=bootstrap.get_sys_name())
            pyon_config.container.filesystem.force_clean=True

        from pyon.core.interfaces.interfaces import InterfaceAdmin
        iadm = InterfaceAdmin(bootstrap.get_sys_name(), config=bootstrap_config)

        # If auto_bootstrap, load config and interfaces into directory
        # Note: this is idempotent and will not alter anything if this is not the first container to run
        if bootstrap_config.system.auto_bootstrap:
            print "pycc: auto_bootstrap=True."
            stored_config = deepcopy(pyon_config)
            config.apply_configuration(stored_config, config_override)
            config.apply_configuration(stored_config, command_line_config)
            iadm.create_core_datastores()
            iadm.store_config(stored_config)

        # Determine the final pyon_config
        # - Start from standard config already set (pyon.yml + local YML files)
        # - Optionally load config from directory
        if opts.config_from_directory:
            config.apply_remote_config(bootstrap_cfg=bootstrap_config, system_cfg=pyon_config)
        # - Apply container profile specific config
        config.apply_profile_configuration(pyon_config, bootstrap_config)
        # - Reapply pyon.local.yml here again for good measure
        config.apply_local_configuration(pyon_config, pyon.DEFAULT_LOCAL_CONFIG_PATHS)
        # - Last apply any separate command line config overrides
        config.apply_configuration(pyon_config, config_override)
        config.apply_configuration(pyon_config, command_line_config)

        # Also set the immediate flag, but only if specified - it is an override
        if opts.immediate:
            from pyon.util.containers import dict_merge
            dict_merge(pyon_config, {'system':{'immediate':True}}, True)

        # Bootstrap pyon's core. Load configuration etc.
        bootstrap.bootstrap_pyon(pyon_cfg=pyon_config)

        # Delete any queues/exchanges owned by sysname if option "broker_clean" is set
        if opts.broker_clean:
            print "pycc: broker_clean=True, sysname:", bootstrap.get_sys_name()

            # build connect str
            connect_str = "-q -H %s -P %s -u %s -p %s -V %s" % (pyon_config.get_safe('server.amqp_priv.host', pyon_config.get_safe('server.amqp.host', 'localhost')),
                                                                   pyon_config.get_safe('container.exchange.management.port', '55672'),
                                                                   pyon_config.get_safe('container.exchange.management.username', 'guest'),
                                                                   pyon_config.get_safe('container.exchange.management.password', 'guest'),
                                                                   '/')

            from putil.rabbithelper import clean_by_sysname
            deleted_exchanges, deleted_queues = clean_by_sysname(connect_str, bootstrap.get_sys_name())
            print "      exchanges deleted (%s): %s" % (len(deleted_exchanges), ",".join(deleted_exchanges))
            print "         queues deleted (%s): %s" % (len(deleted_queues), ",".join(deleted_queues))

        if opts.force_clean:
            path = os.path.join(pyon_config.get_safe('container.filesystem.root', '/tmp/ion'), bootstrap.get_sys_name())
            print "force_clean: Removing", path
            FileSystem._clean(pyon_config)

        # Auto-bootstrap interfaces
        if bootstrap_config.system.auto_bootstrap:
            iadm.store_interfaces(idempotent=True)

        iadm.close()

        if opts.no_container:
            print "pycc: no_container=True. Stopping here."
            return None

        # Create the container instance
        from pyon.container.cc import Container
        container = Container(*args, **command_line_config)

        return container

    def start_container(container):
        """
        Start container and all internal managers. Returns when ready.
        """
        container.start()

    def do_work(container):
        """
        Performs initial startup actions with the container as requested in arguments.
        Then remains in container shell or infinite wait until container stops.
        Returns when container should stop. Raises an exception if anything failed.
        """
        if opts.proc:
            # Run a one-off process (with the -x argument)
            mod, proc = opts.proc.rsplit('.', 1)
            print "pycc: Starting process %s" % opts.proc
            container.spawn_process(proc, mod, proc, config={'process':{'type':'immediate'}})
            # And end
            return

        if opts.rel:
            # Start a rel file
            start_ok = container.start_rel_from_url(opts.rel)
            if not start_ok:
                raise Exception("Cannot start deploy file '%s'" % opts.rel)

        if opts.mx:
            from pyon.public import CFG
            port = CFG.get_safe('container.flask_webapp.port',8080)
            container.spawn_process("ContainerUI", "ion.core.containerui", "ContainerUI")
            print "pycc: Container UI started ... listening on http://localhost:%s" % port

        if opts.signalparent:
            import signal
            print 'pycc: Signal parent pid %d that pycc pid %d service start process is complete...' % (os.getppid(), os.getpid())
            os.kill(os.getppid(), signal.SIGUSR1)

            def is_parent_gone():
                while os.getppid() != 1:
                    gevent.sleep(1)
                print 'pycc: Now I am an orphan ... notifying serve_forever to stop'
                os.kill(os.getpid(), signal.SIGINT)
            import gevent
            ipg = gevent.spawn(is_parent_gone)

            container.gl_parent_watch = ipg

        if opts.enable_gbmonitor:
            from pyon.util.gevent_block_plugin import get_gevent_monitor_block
            get_gevent_monitor_block().start()

        if not opts.noshell and not opts.daemon:
            # Keep container running while there is an interactive shell
            from pyon.container.shell_api import get_shell_api
            setup_ipython_shell(get_shell_api(container))
        elif not opts.nomanhole:
            from pyon.container.shell_api import get_shell_api
            setup_ipython_embed(get_shell_api(container))
        else:
            container.serve_forever()

    def stop_container(container):
        try:
            if container:
                container.stop()
            return True
        except Exception as ex:
            # We want to make sure to get out here alive
            print "pycc: CONTAINER STOP ERROR"
            traceback.print_exc()
            return False

    def _exists_ipython_dir():
        # Fix OOIION-1124:
        # When multiple containers are started in parallel, all start an embedded IPython shell/manhole.
        # There exists a race condition between the IPython creating the default $HOME/.python dir
        # leading to an error.
        homedir = os.path.expanduser('~')
        homedir = os.path.realpath(homedir)
        home_ipdir = os.path.join(homedir, ".ipython")
        ipdir = os.path.normpath(os.path.expanduser(home_ipdir))
        return os.path.exists(ipdir)

    def setup_ipython_shell(shell_api=None):
        if not _exists_ipython_dir():
            log.warn("IPython profile dir not found. Attempting to avoid race condition")
            import gevent
            import random
            gevent.sleep(random.random() * 3.0)  # Introduce a random delay to make conflict less likely

        ipy_config = _setup_ipython_config()

        # monkeypatch the ipython inputhook to be gevent-friendly
        import gevent   # should be auto-monkey-patched by pyon already.
        import select

        def stdin_ready():
            infds, outfds, erfds = select.select([sys.stdin], [], [], 0)
            if infds:
                return True
            else:
                return False

        def inputhook_gevent():
            try:
                while not stdin_ready():
                    gevent.sleep(0.05)
            except KeyboardInterrupt:
                pass

            return 0

        # install the gevent inputhook
        from IPython.lib.inputhook import inputhook_manager
        inputhook_manager.set_inputhook(inputhook_gevent)
        inputhook_manager._current_gui = 'gevent'

        # First import the embeddable shell class
        from IPython.frontend.terminal.embed import InteractiveShellEmbed
        from mock import patch

        # Update namespace of interactive shell
        # TODO: Cleanup namespace even further
        if shell_api is not None:
            locals().update(shell_api)

        # Now create an instance of the embeddable shell. The first argument is a
        # string with options exactly as you would type them if you were starting
        # IPython at the system command line. Any parameters you want to define for
        # configuration can thus be specified here.
        with patch("IPython.core.interactiveshell.InteractiveShell.init_virtualenv"):
            for tries in range(3):
                try:
                    ipshell = InteractiveShellEmbed(config=ipy_config,
                        banner1 = """           ____  ____  _____   __     __     ____________
          / __ \/ __ \/  _/ | / /__  / /_   / ____/ ____/
         / / / / / / // //  |/ / _ \/ __/  / /   / /
        / /_/ / /_/ // // /|  /  __/ /_   / /___/ /___
        \____/\____/___/_/ |_/\___/\__/   \____/\____/""",
                        exit_msg = 'Leaving OOINet CC shell, shutting down container.')

                    ipshell('Pyon (PID: %s) - OOINet CC interactive IPython shell. Type ionhelp() for help' % os.getpid())
                    break
                except Exception as ex:
                    log.debug("Failed IPython initialize attempt (try #%s): %s", tries, str(ex))
                    import gevent
                    import random
                    gevent.sleep(random.random() * 0.5)

    def setup_ipython_embed(shell_api=None):
        if not _exists_ipython_dir():
            log.warn("IPython profile dir not found. Attempting to avoid race condition")
            import gevent
            import random
            gevent.sleep(random.random() * 3.0)  # Introduce a random delay to make conflict less likely

        from gevent_zeromq import monkey_patch
        monkey_patch()

        # patch in device:
        # gevent-zeromq does not support devices, which block in the C layer.
        # we need to support the "heartbeat" which is a simple bounceback, so we
        # simulate it using the following method.
        import zmq
        orig_device = zmq.device

        def device_patch(dev_type, insock, outsock, *args):
            if dev_type == zmq.FORWARDER:
                while True:
                    m = insock.recv()
                    outsock.send(m)
            else:
                orig_device(dev_type, insock, outsock, *args)

        zmq.device = device_patch

        # patch in auto-completion support
        # added in https://github.com/ipython/ipython/commit/f4be28f06c2b23cd8e4a3653b9e84bde593e4c86
        # we effectively make the same patches via monkeypatching
        from IPython.core.interactiveshell import InteractiveShell
        from IPython.zmq.ipkernel import IPKernelApp
        old_start = IPKernelApp.start
        old_set_completer_frame = InteractiveShell.set_completer_frame

        def new_start(appself):
            # restore old set_completer_frame that gets no-op'd out in ZmqInteractiveShell.__init__
            bound_scf = old_set_completer_frame.__get__(appself.shell, InteractiveShell)
            appself.shell.set_completer_frame = bound_scf
            appself.shell.set_completer_frame()
            old_start(appself)

        IPKernelApp.start = new_start

        from IPython import embed_kernel
        ipy_config = _setup_ipython_config()

        # set specific manhole options
        import tempfile#, shutil
        from mock import patch
        temp_dir = tempfile.mkdtemp()
        ipy_config.Application.ipython_dir = temp_dir

        with patch("IPython.core.interactiveshell.InteractiveShell.init_virtualenv"):
            for tries in range(3):
                try:
                    embed_kernel(local_ns=shell_api, config=ipy_config)      # blocks until INT
                    break
                except Exception as ex:
                    log.debug("Failed IPython initialize attempt (try #%s): %s", tries, str(ex))
                    import gevent
                    import random
                    gevent.sleep(random.random() * 0.5)

        # @TODO: race condition here versus ipython, this will leave junk in tmp dir
        #try:
        #    shutil.rmtree(temp_dir)
        #except shutil.Error:
        #    pass

    def _setup_ipython_config():
        from IPython.config.loader import Config
        ipy_config = Config()
        ipy_config.KernelApp.connection_file = os.path.join(os.path.abspath(os.curdir), "manhole-%s.json" % os.getpid())
        ipy_config.PromptManager.in_template = '><> '
        ipy_config.PromptManager.in2_template = '... '
        ipy_config.PromptManager.out_template = '--> '
        ipy_config.InteractiveShellEmbed.confirm_exit = False
        #ipy_config.Application.log_level = 10      # uncomment for debug level ipython logging

        return ipy_config

    # main() -----> ENTER
    # ----------------------------------------------------------------------------------
    # Container life cycle

    prepare_logging()
    container = None
    try:
        container = prepare_container()
        if container is None:
            sys.exit(0)

        start_container(container)
    except Exception as ex:
#        print "pycc: ===== CONTAINER START ERROR -- FAIL ====="
#        traceback.print_exc()
        log.error('container start error', exc_info=True)
        stop_container(container)
        sys.exit(1)

    try:
        do_work(container)
    except Exception as ex:
        stop_container(container)
#        print "pycc: ===== CONTAINER PROCESS START ERROR -- ABORTING ====="
#        print ex
        log.error('container process interruption', exc_info=True)

        sys.exit(1)

    # Assumption: stop is so robust, it does not fail even if it was only partially started
    stop_ok = stop_container(container)
    if not stop_ok:
        sys.exit(1)
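
Worth noting in the pycc code above: mock.patch is used outside of a test, as a context manager that temporarily no-ops IPython's init_virtualenv while the embedded shell is created. A minimal sketch of that context-manager pattern, with time.sleep standing in for the unwanted side effect purely for illustration:

import time
from unittest import mock


def slow_startup():
    time.sleep(5)          # stand-in for an expensive or unwanted side effect
    return "ready"


# Inside the with-block, time.sleep is replaced by a no-op MagicMock,
# and the original function is restored automatically on exit.
with mock.patch('time.sleep'):
    print(slow_startup())  # prints "ready" immediately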

Example 140

Project: oauth2client Source File: test_appengine.py
    @mock.patch('oauth2client.transport.get_http_object')
    def test_required(self, new_http):
        new_http.return_value = http_mock.HttpMock(data=DEFAULT_RESP)
        # An initial request to an oauth_required decorated path should be a
        # redirect to start the OAuth dance.
        self.assertEqual(self.decorator.flow, None)
        self.assertEqual(self.decorator.credentials, None)
        response = self.app.get('http://localhost/foo_path')
        self.assertTrue(response.status.startswith('302'))
        q = urllib.parse.parse_qs(
            response.headers['Location'].split('?', 1)[1])
        self.assertEqual('http://localhost/oauth2callback',
                         q['redirect_uri'][0])
        self.assertEqual('foo_client_id', q['client_id'][0])
        self.assertEqual('foo_scope bar_scope', q['scope'][0])
        self.assertEqual('http://localhost/foo_path',
                         q['state'][0].rsplit(':', 1)[0])
        self.assertEqual('code', q['response_type'][0])
        self.assertEqual(False, self.decorator.has_credentials())

        with mock.patch.object(appengine, '_parse_state_value',
                               return_value='foo_path',
                               autospec=True) as parse_state_value:
            # Now simulate the callback to /oauth2callback.
            response = self.app.get('/oauth2callback', {
                'code': 'foo_access_code',
                'state': 'foo_path:xsrfkey123',
            })
            parts = response.headers['Location'].split('?', 1)
            self.assertEqual('http://localhost/foo_path', parts[0])
            self.assertEqual(None, self.decorator.credentials)
            if self.decorator._token_response_param:
                response_query = urllib.parse.parse_qs(parts[1])
                response = response_query[
                    self.decorator._token_response_param][0]
                self.assertEqual(json.loads(DEFAULT_RESP),
                                 json.loads(urllib.parse.unquote(response)))
            self.assertEqual(self.decorator.flow, self.decorator._tls.flow)
            self.assertEqual(self.decorator.credentials,
                             self.decorator._tls.credentials)

            parse_state_value.assert_called_once_with(
                'foo_path:xsrfkey123', self.current_user)

        # Now requesting the decorated path should work.
        response = self.app.get('/foo_path')
        self.assertEqual('200 OK', response.status)
        self.assertEqual(True, self.had_credentials)
        self.assertEqual('foo_refresh_token',
                         self.found_credentials.refresh_token)
        self.assertEqual('foo_access_token',
                         self.found_credentials.access_token)
        self.assertEqual(None, self.decorator.credentials)

        # Raising an exception still clears the Credentials.
        self.should_raise = Exception('')
        with self.assertRaises(Exception):
            self.app.get('/foo_path')
        self.should_raise = False
        self.assertEqual(None, self.decorator.credentials)

        # Access token refresh error should start the dance again
        self.should_raise = client.AccessTokenRefreshError()
        response = self.app.get('/foo_path')
        self.should_raise = False
        self.assertTrue(response.status.startswith('302'))
        query_params = urllib.parse.parse_qs(
            response.headers['Location'].split('?', 1)[1])
        self.assertEqual('http://localhost/oauth2callback',
                         query_params['redirect_uri'][0])

        # Invalidate the stored Credentials.
        self.found_credentials.invalid = True
        self.found_credentials.store.put(self.found_credentials)

        # Invalid Credentials should start the OAuth dance again.
        response = self.app.get('/foo_path')
        self.assertTrue(response.status.startswith('302'))
        query_params = urllib.parse.parse_qs(
            response.headers['Location'].split('?', 1)[1])
        self.assertEqual('http://localhost/oauth2callback',
                         query_params['redirect_uri'][0])

        # Check the mocks were called.
        new_http.assert_called_once_with()
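
The second half of the example above uses mock.patch.object(..., autospec=True) as a context manager and verifies the call with assert_called_once_with. A minimal sketch of the same combination against a real standard-library function (json.loads), chosen purely so the sketch is runnable:

import json
from unittest import mock

with mock.patch.object(json, 'loads', return_value={'ok': True},
                       autospec=True) as fake_loads:
    # autospec=True makes the mock reject calls whose signature does not
    # match the real json.loads.
    assert json.loads('anything') == {'ok': True}
    fake_loads.assert_called_once_with('anything')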

Example 141

Project: cinder Source File: test.py
    def setUp(self):
        """Run before each test method to initialize test environment."""
        super(TestCase, self).setUp()

        # Create default notifier
        self.notifier = fake_notifier.get_fake_notifier()

        # Mock rpc get notifier with fake notifier method that joins all
        # notifications with the default notifier
        p = mock.patch('cinder.rpc.get_notifier',
                       side_effect=self._get_joined_notifier)
        p.start()

        if self.MOCK_WORKER:
            # Mock worker creation for all tests that don't care about it
            clean_path = 'cinder.objects.cleanable.CinderCleanableObject.%s'
            for method in ('create_worker', 'set_worker', 'unset_worker'):
                self.patch(clean_path % method, return_value=None)

        # Unit tests do not need to use lazy gettext
        i18n.enable_lazy(False)

        test_timeout = os.environ.get('OS_TEST_TIMEOUT', 0)
        try:
            test_timeout = int(test_timeout)
        except ValueError:
            # If timeout value is invalid do not set a timeout.
            test_timeout = 0
        if test_timeout > 0:
            self.useFixture(fixtures.Timeout(test_timeout, gentle=True))
        self.useFixture(fixtures.NestedTempfile())
        self.useFixture(fixtures.TempHomeDir())

        environ_enabled = (lambda var_name:
                           strutils.bool_from_string(os.environ.get(var_name)))
        if environ_enabled('OS_STDOUT_CAPTURE'):
            stdout = self.useFixture(fixtures.StringStream('stdout')).stream
            self.useFixture(fixtures.MonkeyPatch('sys.stdout', stdout))
        if environ_enabled('OS_STDERR_CAPTURE'):
            stderr = self.useFixture(fixtures.StringStream('stderr')).stream
            self.useFixture(fixtures.MonkeyPatch('sys.stderr', stderr))

        self.useFixture(log_fixture.get_logging_handle_error_fixture())
        self.useFixture(cinder_fixtures.StandardLogging())

        rpc.add_extra_exmods("cinder.tests.unit")
        self.addCleanup(rpc.clear_extra_exmods)
        self.addCleanup(rpc.cleanup)

        self.messaging_conf = messaging_conffixture.ConfFixture(CONF)
        self.messaging_conf.transport_driver = 'fake'
        self.messaging_conf.response_timeout = 15
        self.useFixture(self.messaging_conf)
        rpc.init(CONF)

        # NOTE(geguileo): This is required because _determine_obj_version_cap
        # and _determine_rpc_version_cap functions in cinder.rpc.RPCAPI cache
        # versions in LAST_RPC_VERSIONS and LAST_OBJ_VERSIONS so we may have
        # weird interactions between tests if we don't clear them before each
        # test.
        rpc.LAST_OBJ_VERSIONS = {}
        rpc.LAST_RPC_VERSIONS = {}

        conf_fixture.set_defaults(CONF)
        CONF([], default_config_files=[])

        # NOTE(vish): We need a better method for creating fixtures for tests
        #             now that we have some required db setup for the system
        #             to work properly.
        self.start = timeutils.utcnow()

        CONF.set_default('connection', 'sqlite://', 'database')
        CONF.set_default('sqlite_synchronous', False, 'database')

        global _DB_CACHE
        if not _DB_CACHE:
            _DB_CACHE = Database(sqla_api, migration,
                                 sql_connection=CONF.database.connection)
        self.useFixture(_DB_CACHE)

        # NOTE(danms): Make sure to reset us back to non-remote objects
        # for each test to avoid interactions. Also, backup the object
        # registry.
        objects_base.CinderObject.indirection_api = None
        self._base_test_obj_backup = copy.copy(
            objects_base.CinderObjectRegistry._registry._obj_classes)
        self.addCleanup(self._restore_obj_registry)

        # emulate some of the mox stuff, we can't use the metaclass
        # because it screws with our generators
        mox_fixture = self.useFixture(moxstubout.MoxStubout())
        self.mox = mox_fixture.mox
        self.stubs = mox_fixture.stubs
        self.addCleanup(CONF.reset)
        self.addCleanup(self._common_cleanup)
        self.injected = []
        self._services = []

        fake_notifier.mock_notifier(self)

        self.override_config('fatal_exception_format_errors', True)
        # This will be cleaned up by the NestedTempfile fixture
        lock_path = self.useFixture(fixtures.TempDir()).path
        self.fixture = self.useFixture(
            config_fixture.Config(lockutils.CONF))
        self.fixture.config(lock_path=lock_path,
                            group='oslo_concurrency')
        lockutils.set_defaults(lock_path)
        self.override_config('policy_file',
                             os.path.join(
                                 os.path.abspath(
                                     os.path.join(
                                         os.path.dirname(__file__),
                                         '..',
                                     )
                                 ),
                                 self.POLICY_PATH),
                             group='oslo_policy')

        self._disable_osprofiler()
        self._disallow_invalid_uuids()

        # NOTE(geguileo): This is required because common get_by_id method in
        # cinder.db.sqlalchemy.api caches get methods and if we use a mocked
        # get method in one test it would carry on to the next test.  So we
        # clear out the cache.
        sqla_api._GET_METHODS = {}

        self.override_config('backend_url', 'file://' + lock_path,
                             group='coordination')
        coordination.COORDINATOR.start()
        self.addCleanup(coordination.COORDINATOR.stop)
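
A compact sketch of the manual patcher pattern from the cinder setUp above (mock.patch(...).start()), extended here with addCleanup(patcher.stop) so the patch is reliably undone after each test. The os.getcwd target is an arbitrary stdlib function chosen only to keep the sketch runnable.

import os
import unittest
from unittest import mock


class ExampleTestCase(unittest.TestCase):
    def setUp(self):
        patcher = mock.patch('os.getcwd', return_value='/fake/cwd')
        self.mock_getcwd = patcher.start()
        self.addCleanup(patcher.stop)   # undo the patch even if the test fails

    def test_uses_patched_cwd(self):
        self.assertEqual(os.getcwd(), '/fake/cwd')


if __name__ == '__main__':
    unittest.main()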

Example 142

Project: python-lib Source File: keenio_tests.py
    def test_get_latest_buildtime(self):
        """Test keenio.get_latest_buildtime()"""
        patcher = mock.patch(
            'keen.extraction',
            return_value=[
                {
                    "job": {
                        "duration": 345.56
                    }
                }
            ]
        )
        keen_extract_func = patcher.start()

        self.assertEqual(-1, keenio.get_latest_buildtime())
        self.assertEqual(-1, keenio.get_latest_buildtime("test/repo"))

        # test with some token (value doesn't matter, keen.extraction is mocked)
        keen.project_id = "1234abcd"
        keen.read_key = "4567abcd5678efgh"
        self.assertEqual(345.56, keenio.get_latest_buildtime("test/repo"))

        # test parameters passed to keen.extraction
        args, kwargs = keen_extract_func.call_args
        self.assertEqual(args, ("build_jobs",))
        self.assertDictEqual(kwargs, {
            'property_names': 'job.duration',
            'latest': 1,
            'filters': [{
                'operator': 'eq',
                'property_name': 'buildtime_trend.project_name',
                'property_value': 'test/repo'
            }]
        })

        # query returned two values (shouldn't happen, but test anyway)
        keen_extract_func.return_value = [
            {
                "job": {
                    "duration": 123.45
                }
            },
            {
                "job": {
                    "duration": 345.56
                }
            }
        ]
        self.assertEqual(123.45, keenio.get_latest_buildtime("test/repo"))

        # return -1 if no value is returned
        keen_extract_func.return_value = []
        self.assertEqual(-1, keenio.get_latest_buildtime("test/repo"))

        # returned value is invalid
        keen_extract_func.return_value = [
            {
                "something": {
                    "else": 345.56
                }
            }
        ]
        self.assertEqual(-1, keenio.get_latest_buildtime("test/repo"))

        # returned value is empty
        keen_extract_func.return_value = None
        self.assertEqual(-1, keenio.get_latest_buildtime("test/repo"))

        # returned value isn't a list
        keen_extract_func.return_value = {}
        self.assertEqual(-1, keenio.get_latest_buildtime("test/repo"))
        keen_extract_func.return_value = 1234
        self.assertEqual(-1, keenio.get_latest_buildtime("test/repo"))
        keen_extract_func.return_value = "test"
        self.assertEqual(-1, keenio.get_latest_buildtime("test/repo"))

        # test raising ConnectionError
        keen_extract_func.side_effect = requests.ConnectionError
        self.assertEqual(-1, keenio.get_latest_buildtime("test/repo"))

        # test raising KeenApiError (call with invalid read_key)
        keen_extract_func.side_effect = keen.exceptions.KeenApiError(
            self.test_api_error
        )
        self.assertEqual(-1, keenio.get_latest_buildtime("test/repo"))
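
A runnable sketch of the techniques exercised above: start a patcher by hand, then redirect the mocked call through successive return_value assignments and finally a side_effect that raises. random.random stands in for keen.extraction here purely so the sketch runs without the keen client installed.

import random
from unittest import mock

patcher = mock.patch('random.random', return_value=0.5)
fake_random = patcher.start()
try:
    assert random.random() == 0.5

    fake_random.return_value = 0.1          # change the canned answer mid-test
    assert random.random() == 0.1

    fake_random.side_effect = ValueError    # make the next call raise instead
    try:
        random.random()
    except ValueError:
        pass
finally:
    patcher.stop()                          # always stop a manually started patcher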

Example 143

Project: ec2-api Source File: test_vpn_gateway.py
    @mock.patch('ec2api.api.vpn_connection._reset_vpn_connections',
                wraps=vpn_connection_api._reset_vpn_connections)
    @mock.patch('ec2api.api.vpn_gateway._create_subnet_vpnservice',
                wraps=vpn_gateway_api._create_subnet_vpnservice)
    def test_attach_vpn_gateway(self, create_vpnservice,
                                reset_vpn_connections):
        create_vpnservice_calls = []
        create_vpnservice.side_effect = (
            tools.deepcopy_call_args_saver(create_vpnservice_calls))
        mock_manager = mock.Mock()
        mock_manager.attach_mock(create_vpnservice, 'create_vpnservice')
        mock_manager.attach_mock(reset_vpn_connections,
                                 'reset_vpn_connections')
        self.configure(external_network=fakes.NAME_OS_PUBLIC_NETWORK)
        subnet_2 = tools.patch_dict(fakes.DB_SUBNET_2,
                                    {'vpc_id': fakes.ID_EC2_VPC_2},
                                    ('os_vpnservice_id',))
        self.set_mock_db_items(
            fakes.DB_VPN_GATEWAY_1, fakes.DB_VPN_GATEWAY_2,
            fakes.DB_VPC_2, fakes.DB_IGW_1, fakes.DB_IGW_2,
            fakes.DB_SUBNET_1, subnet_2)
        subnet_2_updated = tools.update_dict(
            subnet_2, {'os_vpnservice_id': fakes.ID_OS_VPNSERVICE_2})
        os_vpnservice_2 = tools.patch_dict(fakes.OS_VPNSERVICE_2,
                                           {'router_id': fakes.ID_OS_ROUTER_2},
                                           ('id',))
        self.neutron.list_networks.return_value = (
            {'networks': [{'id': fakes.ID_OS_PUBLIC_NETWORK}]})
        self.neutron.create_vpnservice.side_effect = tools.get_neutron_create(
            'vpnservice', fakes.ID_OS_VPNSERVICE_2)

        def do_check():
            resp = self.execute('AttachVpnGateway',
                                {'VpcId': fakes.ID_EC2_VPC_2,
                                 'VpnGatewayId': fakes.ID_EC2_VPN_GATEWAY_2})
            self.assertEqual({'attachment': {'state': 'attached',
                                             'vpcId': fakes.ID_EC2_VPC_2}},
                             resp)
            self.assertEqual(2, self.db_api.update_item.call_count)
            self.db_api.update_item.assert_has_calls(
                [mock.call(mock.ANY, self.DB_VPN_GATEWAY_2_ATTACHED),
                 mock.call(mock.ANY, subnet_2_updated)])
            self.neutron.create_vpnservice.assert_called_once_with(
                {'vpnservice': os_vpnservice_2})
            self.assertEqual(1, len(create_vpnservice_calls))
            self.assertEqual(
                mock.call(mock.ANY, self.neutron, mock.ANY, subnet_2,
                          fakes.DB_VPC_2),
                create_vpnservice_calls[0])
            self.assertIsInstance(create_vpnservice_calls[0][1][2],
                                  common.OnCrashCleaner)
            reset_vpn_connections.assert_called_once_with(
                mock.ANY, self.neutron, mock.ANY,
                self.DB_VPN_GATEWAY_2_ATTACHED, subnets=[subnet_2_updated])
            self.assertIsInstance(reset_vpn_connections.call_args[0][2],
                                  common.OnCrashCleaner)
            mock_manager.assert_has_calls([
                mock.call.create_vpnservice(
                    *(mock.ANY for _x in range(5))),
                mock.call.reset_vpn_connections(
                    subnets=mock.ANY, *(mock.ANY for _x in range(4)))])

        do_check()
        self.neutron.add_gateway_router.assert_called_once_with(
            fakes.ID_OS_ROUTER_2,
            {'network_id': fakes.ID_OS_PUBLIC_NETWORK})
        self.neutron.list_networks.assert_called_once_with(
            **{'router:external': True,
               'name': fakes.NAME_OS_PUBLIC_NETWORK})

        # Internet gateway is already attached
        self.db_api.reset_mock()
        self.neutron.reset_mock()
        del create_vpnservice_calls[:]
        reset_vpn_connections.reset_mock()
        mock_manager.reset_mock()
        igw_2 = tools.update_dict(fakes.DB_IGW_2,
                                  {'vpc_id': fakes.ID_EC2_VPC_2})
        self.add_mock_db_items(igw_2)
        do_check()
        self.assertFalse(self.neutron.add_gateway_router.called)
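
Two techniques from the ec2-api test above in minimal form: patching with wraps= so the real function still runs while its calls are recorded, and attach_mock on a parent Mock so the relative order of calls can be asserted with assert_has_calls. The add/double helpers are invented for this sketch.

from unittest import mock


def add(a, b):
    return a + b


def double(x):
    return x * 2


with mock.patch(__name__ + '.add', wraps=add) as mock_add, \
     mock.patch(__name__ + '.double', wraps=double) as mock_double:
    manager = mock.Mock()
    manager.attach_mock(mock_add, 'add')
    manager.attach_mock(mock_double, 'double')

    assert add(2, 3) == 5      # the real implementations still run ...
    assert double(4) == 8

    # ... but the parent mock saw both calls, in order.
    manager.assert_has_calls([mock.call.add(2, 3), mock.call.double(4)])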

Example 144

Project: PerfKitBenchmarker Source File: fio_test.py
  def testParseFioResults(self):
    with mock.patch(
        fio.__name__ + '.ParseJobFile',
        return_value={
            'sequential_write': {},
            'sequential_read': {},
            'random_write_test': {},
            'random_read_test': {},
            'random_read_test_parallel': {}}):
      result = fio.ParseResults('', self.result_contents)
      expected_result = [
          ['sequential_write:write:bandwidth', 68118, 'KB/s',
           {'bw_max': 74454, 'bw_agg': 63936.8,
            'bw_min': 19225, 'bw_dev': 20346.28,
            'bw_mean': 63936.8, 'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency', 477734.84, 'usec',
           {'max': 869891, 'stddev': 92609.34, 'min': 189263, 'mean': 477734.84,
            'p60': 444416, 'p1': 387072, 'p99.9': 872448, 'p70': 448512,
            'p5': 440320, 'p90': 610304, 'p99.95': 872448, 'p80': 452608,
            'p95': 724992, 'p10': 440320, 'p99.5': 847872, 'p99': 823296,
            'p20': 440320, 'p99.99': 872448, 'p30': 444416, 'p50': 444416,
            'p40': 444416, 'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:min', 189263, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:max', 869891, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:mean', 477734.84, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:stddev', 92609.34, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p1', 387072, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p5', 440320, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p10', 440320, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p20', 440320, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p30', 444416, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p40', 444416, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p50', 444416, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p60', 444416, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p70', 448512, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p80', 452608, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p90', 610304, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p95', 724992, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p99', 823296, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p99.5', 847872, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p99.9', 872448, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p99.95', 872448, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:latency:p99.99', 872448, 'usec',
           {'fio_job': 'sequential_write'}],
          ['sequential_write:write:iops', 133, '',
           {'fio_job': 'sequential_write'}],
          ['sequential_read:read:bandwidth', 129836, 'KB/s',
           {'bw_max': 162491, 'bw_agg': 130255.2,
            'bw_min': 115250, 'bw_dev': 18551.37,
            'bw_mean': 130255.2, 'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency', 250667.06, 'usec',
           {'max': 528542, 'stddev': 70403.40, 'min': 24198, 'mean': 250667.06,
            'p60': 268288, 'p1': 59136, 'p99.9': 528384, 'p70': 272384,
            'p5': 116224, 'p90': 292864, 'p99.95': 528384, 'p80': 280576,
            'p95': 366592, 'p10': 164864, 'p99.5': 489472, 'p99': 473088,
            'p20': 199680, 'p99.99': 528384, 'p30': 246784, 'p50': 264192,
            'p40': 257024, 'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:min', 24198, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:max', 528542, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:mean', 250667.06, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:stddev', 70403.40, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p1', 59136, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p5', 116224, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p10', 164864, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p20', 199680, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p30', 246784, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p40', 257024, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p50', 264192, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p60', 268288, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p70', 272384, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p80', 280576, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p90', 292864, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p95', 366592, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p99', 473088, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p99.5', 489472, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p99.9', 528384, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p99.95', 528384, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:latency:p99.99', 528384, 'usec',
           {'fio_job': 'sequential_read'}],
          ['sequential_read:read:iops', 253, '',
           {'fio_job': 'sequential_read'}],
          ['random_write_test:write:bandwidth', 6443, 'KB/s',
           {'bw_max': 7104, 'bw_agg': 6446.55,
            'bw_min': 5896, 'bw_dev': 336.21,
            'bw_mean': 6446.55, 'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency', 587.02, 'usec',
           {'max': 81806, 'stddev': 897.93, 'min': 1, 'mean': 587.02,
            'p60': 524, 'p1': 446, 'p99.9': 3216, 'p70': 532,
            'p5': 462, 'p90': 636, 'p99.95': 4128, 'p80': 564,
            'p95': 1064, 'p10': 470, 'p99.5': 1736, 'p99': 1688,
            'p20': 482, 'p99.99': 81408, 'p30': 494, 'p50': 510,
            'p40': 502, 'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:min', 1, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:max', 81806, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:mean', 587.02, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:stddev', 897.93, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p1', 446, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p5', 462, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p10', 470, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p20', 482, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p30', 494, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p40', 502, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p50', 510, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p60', 524, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p70', 532, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p80', 564, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p90', 636, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p95', 1064, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p99', 1688, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p99.5', 1736, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p99.9', 3216, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p99.95', 4128, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:latency:p99.99', 81408, 'usec',
           {'fio_job': 'random_write_test'}],
          ['random_write_test:write:iops', 1610, '',
           {'fio_job': 'random_write_test'}],
          ['random_read_test:read:bandwidth', 1269, 'KB/s',
           {'bw_max': 1745, 'bw_agg': 1275.52,
            'bw_min': 330, 'bw_dev': 201.59,
            'bw_mean': 1275.52, 'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency', 3117.62, 'usec',
           {'max': 352736, 'stddev': 5114.37, 'min': 0, 'mean': 3117.62,
            'p60': 3312, 'p1': 524, 'p99.9': 6880, 'p70': 3344,
            'p5': 588, 'p90': 3408, 'p99.95': 11840, 'p80': 3376,
            'p95': 3440, 'p10': 2544, 'p99.5': 4128, 'p99': 3728,
            'p20': 3152, 'p99.99': 354304, 'p30': 3216, 'p50': 3280,
            'p40': 3248, 'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:min', 0, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:max', 352736, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:mean', 3117.62, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:stddev', 5114.37, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p1', 524, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p5', 588, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p10', 2544, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p20', 3152, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p30', 3216, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p40', 3248, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p50', 3280, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p60', 3312, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p70', 3344, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p80', 3376, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p90', 3408, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p95', 3440, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p99', 3728, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p99.5', 4128, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p99.9', 6880, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p99.95', 11840, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:latency:p99.99', 354304, 'usec',
           {'fio_job': 'random_read_test'}],
          ['random_read_test:read:iops', 317, '',
           {'fio_job': 'random_read_test'}],
          ['random_read_test_parallel:read:bandwidth', 1292, 'KB/s',
           {'bw_max': 1693, 'bw_agg': 1284.71,
            'bw_min': 795, 'bw_dev': 88.67,
            'bw_mean': 1284.71, 'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency', 198030.44, 'usec',
           {'max': 400078, 'stddev': 21709.40, 'min': 0, 'mean': 198030.44,
            'p60': 199680, 'p1': 65280, 'p99.9': 370688, 'p70': 203776,
            'p5': 189440, 'p90': 205824, 'p99.95': 387072, 'p80': 203776,
            'p95': 209920, 'p10': 189440, 'p99.5': 257024, 'p99': 209920,
            'p20': 193536, 'p99.99': 399360, 'p30': 197632, 'p50': 199680,
            'p40': 197632, 'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:min', 0,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:max', 400078,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:mean', 198030.44,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:stddev', 21709.40,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p1', 65280,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p5', 189440,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p10', 189440,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p20', 193536,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p30', 197632,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p40', 197632,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p50', 199680,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p60', 199680,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p70', 203776,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p80', 203776,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p90', 205824,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p95', 209920,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p99', 209920,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p99.5', 257024,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p99.9', 370688,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p99.95', 387072,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:latency:p99.99', 399360,
           'usec', {'fio_job': 'random_read_test_parallel'}],
          ['random_read_test_parallel:read:iops', 323, '',
           {'fio_job': 'random_read_test_parallel'}]]
      expected_result = [sample.Sample(*sample_tuple)
                         for sample_tuple in expected_result]
      self.assertSampleListsEqualUpToTimestamp(result, expected_result)
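
One small detail in the fio test above is worth isolating: the patch target is built from the module object itself (fio.__name__ + '.ParseJobFile') rather than a hard-coded dotted string, which keeps the target correct if the module is ever moved or renamed. The same idea against the stdlib json module, purely for illustration:

import json
from unittest import mock

# json.__name__ == 'json', so this is equivalent to mock.patch('json.dumps'),
# but the target string cannot drift out of sync with the real module path.
with mock.patch(json.__name__ + '.dumps', return_value='{"stub": true}'):
    assert json.dumps({'anything': 1}) == '{"stub": true}'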

Example 145

Project: aldryn-events Source File: test_plugins.py
    @mock.patch('aldryn_events.managers.timezone')
    def test_upcoming_plugin_for_past(self, timezone_mock):
        """
        Test the upcoming events plugin for past entries
        """
        timezone_mock.now.return_value = tz_datetime(2014, 7, 6, 12)
        evpage = api.create_page(
            title='Events en', template=self.template, language='en',
            published=True,
            apphook='EventListAppHook',
            parent=self.create_root_page(),
            apphook_namespace=self.app_config.namespace,
            publication_date=tz_datetime(2014, 1, 1)
        )
        api.create_title('de', 'Events de', evpage)
        evpage.publish('en')
        evpage.publish('de')
        page = api.create_page(
            'Home en', self.template, 'en', published=True, slug='home',
        )
        api.create_title('de', 'Home de', page)
        ph = page.placeholders.get(slot='content')
        plugin_en = api.add_plugin(ph, 'UpcomingPlugin', 'en',
                                   app_config=self.app_config)
        plugin_de = api.add_plugin(ph, 'UpcomingPlugin', 'de',
                                   app_config=self.app_config)
        plugin_en.past_events, plugin_de.past_events = True, True
        plugin_en.save()
        plugin_de.save()
        page.publish('en')
        page.publish('de')

        for i in range(1, 7):
            self.new_event_from_num(
                i,
                start_date=tz_datetime(2014, 6, 29),
                end_date=tz_datetime(2014, 7, 5),
                publish_at=tz_datetime(2014, 6, 20, 12)
            )
        start_date = tz_datetime(2014, 6, 29)
        start_time = start_date.now().time()
        e0 = Event.objects.filter(start_date=start_date)[0]
        e0.start_time = start_time
        e0.save()
        # Test plugin rendering for both languages in a for loop. Not pretty,
        # but it saves a lot of space since we test for 5 entries.
        rendered = {}
        with force_language('en'):
            response = self.client.get(page.get_absolute_url('en'))
            rendered['en'] = response.content.decode('utf-8')
        with force_language('de'):
            response = self.client.get(page.get_absolute_url('de'))
            rendered['de'] = response.content.decode('utf-8')

        for i in range(1, 6):
            for lang in ['en', 'de']:
                text = 'event' if lang == 'en' else 'ereignis'
                name = '{0} {1} {2}'.format(text, i, lang)
                expected_slug = '{0}-{1}-{2}/'.format(text, i, lang)
                apphook_url = self.get_apphook_url(language=lang)
                url = '{0}{1}'.format(apphook_url, expected_slug)
                self.assertIn(
                    name, rendered[lang],
                    'Title "{0}" not found in rendered plugin for '
                    'language "{1}".'.format(name, lang)
                )
                self.assertIn(
                    url, rendered[lang],
                    'URL "{0}" not found in rendered plugin for '
                    'language "{1}".'.format(url, lang)
                )

        self.assertNotIn(
            'event 6 en', rendered['en'],
            'Title "event 6 en" found in rendered plugin, but limit is 5 '
            'entries.'
        )
        self.assertNotIn(
            'event-6-en', rendered['en'],
            'URL "event-6-en" found in rendered plugin, but limit is 5 '
            'entries.'
        )
        self.assertNotIn(
            'event 6 de', rendered['de'],
            'Title "event 6 de" found in rendered plugin, but limit is 5 '
            'entries.'
        )
        self.assertNotIn(
            'event-6-de', rendered['de'],
            'URL "event-6-de" found in rendered plugin, but limit is 5 '
            'entries.'
        )
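
The plugin test above works because mock.patch replaces the timezone name where aldryn_events.managers looks it up, so every call to timezone.now() inside that module returns the frozen datetime. Below is a minimal, self-contained sketch of the same decorator pattern, using the standard library's time.time as a stand-in target; the target and values are illustrative assumptions, not aldryn-events code.

import time
import unittest
from unittest import mock


class FrozenClockTest(unittest.TestCase):

    # Patch where the name is looked up; the mock is injected as an argument.
    @mock.patch('time.time', return_value=1404648000.0)  # 2014-07-06 12:00 UTC
    def test_time_is_frozen(self, time_mock):
        # Code under test that calls time.time() now sees the fixed value.
        self.assertEqual(time.time(), 1404648000.0)
        self.assertTrue(time_mock.called)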

Example 146

Project: bitex Source File: test_signals.py
Function: test_subscribe
  @mock.patch('zmq.eventloop.zmqstream.ZMQStream')
  def testSubscribe(self, ZMQStreamMock):
    self.md_subscriber["BTCUSD"].subscribe(
        self.zmq_context,
        self.trade_pub,
        self.application_trade_client)

    self.application_trade_client.sendJSON.assert_called_with({
        'MDEntryTypes': ['0', '1', '2'],
        'Instruments': ['BTCUSD'],
        'MsgType': 'V',
        'TradeDate': '20150115',
        'MDReqID': '0',
        'MDUpdateType': '0',
        'SubscriptionRequestType': '0',
        'MarketDepth': 0
      })


    md_full_refresh_msg = {"MDReqID": "0", "Symbol": "BTCUSD", "MsgType": "W", "MDFullGrp": [], "MarketDepth": 0}
    self.md_subscriber["BTCUSD"].on_md_full_refresh(md_full_refresh_msg)


    self.md_subscriptions["0"] = []
    self.md_subscriptions["0"].append(
      MarketDataPublisher(
        "0",
        0,
        ["0","1"],
        "BTCUSD",
        self.on_send_json_msg_to_user,
        False))
    print len(signal_publish_md_order_depth_incremental._methods_subs['BTCUSD.3'].items())

    md_incrementa_msg = []
    md_incrementa_msg.append({
      "MsgType": "X",
      "MDBkTyp": "3",
      "MDIncGrp": [{
        "OrderID": 1,
        "MDEntryPx": 40000000000,
        "UserID": 90000002,
        "MDEntryPositionNo": 1,
        "Username": "user",
        "MDUpdateAction": "0",
        "MDEntryTime": "22:08:14",
        "Symbol": "BTCUSD",
        "Broker": "exchange",
        "MDEntryType": "1",
        "MDEntrySize": 100000000,
        "MDEntryID": 1,
        "MDEntryDate": "2015-01-15"
      }]
    })
    self.md_subscriber["BTCUSD"].on_md_incremental(md_incrementa_msg[0])
    self.assertAlmostEquals( 1 ,self.on_send_json_msg_to_user.call_count)
    self.assertAlmostEquals( 0 , len(self.md_subscriber["BTCUSD"].buy_side) )
    self.assertAlmostEquals( 1 , len(self.md_subscriber["BTCUSD"].sell_side) )


    md_incrementa_msg.append({
      "MsgType": "X",
      "MDBkTyp": "3",
      "MDIncGrp": [{
        "OrderID": 2,
        "MDEntryPx": 40000000000,
        "UserID": 90000002,
        "MDEntryPositionNo": 2,
        "Username": "user",
        "MDUpdateAction": "0",
        "MDEntryTime": "22:10:28",
        "Symbol": "BTCUSD",
        "Broker": "exchange",
        "MDEntryType": "1",
        "MDEntrySize": 100000000,
        "MDEntryID": 2,
        "MDEntryDate": "2015-01-15"
      }]
    })
    self.md_subscriber["BTCUSD"].on_md_incremental(md_incrementa_msg[1])
    self.assertAlmostEquals( 2 ,self.on_send_json_msg_to_user.call_count)
    self.assertAlmostEquals( 0 , len(self.md_subscriber["BTCUSD"].buy_side) )
    self.assertAlmostEquals( 2 , len(self.md_subscriber["BTCUSD"].sell_side) )

    print len(signal_publish_md_order_depth_incremental._methods_subs['BTCUSD.3'].items())

    # emulate 2k connections
    for x in xrange(1,2000):
      self.md_subscriptions[str(x)] = []
      self.md_subscriptions[str(x)].append(
        MarketDataPublisher(
          "0",
          str(x),
          ["0","1"],
          "BTCUSD",
          self.on_send_json_msg_to_user,
          False))

    md_incrementa_msg.append({
      "MsgType": "X",
      "MDBkTyp": "3",
      "MDIncGrp": [{
        "OrderID": 3,
        "MDEntryPx": 40000000000,
        "UserID": 90000002,
        "MDEntryPositionNo": 3,
        "Username": "user",
        "MDUpdateAction": "0",
        "MDEntryTime": "22:10:38",
        "Symbol": "BTCUSD",
        "Broker": "exchange",
        "MDEntryType": "1",
        "MDEntrySize": 100000000,
        "MDEntryID": 3,
        "MDEntryDate": "2015-01-15"
      }]
    })
    self.on_send_json_msg_to_user.reset_mock()
    self.md_subscriber["BTCUSD"].on_md_incremental(md_incrementa_msg[2])
    self.assertAlmostEquals( 0 , len(self.md_subscriber["BTCUSD"].buy_side) )
    self.assertAlmostEquals( 3 , len(self.md_subscriber["BTCUSD"].sell_side) )
    self.assertAlmostEquals( 2000 ,self.on_send_json_msg_to_user.call_count)
    print len(signal_publish_md_order_depth_incremental._methods_subs['BTCUSD.3'].items())

    # let's close the first 1k connections
    for x in xrange(0,1000):
      del self.md_subscriptions[str(x)]

    md_incrementa_msg.append({
      "MsgType": "X",
      "MDBkTyp": "3",
      "MDIncGrp": [{
        "OrderID": 4,
        "MDEntryPx": 40000000000,
        "UserID": 90000002,
        "MDEntryPositionNo": 4,
        "Username": "user",
        "MDUpdateAction": "0",
        "MDEntryTime": "22:10:38",
        "Symbol": "BTCUSD",
        "Broker": "exchange",
        "MDEntryType": "1",
        "MDEntrySize": 100000000,
        "MDEntryID": 4,
        "MDEntryDate": "2015-01-15"
      }]
    })
    self.on_send_json_msg_to_user.reset_mock()
    self.md_subscriber["BTCUSD"].on_md_incremental(md_incrementa_msg[2])
    self.assertAlmostEquals( 0 , len(self.md_subscriber["BTCUSD"].buy_side) )
    self.assertAlmostEquals( 4 , len(self.md_subscriber["BTCUSD"].sell_side) )
    self.assertAlmostEquals( 1000 ,self.on_send_json_msg_to_user.call_count)

    print len(signal_publish_md_status._methods_subs['MD_STATUS'].items())
    print len(signal_publish_md_order_depth_incremental._methods_subs['BTCUSD.3'].items())
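
The bitex test patches the zmq.eventloop.zmqstream.ZMQStream class so that subscribing never touches a real ZeroMQ stream; every instantiation hands back a MagicMock whose calls can be inspected afterwards. A self-contained sketch of that class-replacement pattern, using socket.socket as a hypothetical stand-in target:

import socket
import unittest
from unittest import mock


class PatchedClassTest(unittest.TestCase):

    @mock.patch('socket.socket')  # replace the class itself
    def test_instances_are_mocks(self, socket_cls_mock):
        conn = socket.socket()              # no real socket is created
        conn.connect(('example.com', 80))   # recorded on the mock instance
        conn.connect.assert_called_once_with(('example.com', 80))
        self.assertIs(conn, socket_cls_mock.return_value)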

Example 147

Project: osf.io Source File: test_user.py
    @mock.patch('website.mailchimp_utils.get_mailchimp_api')
    def test_merge(self, mock_get_mailchimp_api):
        other_user = factories.UserFactory()
        other_user.save()

        # define values for users' fields
        today = datetime.datetime.now()
        yesterday = today - datetime.timedelta(days=1)

        self.user.comments_viewed_timestamp['shared_gt'] = today
        other_user.comments_viewed_timestamp['shared_gt'] = yesterday
        self.user.comments_viewed_timestamp['shared_lt'] = yesterday
        other_user.comments_viewed_timestamp['shared_lt'] = today
        self.user.comments_viewed_timestamp['user'] = yesterday
        other_user.comments_viewed_timestamp['other'] = yesterday

        self.user.email_verifications = {'user': {'email': 'a'}}
        other_user.email_verifications = {'other': {'email': 'b'}}

        self.user.notifications_configured = {'abc12': True}
        other_user.notifications_configured = {'123ab': True}

        self.user.external_accounts = [factories.ExternalAccountFactory()]
        other_user.external_accounts = [factories.ExternalAccountFactory()]

        self.user.mailchimp_mailing_lists = {
            'user': True,
            'shared_gt': True,
            'shared_lt': False,
        }
        other_user.mailchimp_mailing_lists = {
            'other': True,
            'shared_gt': False,
            'shared_lt': True,
        }

        self.user.security_messages = {
            'user': today,
            'shared': today,
        }
        other_user.security_messages = {
            'other': today,
            'shared': today,
        }

        self.user.system_tags = ['user', 'shared']
        other_user.system_tags = ['other', 'shared']

        self.user.watched = [factories.WatchConfigFactory()]
        other_user.watched = [factories.WatchConfigFactory()]

        self.user.save()
        other_user.save()

        # define expected behavior for ALL FIELDS of the User object
        default_to_master_user_fields = [
            '_id',
            'date_confirmed',
            'date_disabled',
            'date_last_login',
            'date_registered',
            'email_last_sent',
            'external_identity',
            'family_name',
            'fullname',
            'given_name',
            'is_claimed',
            'is_invited',
            'is_registered',
            'jobs',
            'locale',
            'merged_by',
            'middle_names',
            'password',
            'recently_added',
            'schools',
            'social',
            'suffix',
            'timezone',
            'username',
            'mailing_lists',
            'verification_key',
            'verification_key_v2',
            '_affiliated_institutions',
            'contributor_added_email_records',
            'requested_deactivation',
            'registered_by'
        ]

        calculated_fields = {
            'comments_viewed_timestamp': {
                'user': yesterday,
                'other': yesterday,
                'shared_gt': today,
                'shared_lt': today,
            },
            'email_verifications': {
                'user': {'email': 'a'},
                'other': {'email': 'b'},
            },
            'notifications_configured': {
                '123ab': True, 'abc12': True,
            },
            'emails': [
                self.user.username,
                other_user.username,
            ],
            'external_accounts': [
                self.user.external_accounts[0]._id,
                other_user.external_accounts[0]._id,
            ],
            'mailchimp_mailing_lists': {
                'user': True,
                'other': True,
                'shared_gt': True,
                'shared_lt': True,
            },
            'osf_mailing_lists': {
                'Open Science Framework Help': True
            },
            'security_messages': {
                'user': today,
                'other': today,
                'shared': today,
            },
            'system_tags': ['user', 'shared', 'other'],
            'unclaimed_records': {},
            'watched': [
                self.user.watched[0]._id,
                other_user.watched[0]._id,
            ],
        }

        # from the explicit rules above, compile expected field/value pairs
        expected = {}
        expected.update(calculated_fields)
        for key in default_to_master_user_fields:
            expected[key] = getattr(self.user, key)

        # ensure all fields of the user object have an explicit expectation
        assert_equal(
            set(expected.keys()),
            set(self.user._fields),
        )

        # mock mailchimp
        mock_client = mock.MagicMock()
        mock_get_mailchimp_api.return_value = mock_client
        mock_client.lists.list.return_value = {'data': [{'id': x, 'list_name': list_name} for x, list_name in enumerate(self.user.mailchimp_mailing_lists)]}

        # perform the merge
        self.user.merge_user(other_user)
        self.user.save()
        handlers.celery_teardown_request()

        # check each field/value pair
        for k, v in expected.iteritems():
            assert_equal(
                getattr(self.user, k),
                v,
                # "{} doesn't match expectation".format(k)
            )

        # check fields set on merged user
        assert_equal(other_user.merged_by, self.user)

        assert_equal(
            0,
            models.Session.find(
                Q('data.auth_user_id', 'eq', other_user._id)
            ).count()
        )
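
The merge test stubs the MailChimp client by patching the factory function and configuring its return_value before the code under test runs. The same configure-then-exercise pattern is sketched below against urllib.request.urlopen as an assumed stand-in for get_mailchimp_api (Python 3 / unittest.mock; the external mock package exposes the same API):

import unittest
import urllib.request
from unittest import mock


class StubbedFactoryTest(unittest.TestCase):

    @mock.patch('urllib.request.urlopen')
    def test_configured_return_chain(self, urlopen_mock):
        # Build the fake object the patched factory will hand back.
        response = mock.MagicMock()
        response.read.return_value = b'{"data": []}'
        urlopen_mock.return_value = response

        body = urllib.request.urlopen('http://example.com').read()
        self.assertEqual(body, b'{"data": []}')
        urlopen_mock.assert_called_once_with('http://example.com')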

Example 148

Project: DIRAC Source File: Test_FilePlugin.py
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem._StorageElementItem__isLocalSE',
                return_value = S_OK( True ) )  # Pretend it's local
  @mock.patch( 'DIRAC.Resources.Storage.StorageElement.StorageElementItem.addAccountingOperation',
                return_value = None )  # Don't send accounting
  def test_04_putDirectory( self, mk_isLocalSE, mk_addAccounting ):
    """Testing putDirectory"""

    nonExistingDir = '/test/forsuredoesnotexist'
    localdirs = ['/test', nonExistingDir]

    # Correct size
    res = self.se.putDirectory( { '/test' : self.srcPath} )
    self.assertTrue( res['OK'], res )
    self.assertTrue( '/test' in res['Value']['Successful'], res )
    self.assertEqual( res['Value']['Successful']['/test'], {'Files': 2, 'Size': self.existingFileSize + self.subFileSize} )
    self.assertTrue( os.path.exists( self.basePath + '/test' ) )
    self.assertTrue( os.path.exists( self.basePath + self.existingFile ) )
    self.assertTrue( os.path.exists( self.basePath + self.subFile ) )


    # No existing source directory
    res = self.se.putDirectory( { '/test' : nonExistingDir} )
    self.assertTrue( res['OK'], res )
    self.assertTrue( '/test' in res['Value']['Failed'], res )
    self.assertEqual( res['Value']['Failed']['/test'], {'Files': 0, 'Size': 0} )

    # sub file
    res = self.se.putDirectory( { '/test' : self.existingFile} )
    self.assertTrue( res['OK'], res )
    self.assertTrue( '/test' in res['Value']['Failed'], res )
    self.assertEqual( res['Value']['Failed']['/test'], {'Files': 0, 'Size': 0} )


    res = self.se.exists( self.DIRECTORIES + localdirs )
    self.assertTrue( res['OK'], res )
    self.assertTrue( not res['Value']['Failed'], res )
    self.assertTrue( res['Value']['Successful'][self.subDir], res )
    self.assertTrue( not res['Value']['Successful'][nonExistingDir], res )

    res = self.se.getDirectorySize( self.ALL + localdirs )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value']['Successful'][self.subDir], { 'Files' : 1, 'Size' : self.subFileSize, 'SubDirs' : 0 } )
    self.assertEqual( res['Value']['Successful']['/test'], { 'Files' : 1, 'Size' : self.existingFileSize, 'SubDirs' : 1 } )
    self.assertTrue( os.strerror( errno.ENOENT ) in res['Value']['Failed'][self.nonExistingFile], res )
    self.assertTrue( os.strerror( errno.ENOTDIR ) in res['Value']['Failed'][self.existingFile], res )
    self.assertTrue( os.strerror( errno.ENOENT ) in res['Value']['Failed'][nonExistingDir], res )


    res = self.se.getDirectoryMetadata( self.ALL + localdirs )
    self.assertTrue( res['OK'], res )
    self.assertTrue( self.subDir in res['Value']['Successful'] )
    self.assertTrue( os.strerror( errno.ENOENT ) in res['Value']['Failed'][self.nonExistingFile], res )
    self.assertTrue( os.strerror( errno.ENOENT ) in res['Value']['Failed'][nonExistingDir], res )
    self.assertTrue( os.strerror( errno.ENOTDIR ) in res['Value']['Failed'][self.existingFile], res )


    res = self.se.isDirectory( self.ALL + localdirs )
    self.assertTrue( res['OK'], res )
    self.assertTrue( not res['Value']['Successful'][self.existingFile] )
    self.assertTrue( res['Value']['Successful'][self.subDir], res )
    self.assertTrue( os.strerror( errno.ENOENT ) in res['Value']['Failed'][self.nonExistingFile], res )
    self.assertTrue( os.strerror( errno.ENOENT ) in res['Value']['Failed'][nonExistingDir], res )

    res = self.se.listDirectory( self.ALL + localdirs )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value']['Successful'][self.subDir], {'Files': [self.subFile], 'SubDirs': []} )
    self.assertEqual( res['Value']['Successful']['/test'], {'Files': [self.existingFile], 'SubDirs': [self.subDir]} )
    self.assertTrue( os.strerror( errno.ENOENT ) in res['Value']['Failed'][self.nonExistingFile], res )
    self.assertTrue( os.strerror( errno.ENOTDIR ) in res['Value']['Failed'][self.existingFile], res )
    self.assertTrue( os.strerror( errno.ENOENT ) in res['Value']['Failed'][nonExistingDir], res )


    res = self.se.getDirectory( self.ALL + localdirs, localPath = self.destPath )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value']['Successful']['/test'], {'Files' : 2, 'Size' : self.existingFileSize + self.subFileSize} )
    self.assertTrue( os.path.exists( self.destPath + self.existingFile ) )
    self.assertTrue( os.path.exists( self.destPath + self.subFile ) )
    self.assertEqual( res['Value']['Successful'][self.subDir], {'Files' : 1, 'Size' : self.subFileSize} )
    self.assertTrue( os.path.exists( self.destPath + self.subFile.replace( '/test', '' ) ) )
    self.assertEqual( res['Value']['Failed'][self.nonExistingFile], {'Files': 0, 'Size': 0} )
    self.assertEqual( res['Value']['Failed'][self.existingFile], {'Files': 0, 'Size': 0} )
    self.assertEqual( res['Value']['Failed'][nonExistingDir], {'Files': 0, 'Size': 0} )


    res = self.se.removeDirectory( nonExistingDir, recursive = False )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value']['Successful'][nonExistingDir], True )

    res = self.se.removeDirectory( nonExistingDir, recursive = True )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value']['Failed'][nonExistingDir], {'FilesRemoved':0, 'SizeRemoved':0} )


    res = self.se.removeDirectory( self.nonExistingFile, recursive = False )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value']['Successful'][self.nonExistingFile], True )

    res = self.se.removeDirectory( self.nonExistingFile, recursive = True )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value']['Failed'][self.nonExistingFile], {'FilesRemoved':0, 'SizeRemoved':0} )


    res = self.se.removeDirectory( self.existingFile, recursive = False )
    self.assertTrue( res['OK'], res )
    self.assertTrue( os.strerror( errno.ENOTDIR ) in res['Value']['Failed'][self.existingFile], res )

    res = self.se.removeDirectory( self.existingFile, recursive = True )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value']['Failed'][self.existingFile], {'FilesRemoved':0, 'SizeRemoved':0} )


    res = self.se.removeDirectory( '/test', recursive = False )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value']['Successful']['/test'], True )
    self.assertTrue( not os.path.exists( self.basePath + self.existingFile ) )
    self.assertTrue( os.path.exists( self.basePath + self.subFile ) )

    res = self.se.removeDirectory( '/test', recursive = True )
    self.assertTrue( res['OK'], res )
    self.assertEqual( res['Value']['Successful']['/test'], {'FilesRemoved':1, 'SizeRemoved':self.subFileSize} )
    self.assertTrue( not os.path.exists( self.basePath + '/test' ) )
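
The DIRAC test stacks two @mock.patch decorators, each with a canned return_value, so the storage element believes it is local and never sends accounting records. When patches are stacked, they are applied bottom-up, so the decorator closest to the function supplies the first mock argument; that ordering matters whenever the injected mocks are actually used in the test body. A small sketch of the rule, using os.path functions as illustrative targets:

import os
import unittest
from unittest import mock


class StackedPatchesTest(unittest.TestCase):

    # Applied bottom-up: isfile becomes the first argument, isdir the second.
    @mock.patch('os.path.isdir', return_value=True)
    @mock.patch('os.path.isfile', return_value=False)
    def test_argument_order(self, isfile_mock, isdir_mock):
        self.assertFalse(os.path.isfile('/whatever'))
        self.assertTrue(os.path.isdir('/whatever'))
        isfile_mock.assert_called_once_with('/whatever')
        isdir_mock.assert_called_once_with('/whatever')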

Example 149

Project: logbook Source File: test_mail_handler.py
def test_mail_handler_arguments():
    with patch('smtplib.SMTP', autospec=True) as mock_smtp:

        # Test the mail handler with supported arguments before changes to
        # secure, credentials, and starttls
        mail_handler = logbook.MailHandler(
            from_addr='[email protected]',
            recipients='[email protected]',
            server_addr=('server.example.com', 465),
            credentials=('username', 'password'),
            secure=('keyfile', 'certfile'))

        mail_handler.get_connection()

        assert mock_smtp.call_args == call('server.example.com', 465)
        assert mock_smtp.method_calls[1] == call().starttls(
            keyfile='keyfile', certfile='certfile')
        assert mock_smtp.method_calls[3] == call().login('username', 'password')

        # Test secure=()
        mail_handler = logbook.MailHandler(
            from_addr='[email protected]',
            recipients='[email protected]',
            server_addr=('server.example.com', 465),
            credentials=('username', 'password'),
            secure=())

        mail_handler.get_connection()

        assert mock_smtp.call_args == call('server.example.com', 465)
        assert mock_smtp.method_calls[5] == call().starttls(
            certfile=None, keyfile=None)
        assert mock_smtp.method_calls[7] == call().login('username', 'password')

        # Test implicit port with string server_addr, dictionary credentials,
        # dictionary secure.
        mail_handler = logbook.MailHandler(
            from_addr='[email protected]',
            recipients='[email protected]',
            server_addr='server.example.com',
            credentials={'user': 'username', 'password': 'password'},
            secure={'certfile': 'certfile2', 'keyfile': 'keyfile2'})

        mail_handler.get_connection()

        assert mock_smtp.call_args == call('server.example.com', 465)
        assert mock_smtp.method_calls[9] == call().starttls(
            certfile='certfile2', keyfile='keyfile2')
        assert mock_smtp.method_calls[11] == call().login(
            user='username', password='password')

        # Test secure=True
        mail_handler = logbook.MailHandler(
            from_addr='[email protected]',
            recipients='[email protected]',
            server_addr=('server.example.com', 465),
            credentials=('username', 'password'),
            secure=True)

        mail_handler.get_connection()

        assert mock_smtp.call_args == call('server.example.com', 465)
        assert mock_smtp.method_calls[13] == call().starttls(
            certfile=None, keyfile=None)
        assert mock_smtp.method_calls[15] == call().login('username', 'password')
        assert len(mock_smtp.method_calls) == 16

        # Test secure=False
        mail_handler = logbook.MailHandler(
            from_addr='[email protected]',
            recipients='[email protected]',
            server_addr=('server.example.com', 465),
            credentials=('username', 'password'),
            secure=False)

        mail_handler.get_connection()

        # starttls is not called here; we can tell because the length of
        # method_calls is checked before and after this block.
        assert mock_smtp.call_args == call('server.example.com', 465)
        assert mock_smtp.method_calls[16] == call().login('username', 'password')
        assert len(mock_smtp.method_calls) == 17

    with patch('smtplib.SMTP_SSL', autospec=True) as mock_smtp_ssl:
        # Test starttls=False
        mail_handler = logbook.MailHandler(
            from_addr='[email protected]',
            recipients='[email protected]',
            server_addr='server.example.com',
            credentials={'user': 'username', 'password': 'password'},
            secure={'certfile': 'certfile', 'keyfile': 'keyfile'},
            starttls=False)

        mail_handler.get_connection()

        assert mock_smtp_ssl.call_args == call(
            'server.example.com', 465, keyfile='keyfile', certfile='certfile')
        assert mock_smtp_ssl.method_calls[0] == call().login(
            user='username', password='password')

        # Test starttls=False with secure=True
        mail_handler = logbook.MailHandler(
            from_addr='[email protected]',
            recipients='[email protected]',
            server_addr='server.example.com',
            credentials={'user': 'username', 'password': 'password'},
            secure=True,
            starttls=False)

        mail_handler.get_connection()

        assert mock_smtp_ssl.call_args == call(
            'server.example.com', 465, keyfile=None, certfile=None)
        assert mock_smtp_ssl.method_calls[1] == call().login(
            user='username', password='password')
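
The logbook test relies on autospec=True, so the patched SMTP class enforces the real constructor signature, and on call_args and method_calls to replay exactly what the handler did with the connection. A trimmed-down sketch of that inspection pattern; the host, port and credentials are placeholders:

import smtplib
import unittest
from unittest import mock
from unittest.mock import call


class AutospeccedSMTPTest(unittest.TestCase):

    def test_recorded_calls(self):
        with mock.patch('smtplib.SMTP', autospec=True) as smtp_mock:
            conn = smtplib.SMTP('server.example.com', 465)
            conn.login('username', 'password')

        # call_args holds the constructor call; method_calls holds the calls
        # made on the instance, written with the call() prefix.
        self.assertEqual(smtp_mock.call_args, call('server.example.com', 465))
        self.assertEqual(smtp_mock.method_calls[0],
                         call().login('username', 'password'))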

Example 150

Project: allura Source File: 012-uninstall-home.py
def main():
    test = sys.argv[-1] == 'test'
    log.info('Removing "home" tools')
    affected_projects = 0
    possibly_orphaned_projects = 0
    solr_delete = Mock()
    notification_post = Mock()
    for some_projects in utils.chunked_find(M.Project, {'neighborhood_id': {
            '$ne': ObjectId("4be2faf8898e33156f00003e")}}):
        for project in some_projects:
            c.project = project
            old_home_app = project.app_instance('home')
            if isinstance(old_home_app, ProjectHomeApp):

                # would we actually be able to install a wiki?
                if M.ProjectRole.by_name('Admin') is None:
                    log.warning('project %s may be orphaned' %
                                project.shortname)
                    possibly_orphaned_projects += 1
                    continue

                affected_projects += 1

                # remove the existing home tool
                if test:
                    log.info('would remove "home" tool from project ' +
                             project.shortname)
                else:
                    log.info('removing "home" tool from project ' +
                             project.shortname)
                    with patch('allura.app.g.solr.delete', solr_delete):
                        project.uninstall_app('home')

                # ...and put a Wiki in its place (note we only create a Wiki if we deleted the old home)
                if test:
                    log.info('would create Wiki "home" for project ' +
                             project.shortname)
                else:
                    log.info('creating Wiki "home" for project ' +
                             project.shortname)
                    home_title = project.homepage_title or 'Home'
                    wiki_text = project.description or ''
                    if wiki_text == 'You can edit this description in the admin page':
                        wiki_text = 'You can edit this description'

                    # re-number all the mounts so the new Wiki comes first
                    mounts = project.ordered_mounts()
                    with patch('forgewiki.model.wiki.Notification.post', notification_post):
                        new_home_app = project.install_app(
                            'Wiki', 'home', 'Home')
                    mounts = [{'ordinal': 0, 'ac': new_home_app.config}] + \
                        mounts
                    for i, mount in enumerate(mounts):
                        if 'ac' in mount:
                            mount['ac'].options['ordinal'] = i
                            session(mount['ac']).flush()
                        elif 'sub' in mount:
                            mount['sub'].ordinal = i
                            session(mount['sub']).flush()

                    # make it look as much like the old home tool as possible
                    new_home_app.config.options['show_left_bar'] = False
                    new_home_app.config.options['show_discussion'] = False

                    # now let's fix the home page itself
                    log.info('updating home page to "%s"' % home_title)
                    new_home_page = WM.Page.query.find(
                        dict(app_config_id=new_home_app.config._id)).first()
                    with h.push_config(c, app=new_home_app):
                        if new_home_page is None:
                            # weird: we didn't find the existing home page
                            log.warning(
                                'hmmm, actually creating the home page ("%s") for project "%s" from scratch' %
                                (home_title, project.shortname))
                            new_home_page = WM.Page.upsert(home_title)
                            new_home_page.viewable_by = ['all']
                        new_home_page.title = home_title
                        new_home_page.text = wiki_text
                        with patch('forgewiki.model.wiki.Notification.post', notification_post):
                            new_home_page.commit()
                    assert new_home_page is not None
                    assert new_home_page.title == home_title
                    assert new_home_page.version == 2

                    # if we changed the home page name, make sure the Wiki
                    # knows that's the root page
                    new_home_app.root_page_name = home_title

                session(project).flush()
            session(project).clear()
    if test:
        log.info('%s projects would be updated' % affected_projects)
    else:
        log.info('%s projects were updated' % affected_projects)
    if possibly_orphaned_projects:
        log.warning('%s possibly orphaned projects found' %
                    possibly_orphaned_projects)
    if not test:
        assert solr_delete.call_count == affected_projects, solr_delete.call_count
        assert notification_post.call_count == 2 * \
            affected_projects, notification_post.call_count
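
This migration script uses patch as a context manager with an explicit replacement, handing in pre-built Mock objects (solr_delete, notification_post) so their call_count can be checked after the loop. A minimal sketch of that replace-and-count pattern, with os.path.isfile as an illustrative target:

import os.path
import unittest
from unittest import mock


class ExplicitReplacementTest(unittest.TestCase):

    def test_replacement_object_and_call_count(self):
        fake_isfile = mock.Mock(return_value=True)
        # Hand the pre-built Mock to patch(); the original is restored on exit.
        with mock.patch('os.path.isfile', fake_isfile):
            self.assertTrue(os.path.isfile('/no/such/file'))
        self.assertEqual(fake_isfile.call_count, 1)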