unittest.mock.Mock.return_value

Here are examples of the Python API unittest.mock.Mock.return_value, taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.
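
For orientation, here is a minimal sketch (not taken from the projects below) of the two ways return_value shows up in these examples: configuring what a mocked method returns, and treating a Mock's return_value as the "instance" produced when the mock itself is called, which is the pattern behind Mock(spec_set=Database).return_value in the snippets that follow.

from unittest.mock import Mock

# Configure what a mocked method returns when it is called.
db = Mock()
db.list_collection_names.return_value = ['col1', 'col2']
assert db.list_collection_names() == ['col1', 'col2']

# Calling a Mock returns its .return_value, so a mock standing in for a
# class gives you a ready-made "instance" mock. The same object is returned
# on every call, so it can be configured up front.
database_cls = Mock()
instance = database_cls.return_value
assert database_cls() is instance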

5 Examples

3 Source : test_fastsync_tap_mongodb.py
with Apache License 2.0
from transferwise

    def test_copy_table_with_collection_not_found_expect_exception(self):
        """
        Test copy_table method with a collection name that's not found in the db, thus raising a TableNotFoundError
        """
        self.mongo.database = Mock(spec_set=Database).return_value
        self.mongo.database.list_collection_names.return_value = [
            'col1',
            'col2',
            'col3',
        ]

        with self.assertRaises(TableNotFoundError):
            self.mongo.copy_table('my_col', TEST_EXPORT_FILE, 'tmp')

        self.assertEqual(self.mongo.database.list_collection_names.call_count, 1)

    def test_copy_table_with_collection_found_but_export_failed_expect_exception(self):

0 Source : test_fastsync_tap_mongodb.py
with Apache License 2.0
from transferwise

    def test_copy_table_with_collection_found_but_export_failed_expect_exception(self):
        """
        Test copy_table method with a collection name that's found in the db but whose export fails, thus raising an ExportError
        """
        self.mongo.database = Mock(spec_set=Database).return_value
        self.mongo.database.list_collection_names.return_value = [
            'col1',
            'col2',
            'col3',
            'my_col',
        ]

        with patch(
            'pipelinewise.fastsync.commons.tap_mongodb.subprocess.call'
        ) as call_mock:
            call_mock.return_value = 1

            with self.assertRaises(ExportError):
                self.mongo.copy_table('my_col', TEST_EXPORT_FILE, 'tmp')

            call_mock.assert_called_once_with(
                [
                    'mongodump',
                    '--uri',
                    '"mongodb://my_user:[email protected]:3306/my_db'
                    '?readPreference=secondaryPreferred&authSource=admin&ssl=true"',
                    '--forceTableScan',
                    '--gzip',
                    '-c',
                    'my_col',
                    '-o',
                    'tmp',
                ]
            )

        self.assertEqual(self.mongo.database.list_collection_names.call_count, 1)

    def test_copy_table_with_collection_found_success(self):

0 Source : test_fastsync_tap_mongodb.py
with Apache License 2.0
from transferwise

    def test_copy_table_with_collection_found_success(self):
        """
        Test copy_table method with a collection name that's in the db, so the copy should complete successfully
        """
        self.mongo.database = Mock(spec_set=Database).return_value
        self.mongo.database.list_collection_names.return_value = [
            'col1',
            'col2',
            'col3',
            'my_col',
        ]

        with patch(
            'pipelinewise.fastsync.commons.tap_mongodb.subprocess.call'
        ) as call_mock:
            call_mock.return_value = 0

            with patch(
                'pipelinewise.fastsync.commons.tap_mongodb.os.remove'
            ) as os_remove_mock:
                os_remove_mock.return_value = True

                with patch(
                    'pipelinewise.fastsync.commons.tap_mongodb.gzip'
                ) as gzip_mock:
                    mock_enter = Mock()

                    with patch(
                        'pipelinewise.fastsync.commons.tap_mongodb.bson.decode_file_iter'
                    ) as bson_decode_iter_mock:

                        # generate 10 documents
                        bson_decode_iter_mock.return_value = (
                            generate_all_datatypes_doc() for _ in range(10)
                        )

                        mock_enter.return_value.open.return_value = Mock()

                        gzip_mock.return_value.__enter__ = mock_enter
                        gzip_mock.return_value.__exit__ = Mock()

                        self.mongo.copy_table('my_col', TEST_EXPORT_FILE, 'tmp')

                        call_mock.assert_called_once_with(
                            [
                                'mongodump',
                                '--uri',
                                '"mongodb://my_user:[email protected]:3306/my_db'
                                '?readPreference=secondaryPreferred&authSource=admin&ssl=true"',
                                '--forceTableScan',
                                '--gzip',
                                '-c',
                                'my_col',
                                '-o',
                                'tmp',
                            ]
                        )

                        os_remove_mock.assert_has_calls(
                            [
                                call('tmp/my_db/my_col.metadata.json.gz'),
                                call('tmp/my_db/my_col.bson.gz'),
                            ]
                        )
                        self.assertEqual(os_remove_mock.call_count, 2)
                        self.assertEqual(bson_decode_iter_mock.call_count, 1)

    def test_fetch_current_log_pos_return_first_token(self):

0 Source : test_fastsync_tap_mongodb.py
with Apache License 2.0
from transferwise

    def test_fetch_current_log_pos_return_first_token(self):
        """
        Test fetch_current_log_pos should return the first encountered token
        """
        cursor_mock = Mock(spec_set=DatabaseChangeStream).return_value
        type(cursor_mock).alive = PropertyMock(return_value=True)
        type(cursor_mock).resume_token = PropertyMock(
            side_effect=[
                {'_data': 'token1', '_typeBits': b'\x81\x80'},
                {
                    '_data': 'token2',
                },
                {'_data': 'token3'},
                {'_data': 'token4'},
            ]
        )
        cursor_mock.try_next.side_effect = [{}, {}, {}]

        mock_enter = Mock()
        mock_enter.return_value = cursor_mock

        mock_watch = Mock().return_value
        mock_watch.__enter__ = mock_enter
        mock_watch.__exit__ = Mock()

        self.mongo.database = Mock(spec_set=Database).return_value
        self.mongo.database.watch.return_value = mock_watch

        self.assertDictEqual(
            {'token': {'_data': 'token1'}}, self.mongo.fetch_current_log_pos()
        )

    def test_fetch_current_incremental_key_pos(self):

0 Source : test_fastsync_tap_s3_csv.py
with Apache License 2.0
from transferwise

    def test_get_file_records(self):
        with patch.object(S3Helper, 'get_file_handle') as get_file_handle_mock:
            handle = Mock().return_value
            handle.configure_mock(**{'_raw_stream.return_value': 'file handle'})

            get_file_handle_mock.return_value = handle.return_value

            with patch(
                'pipelinewise.fastsync.commons.tap_s3_csv.singer_encodings_csv'
            ) as singer_encodings_csv_mock:
                singer_encodings_csv_mock.get_row_iterator.return_value = [
                    {
                        'id': 1,
                        'group': 'A',
                    },
                    {'id': 2, 'group': 'A', 'test': True},
                    {
                        'id': 3,
                        'group': 'B',
                    },
                ]

                with patch(
                    'pipelinewise.fastsync.commons.tap_s3_csv.datetime'
                ) as datetime_mock:
                    datetime_mock.utcnow.return_value.strftime.return_value = (
                        '2019-11-21'
                    )

                    records = []
                    headers = set()

                    self.fs_tap_s3_csv._get_file_records(
                        's3 path 1', {}, records, headers
                    )

                    self.assertListEqual(
                        [
                            {
                                S3Helper.SDC_SOURCE_BUCKET_COLUMN: 'testBucket',
                                S3Helper.SDC_SOURCE_FILE_COLUMN: 's3 path 1',
                                S3Helper.SDC_SOURCE_LINENO_COLUMN: 1,
                                '_SDC_EXTRACTED_AT': '2019-11-21',
                                '_SDC_BATCHED_AT': '2019-11-21',
                                '_SDC_DELETED_AT': None,
                                '"ID"': 1,
                                '"GROUP"': 'A',
                            },
                            {
                                S3Helper.SDC_SOURCE_BUCKET_COLUMN: 'testBucket',
                                S3Helper.SDC_SOURCE_FILE_COLUMN: 's3 path 1',
                                S3Helper.SDC_SOURCE_LINENO_COLUMN: 2,
                                '_SDC_EXTRACTED_AT': '2019-11-21',
                                '_SDC_BATCHED_AT': '2019-11-21',
                                '_SDC_DELETED_AT': None,
                                '"ID"': 2,
                                '"GROUP"': 'A',
                                '"TEST"': True,
                            },
                            {
                                S3Helper.SDC_SOURCE_BUCKET_COLUMN: 'testBucket',
                                S3Helper.SDC_SOURCE_FILE_COLUMN: 's3 path 1',
                                S3Helper.SDC_SOURCE_LINENO_COLUMN: 3,
                                '_SDC_EXTRACTED_AT': '2019-11-21',
                                '_SDC_BATCHED_AT': '2019-11-21',
                                '_SDC_DELETED_AT': None,
                                '"ID"': 3,
                                '"GROUP"': 'B',
                            },
                        ],
                        records,
                    )

                    self.assertSetEqual(
                        {
                            '"ID"',
                            '"GROUP"',
                            '"TEST"',
                            S3Helper.SDC_SOURCE_LINENO_COLUMN,
                            S3Helper.SDC_SOURCE_FILE_COLUMN,
                            S3Helper.SDC_SOURCE_BUCKET_COLUMN,
                            '_SDC_EXTRACTED_AT',
                            '_SDC_BATCHED_AT',
                            '_SDC_DELETED_AT',
                        },
                        headers,
                    )

    def test_fetch_current_incremental_key_pos_with_no_tables_in_dictionary_returns_empty_dict(