scrapy.pipelines.files.S3FilesStore

Here are examples of the Python API scrapy.pipelines.files.S3FilesStore taken from open source projects.

1 Example

Example 1

Project: scrapy, Source File: test_pipeline_files.py
import os

from io import BytesIO
from urllib.parse import urlparse

from twisted.internet import defer
from twisted.trial import unittest

from scrapy.pipelines.files import S3FilesStore
from scrapy.utils.test import (assert_aws_environ, get_s3_content_and_delete,
                               is_botocore)


class TestS3FilesStore(unittest.TestCase):

    @defer.inlineCallbacks
    def test_persist(self):
        # Skip unless AWS credentials and a test bucket are configured.
        assert_aws_environ()
        uri = os.environ.get('S3_TEST_FILE_URI')
        if not uri:
            raise unittest.SkipTest("No S3 URI available for testing")
        data = b"TestS3FilesStore: \xe2\x98\x83"
        buf = BytesIO(data)
        meta = {'foo': 'bar'}
        path = ''
        store = S3FilesStore(uri)
        # Upload the buffer, then read back the stored file's metadata.
        yield store.persist_file(
            path, buf, info=None, meta=meta,
            headers={'Content-Type': 'image/png'})
        s = yield store.stat_file(path, info=None)
        self.assertIn('last_modified', s)
        self.assertIn('checksum', s)
        self.assertEqual(s['checksum'], '3187896a9657a28163abb31667df64c8')
        # Fetch the object straight from S3 and delete it to clean up.
        u = urlparse(uri)
        content, key = get_s3_content_and_delete(
            u.hostname, u.path[1:], with_key=True)
        self.assertEqual(content, data)
        # The key's attributes are exposed differently by the botocore
        # and boto backends, so the assertions branch on the client in use.
        if is_botocore():
            self.assertEqual(key['Metadata'], {'foo': 'bar'})
            self.assertEqual(
                key['CacheControl'], S3FilesStore.HEADERS['Cache-Control'])
            self.assertEqual(key['ContentType'], 'image/png')
        else:
            self.assertEqual(key.metadata, {'foo': 'bar'})
            self.assertEqual(
                key.cache_control, S3FilesStore.HEADERS['Cache-Control'])
            self.assertEqual(key.content_type, 'image/png')
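
In a real project you rarely instantiate S3FilesStore directly: FilesPipeline picks the store backend from the scheme of the FILES_STORE setting, and an s3:// URI selects S3FilesStore. Below is a minimal configuration sketch; the bucket name is hypothetical, and the placeholder credentials stand in for values you would supply via settings, the environment, or an IAM role.

    # settings.py -- minimal sketch; bucket name and credentials are placeholders
    ITEM_PIPELINES = {
        'scrapy.pipelines.files.FilesPipeline': 1,
    }

    # The s3:// scheme makes FilesPipeline use S3FilesStore as its backend.
    FILES_STORE = 's3://my-bucket/files/'

    # Standard Scrapy AWS settings; can be omitted if credentials come
    # from the environment or an instance role.
    AWS_ACCESS_KEY_ID = '...'
    AWS_SECRET_ACCESS_KEY = '...'

Because the pipeline maps URI schemes to store classes, the same spider code works unchanged whether FILES_STORE points at local disk (a plain path) or at S3.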