Here are examples of the Python API `scrapy.utils.datatypes.SequenceExclude`, taken from open-source projects. By voting up you can indicate which examples are most useful and appropriate.
3 Examples
3
Source: media.py
with MIT License
from autofelix
def _handle_statuses(self, allow_redirects):
self.handle_httpstatus_list = None
if allow_redirects:
self.handle_httpstatus_list = SequenceExclude(range(300, 400))
def _key_for_pipe(self, key, base_class_name=None,
3
Source: shell.py
with MIT License
from autofelix
def fetch(self, request_or_url, spider=None, redirect=True, **kwargs):
    """Fetch a request (or URL) on the reactor thread and publish the result.

    *request_or_url* may be a ready-made Request or anything convertible
    to a URI. With redirect=True, 3xx responses are left to the redirect
    middleware; otherwise every status code is handled directly. The
    resulting response, request, and spider are exposed via
    populate_vars().
    """
    if isinstance(request_or_url, Request):
        request = request_or_url
    else:
        # Build a fresh, unfiltered request from the given URL/string.
        request = Request(any_to_uri(request_or_url), dont_filter=True, **kwargs)
    if redirect:
        # Handle all statuses except 3xx so redirects are followed.
        request.meta['handle_httpstatus_list'] = SequenceExclude(range(300, 400))
    else:
        request.meta['handle_httpstatus_all'] = True
    response = None
    try:
        # Crawling happens in the reactor thread; block here until it finishes.
        response, spider = threads.blockingCallFromThread(
            reactor, self._schedule, request, spider)
    except IgnoreRequest:
        # A dropped request simply leaves response as None.
        pass
    self.populate_vars(response, request, spider)
def populate_vars(self, response=None, request=None, spider=None):
0
Source: fetch.py
with MIT License
from autofelix
def run(self, args, opts):
    """Fetch the single URL given in *args* and print the response.

    Raises UsageError unless exactly one valid URL argument is supplied.
    """
    if len(args) != 1 or not is_url(args[0]):
        raise UsageError()
    request = Request(
        args[0],
        callback=lambda response: self._print_response(response, opts),
        dont_filter=True,
    )
    # By default let the framework follow redirects, i.e. the command
    # handles every status code except 3xx.
    if not opts.no_redirect:
        request.meta['handle_httpstatus_list'] = SequenceExclude(range(300, 400))
    else:
        request.meta['handle_httpstatus_all'] = True
    spider_loader = self.crawler_process.spider_loader
    if opts.spider:
        spidercls = spider_loader.load(opts.spider)
    else:
        # Pick whichever spider matches the request, falling back to the default.
        spidercls = spidercls_for_request(spider_loader, request, DefaultSpider)
    self.crawler_process.crawl(spidercls, start_requests=lambda: [request])
    self.crawler_process.start()