COOKIES_ENABLED
COOKIES_ENABLED is handled by the CookiesMiddleware: go into the scrapy package -> the downloadermiddlewares subpackage -> the cookies module.
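For reference, this is a minimal sketch of how the two settings read by that middleware are typically toggled in a project's settings.py; the values shown are only for illustration (COOKIES_ENABLED defaults to True, COOKIES_DEBUG to False):

# settings.py
COOKIES_ENABLED = True   # set to False to disable CookiesMiddleware entirely
COOKIES_DEBUG = True     # log cookies sent in requests and received in responses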
Inside that module, the relevant part of CookiesMiddleware looks like this:

from collections import defaultdict

from scrapy.exceptions import NotConfigured
from scrapy.http.cookies import CookieJar


class CookiesMiddleware:
    """This middleware enables working with sites that need cookies"""

    def __init__(self, debug=False):
        self.jars = defaultdict(CookieJar)
        self.debug = debug

    @classmethod
    def from_crawler(cls, crawler):
        # This component acts on every scrapy Request: it extracts the cookies
        # set by the previous Request/Response and merges them into the
        # cookies of the next Request.
        if not crawler.settings.getbool('COOKIES_ENABLED'):
            raise NotConfigured
        return cls(crawler.settings.getbool('COOKIES_DEBUG'))

    def process_request(self, request, spider):
        # Requests flagged with dont_merge_cookies skip cookie handling.
        if request.meta.get('dont_merge_cookies', False):
            return
        # (rest of the method omitted)
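To make the effect of process_request concrete, here is a minimal spider sketch; the spider name, URLs, and cookie values are placeholders, not part of the Scrapy source above:

import scrapy


class CookieDemoSpider(scrapy.Spider):
    name = "cookie_demo"  # hypothetical spider

    def start_requests(self):
        # Normal case: the cookies set here (and any Set-Cookie headers in the
        # response) are stored in the middleware's CookieJar and merged into
        # subsequent requests automatically.
        yield scrapy.Request(
            "https://example.com/login",        # placeholder URL
            cookies={"sessionid": "abc123"},    # placeholder cookie
            callback=self.parse,
        )
        # Opt-out case: dont_merge_cookies=True makes process_request return
        # early, so no stored cookies are attached to this request.
        yield scrapy.Request(
            "https://example.com/anonymous",    # placeholder URL
            meta={"dont_merge_cookies": True},
            callback=self.parse,
        )

    def parse(self, response):
        self.logger.info("Got %s", response.url)

With COOKIES_DEBUG = True, the middleware logs the Cookie headers it sends and the Set-Cookie headers it receives, which is useful for verifying which of the two paths a request took.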