"""
Base class for Scrapy spiders

See documentation in docs/topics/spiders.rst
"""
import logging
import warnings

from scrapy import signals
from scrapy.http import Request
from scrapy.utils.trackref import object_ref
from scrapy.utils.url import url_is_from_spider
from scrapy.exceptions import ScrapyDeprecationWarning
from scrapy.utils.deprecate import method_is_overridden


class Spider(object_ref):
    """Base class for scrapy spiders. All spiders must inherit from this
    class.
    """

    name = None
    custom_settings = None

    def __init__(self, name=None, **kwargs):
        if name is not None:
            self.name = name
        elif not getattr(self, 'name', None):
            raise ValueError("%s must have a name" % type(self).__name__)
        self.__dict__.update(kwargs)
        if not hasattr(self, 'start_urls'):
            self.start_urls = []
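
    # Keyword arguments become instance attributes via the
    # ``self.__dict__.update(kwargs)`` line above; this is what lets
    # ``scrapy crawl myspider -a category=humor`` expose ``self.category``
    # inside the spider. A sketch ('myspider' and 'category' are
    # illustrative names):
    #
    #     spider = MySpider(name='myspider', category='humor')
    #     spider.category   # -> 'humor'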

    @property
    def logger(self):
        logger = logging.getLogger(self.name)
        return logging.LoggerAdapter(logger, {'spider': self})

    def log(self, message, level=logging.DEBUG, **kw):
        """Log the given message at the given log level.

        This helper wraps a log call to the logger within the spider, but you
        can use it directly (e.g. Spider.logger.info('msg')) or use any other
        Python logger too.
        """
        self.logger.log(level, message, **kw)
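
    # Usage sketch for the logger property above; the spider name becomes the
    # logger name, so log output can be filtered per spider:
    #
    #     def parse(self, response):
    #         self.logger.info('Parse called on %s', response.url)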

    @classmethod
    def from_crawler(cls, crawler, *args, **kwargs):
        spider = cls(*args, **kwargs)
        spider._set_crawler(crawler)
        return spider
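
    # Subclasses that need the crawler (settings, signals, stats) usually
    # extend this hook rather than __init__. A hedged sketch; ``MySpider``
    # and its ``spider_closed`` callback are placeholder names:
    #
    #     @classmethod
    #     def from_crawler(cls, crawler, *args, **kwargs):
    #         spider = super(MySpider, cls).from_crawler(crawler, *args, **kwargs)
    #         crawler.signals.connect(spider.spider_closed,
    #                                 signal=signals.spider_closed)
    #         return spider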

    def _set_crawler(self, crawler):
        self.crawler = crawler
        self.settings = crawler.settings
        # Ensure close() runs when the engine fires the spider_closed signal.
        crawler.signals.connect(self.close, signals.spider_closed)

    def start_requests(self):
        cls = self.__class__
        if method_is_overridden(cls, Spider, 'make_requests_from_url'):
            warnings.warn(
                "Spider.make_requests_from_url method is deprecated; it "
                "won't be called in future Scrapy releases. Please "
                "override Spider.start_requests method instead (see %s.%s)." % (
                    cls.__module__, cls.__name__
                ),
                ScrapyDeprecationWarning,
            )
            for url in self.start_urls:
                yield self.make_requests_from_url(url)
        else:
            for url in self.start_urls:
                # dont_filter=True so start URLs are never dropped by the
                # duplicate-request filter.
                yield Request(url, dont_filter=True)
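
    # Overriding start_requests is the supported replacement for
    # make_requests_from_url. A sketch using scrapy.http.FormRequest with a
    # hypothetical login URL and callback:
    #
    #     def start_requests(self):
    #         return [FormRequest('http://www.example.com/login',
    #                             formdata={'user': 'john', 'pass': 'secret'},
    #                             callback=self.logged_in)]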

    def make_requests_from_url(self, url):
        """This method is deprecated; override start_requests() instead."""
        return Request(url, dont_filter=True)

    def parse(self, response):
        raise NotImplementedError(
            '{}.parse callback is not defined'.format(self.__class__.__name__)
        )

    @classmethod
    def update_settings(cls, settings):
        settings.setdict(cls.custom_settings or {}, priority='spider')
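
    # custom_settings is read here at the class level, before the spider is
    # instantiated, which is why it must be a class attribute. A subclass
    # would declare, for example (``ThrottledSpider`` is a made-up name):
    #
    #     class ThrottledSpider(Spider):
    #         name = 'throttled'
    #         custom_settings = {
    #             'DOWNLOAD_DELAY': 2,
    #             'CONCURRENT_REQUESTS_PER_DOMAIN': 1,
    #         }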

    @classmethod
    def handles_request(cls, request):
        return url_is_from_spider(request.url, cls)

    @staticmethod
    def close(spider, reason):
        # Delegate to an optional closed() method defined by the subclass.
        closed = getattr(spider, 'closed', None)
        if callable(closed):
            return closed(reason)

    def __str__(self):
        return "<%s %r at 0x%0x>" % (type(self).__name__, self.name, id(self))

    __repr__ = __str__


# Top-level imports; kept at the bottom because these modules import Spider
# from this package, so importing them earlier would be circular.
from scrapy.spiders.crawl import CrawlSpider, Rule
from scrapy.spiders.feed import XMLFeedSpider, CSVFeedSpider
from scrapy.spiders.sitemap import SitemapSpider
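
# End-to-end usage sketch (illustrative, not part of this module). Saved as
# e.g. myproject/spiders/quotes.py in a Scrapy project and run with
# ``scrapy crawl quotes``; the toscrape.com URL is the Scrapy tutorial site:
#
#     import scrapy
#
#     class QuotesSpider(scrapy.Spider):
#         name = 'quotes'
#         start_urls = ['http://quotes.toscrape.com/page/1/']
#
#         def parse(self, response):
#             for quote in response.css('div.quote'):
#                 yield {'text': quote.css('span.text::text').extract_first()}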