# logformatter.py
  1. import os
  2. import logging
  3. from twisted.python.failure import Failure
  4. from scrapy.utils.request import referer_str
  5. SCRAPEDMSG = u"Scraped from %(src)s" + os.linesep + "%(item)s"
  6. DROPPEDMSG = u"Dropped: %(exception)s" + os.linesep + "%(item)s"
  7. CRAWLEDMSG = u"Crawled (%(status)s) %(request)s%(request_flags)s (referer: %(referer)s)%(response_flags)s"
  8. class LogFormatter(object):
  9. """Class for generating log messages for different actions.
  10. All methods must return a dictionary listing the parameters ``level``, ``msg``
  11. and ``args`` which are going to be used for constructing the log message when
  12. calling ``logging.log``.
  13. Dictionary keys for the method outputs:
  14. * ``level`` is the log level for that action, you can use those from the
  15. `python logging library <https://docs.python.org/3/library/logging.html>`_ :
  16. ``logging.DEBUG``, ``logging.INFO``, ``logging.WARNING``, ``logging.ERROR``
  17. and ``logging.CRITICAL``.
  18. * ``msg`` should be a string that can contain different formatting placeholders.
  19. This string, formatted with the provided ``args``, is going to be the long message
  20. for that action.
  21. * ``args`` should be a tuple or dict with the formatting placeholders for ``msg``.
  22. The final log message is computed as ``msg % args``.
  23. Users can define their own ``LogFormatter`` class if they want to customize how
  24. each action is logged or if they want to omit it entirely. In order to omit
  25. logging an action the method must return ``None``.
  26. Here is an example on how to create a custom log formatter to lower the severity level of
  27. the log message when an item is dropped from the pipeline::
  28. class PoliteLogFormatter(logformatter.LogFormatter):
  29. def dropped(self, item, exception, response, spider):
  30. return {
  31. 'level': logging.INFO, # lowering the level from logging.WARNING
  32. 'msg': u"Dropped: %(exception)s" + os.linesep + "%(item)s",
  33. 'args': {
  34. 'exception': exception,
  35. 'item': item,
  36. }
  37. }
  38. """
  39. def crawled(self, request, response, spider):
  40. """Logs a message when the crawler finds a webpage."""
  41. request_flags = ' %s' % str(request.flags) if request.flags else ''
  42. response_flags = ' %s' % str(response.flags) if response.flags else ''
  43. return {
  44. 'level': logging.DEBUG,
  45. 'msg': CRAWLEDMSG,
  46. 'args': {
  47. 'status': response.status,
  48. 'request': request,
  49. 'request_flags': request_flags,
  50. 'referer': referer_str(request),
  51. 'response_flags': response_flags,
  52. # backward compatibility with Scrapy logformatter below 1.4 version
  53. 'flags': response_flags
  54. }
  55. }
  56. def scraped(self, item, response, spider):
  57. """Logs a message when an item is scraped by a spider."""
  58. if isinstance(response, Failure):
  59. src = response.getErrorMessage()
  60. else:
  61. src = response
  62. return {
  63. 'level': logging.DEBUG,
  64. 'msg': SCRAPEDMSG,
  65. 'args': {
  66. 'src': src,
  67. 'item': item,
  68. }
  69. }
  70. def dropped(self, item, exception, response, spider):
  71. """Logs a message when an item is dropped while it is passing through the item pipeline."""
  72. return {
  73. 'level': logging.WARNING,
  74. 'msg': DROPPEDMSG,
  75. 'args': {
  76. 'exception': exception,
  77. 'item': item,
  78. }
  79. }
  80. @classmethod
  81. def from_crawler(cls, crawler):
  82. return cls()