"""
MemoryUsage extension

See documentation in docs/topics/extensions.rst
"""
import sys
import socket
import logging
from pprint import pformat
from importlib import import_module

from twisted.internet import task

from scrapy import signals
from scrapy.exceptions import NotConfigured
from scrapy.mail import MailSender
from scrapy.utils.engine import get_engine_status
  15. logger = logging.getLogger(__name__)
  16. class MemoryUsage(object):
  17. def __init__(self, crawler):
  18. if not crawler.settings.getbool('MEMUSAGE_ENABLED'):
  19. raise NotConfigured
  20. try:
  21. # stdlib's resource module is only available on unix platforms.
  22. self.resource = import_module('resource')
  23. except ImportError:
  24. raise NotConfigured
  25. self.crawler = crawler
  26. self.warned = False
  27. self.notify_mails = crawler.settings.getlist('MEMUSAGE_NOTIFY_MAIL')
  28. self.limit = crawler.settings.getint('MEMUSAGE_LIMIT_MB')*1024*1024
  29. self.warning = crawler.settings.getint('MEMUSAGE_WARNING_MB')*1024*1024
  30. self.check_interval = crawler.settings.getfloat('MEMUSAGE_CHECK_INTERVAL_SECONDS')
  31. self.mail = MailSender.from_settings(crawler.settings)
  32. crawler.signals.connect(self.engine_started, signal=signals.engine_started)
  33. crawler.signals.connect(self.engine_stopped, signal=signals.engine_stopped)
  34. @classmethod
  35. def from_crawler(cls, crawler):
  36. return cls(crawler)
  37. def get_virtual_size(self):
  38. size = self.resource.getrusage(self.resource.RUSAGE_SELF).ru_maxrss
  39. if sys.platform != 'darwin':
  40. # on Mac OS X ru_maxrss is in bytes, on Linux it is in KB
  41. size *= 1024
  42. return size
  43. def engine_started(self):
  44. self.crawler.stats.set_value('memusage/startup', self.get_virtual_size())
  45. self.tasks = []
  46. tsk = task.LoopingCall(self.update)
  47. self.tasks.append(tsk)
  48. tsk.start(self.check_interval, now=True)
  49. if self.limit:
  50. tsk = task.LoopingCall(self._check_limit)
  51. self.tasks.append(tsk)
  52. tsk.start(self.check_interval, now=True)
  53. if self.warning:
  54. tsk = task.LoopingCall(self._check_warning)
  55. self.tasks.append(tsk)
  56. tsk.start(self.check_interval, now=True)
  57. def engine_stopped(self):
  58. for tsk in self.tasks:
  59. if tsk.running:
  60. tsk.stop()
  61. def update(self):
  62. self.crawler.stats.max_value('memusage/max', self.get_virtual_size())
  63. def _check_limit(self):
  64. if self.get_virtual_size() > self.limit:
  65. self.crawler.stats.set_value('memusage/limit_reached', 1)
  66. mem = self.limit/1024/1024
  67. logger.error("Memory usage exceeded %(memusage)dM. Shutting down Scrapy...",
  68. {'memusage': mem}, extra={'crawler': self.crawler})
  69. if self.notify_mails:
  70. subj = "%s terminated: memory usage exceeded %dM at %s" % \
  71. (self.crawler.settings['BOT_NAME'], mem, socket.gethostname())
  72. self._send_report(self.notify_mails, subj)
  73. self.crawler.stats.set_value('memusage/limit_notified', 1)
  74. open_spiders = self.crawler.engine.open_spiders
  75. if open_spiders:
  76. for spider in open_spiders:
  77. self.crawler.engine.close_spider(spider, 'memusage_exceeded')
  78. else:
  79. self.crawler.stop()
  80. def _check_warning(self):
  81. if self.warned: # warn only once
  82. return
  83. if self.get_virtual_size() > self.warning:
  84. self.crawler.stats.set_value('memusage/warning_reached', 1)
  85. mem = self.warning/1024/1024
  86. logger.warning("Memory usage reached %(memusage)dM",
  87. {'memusage': mem}, extra={'crawler': self.crawler})
  88. if self.notify_mails:
  89. subj = "%s warning: memory usage reached %dM at %s" % \
  90. (self.crawler.settings['BOT_NAME'], mem, socket.gethostname())
  91. self._send_report(self.notify_mails, subj)
  92. self.crawler.stats.set_value('memusage/warning_notified', 1)
  93. self.warned = True
  94. def _send_report(self, rcpts, subject):
  95. """send notification mail with some additional useful info"""
  96. stats = self.crawler.stats
  97. s = "Memory usage at engine startup : %dM\r\n" % (stats.get_value('memusage/startup')/1024/1024)
  98. s += "Maximum memory usage : %dM\r\n" % (stats.get_value('memusage/max')/1024/1024)
  99. s += "Current memory usage : %dM\r\n" % (self.get_virtual_size()/1024/1024)
  100. s += "ENGINE STATUS ------------------------------------------------------- \r\n"
  101. s += "\r\n"
  102. s += pformat(get_engine_status(self.crawler.engine))
  103. s += "\r\n"
  104. self.mail.send(rcpts, subject, s)