Source code for scrapy.statscollectors

"""
Scrapy extension for collecting scraping stats
"""
import logging
import pprint

logger = logging.getLogger(__name__)


class StatsCollector:

    def __init__(self, crawler):
        self._dump = crawler.settings.getbool('STATS_DUMP')
        self._stats = {}

    def get_value(self, key, default=None, spider=None):
        return self._stats.get(key, default)

    def get_stats(self, spider=None):
        return self._stats

    def set_value(self, key, value, spider=None):
        self._stats[key] = value

    def set_stats(self, stats, spider=None):
        self._stats = stats

    def inc_value(self, key, count=1, start=0, spider=None):
        d = self._stats
        d[key] = d.setdefault(key, start) + count

    def max_value(self, key, value, spider=None):
        self._stats[key] = max(self._stats.setdefault(key, value), value)

    def min_value(self, key, value, spider=None):
        self._stats[key] = min(self._stats.setdefault(key, value), value)

    def clear_stats(self, spider=None):
        self._stats.clear()

    def open_spider(self, spider):
        pass

    def close_spider(self, spider, reason):
        if self._dump:
            logger.info("Dumping Scrapy stats:\n" + pprint.pformat(self._stats),
                        extra={'spider': spider})
        self._persist_stats(self._stats, spider)

    def _persist_stats(self, stats, spider):
        pass
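A minimal usage sketch, not part of this module: running components normally reach the active stats collector through crawler.stats rather than instantiating StatsCollector themselves. The spider name and stat keys below are illustrative, not taken from Scrapy.

import scrapy


class MySpider(scrapy.Spider):
    name = 'example'
    start_urls = ['https://example.com']

    def parse(self, response):
        # inc_value() creates the key with start=0 on first use, then adds count.
        self.crawler.stats.inc_value('example/pages_seen')
        # max_value() keeps the largest value observed for this key.
        self.crawler.stats.max_value('example/max_body_bytes', len(response.body))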
class MemoryStatsCollector(StatsCollector):

    def __init__(self, crawler):
        super().__init__(crawler)
        self.spider_stats = {}

    def _persist_stats(self, stats, spider):
        self.spider_stats[spider.name] = stats
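MemoryStatsCollector is Scrapy's default STATS_CLASS; when a spider closes, _persist_stats() keeps that spider's final stats dict in spider_stats, keyed by spider name. A hedged sketch of reading it back after a crawl, reusing the illustrative MySpider from the sketch above:

from scrapy.crawler import CrawlerProcess

process = CrawlerProcess()
crawler = process.create_crawler(MySpider)  # MySpider is the illustrative spider above
process.crawl(crawler)
process.start()  # blocks until the crawl finishes

# spider_stats maps spider name -> that spider's final stats dict.
print(crawler.stats.spider_stats.get('example'))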
class DummyStatsCollector(StatsCollector):

    def get_value(self, key, default=None, spider=None):
        return default

    def set_value(self, key, value, spider=None):
        pass

    def set_stats(self, stats, spider=None):
        pass

    def inc_value(self, key, count=1, start=0, spider=None):
        pass

    def max_value(self, key, value, spider=None):
        pass

    def min_value(self, key, value, spider=None):
        pass
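DummyStatsCollector turns every write into a no-op and makes every read return the supplied default, removing the (already small) overhead of stats bookkeeping when stats are not needed. Collectors are swapped via the STATS_CLASS setting; a sketch for a project's settings.py:

# settings.py (sketch): disable stats collection project-wide.
STATS_CLASS = 'scrapy.statscollectors.DummyStatsCollector'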