scrapy之Logging使用
# coding: utf-8
__author__ = 'similarface'

######################
## Using Logging with Scrapy
######################
import logging

# Standard logging severity levels, highest to lowest:
# 1. logging.CRITICAL - for critical errors (highest severity)
# 2. logging.ERROR    - for regular errors
# 3. logging.WARNING  - for warning messages
# 4. logging.INFO     - for informational messages
# 5. logging.DEBUG    - for debugging messages (lowest severity)

# Two equivalent ways to emit a warning through the root logger.
logging.warning("This is a warning")
logging.log(logging.WARNING, "This is a warning")

# Get the root logger instance directly.
logger = logging.getLogger()
logger.warning("这是警告消息")

# Get a named logger so the emitted record identifies its source.
logger = logging.getLogger('SimilarFace')
logger.warning("This is a warning")

# --- Using logging inside a Scrapy spider ---
import scrapy


class MySpider(scrapy.Spider):
    """Minimal spider demonstrating the two common logging styles."""
    name = 'myspider'
    start_urls = ['http://scrapinghub.com']

    def parse(self, response):
        # Option 1: the per-spider logger Scrapy attaches automatically.
        self.logger.info('Parse function called on %s', response.url)
        # Option 2: the module-level named logger defined above.
        logger.info('Parse function called on %s', response.url)

# Logging-related Scrapy settings:
#   LOG_FILE, LOG_ENABLED, LOG_ENCODING, LOG_LEVEL,
#   LOG_FORMAT, LOG_DATEFORMAT, LOG_STDOUT
#
# Command-line overrides:
#   --logfile FILE        Overrides LOG_FILE
#   --loglevel/-L LEVEL   Overrides LOG_LEVEL
#   --nolog               Sets LOG_ENABLED to False

from scrapy.utils.log import configure_logging

# Disable Scrapy's default root handler so basicConfig below takes effect.
configure_logging(install_root_handler=False)

# Configure the root logger to append to a file.
# NOTE(review): path is user-specific — adjust for your environment.
logging.basicConfig(
    filename='/Users/similarface/PycharmProjects/FluentPython/log.txt',
    # Fixed: the original format repeated %(levelname)s twice.
    format='%(levelname)s: %(message)s',
    level=logging.INFO
)
logging.info('进入Log文件')
logger = logging.getLogger('SimilarFace')
logger.warning("也要进入Log文件")