Using Logging in Scrapy
# coding: utf-8
__author__ = 'similarface'

#########################
# Use of the logging module
#########################
import logging

'''
Log severity levels, from highest to lowest:
1. logging.CRITICAL - for critical errors (highest severity)
2. logging.ERROR    - for regular errors
3. logging.WARNING  - for warning messages
4. logging.INFO     - for informational messages
5. logging.DEBUG    - for debugging messages (lowest severity)
Configuring a level enables that level plus everything above it:
WARNING also shows ERROR and CRITICAL, INFO also shows WARNING,
ERROR and CRITICAL, and so on.
'''

# two equivalent ways to emit a warning
logging.warning("This is a warning")
logging.log(logging.WARNING, "This is a warning")

# obtain the root logger instance
logger = logging.getLogger()
logger.warning("this is a warning message")

# a named logger identifies the message sender
logger = logging.getLogger('similarface')
logger.warning("This is a warning")

# Using the logger inside a Scrapy spider
import scrapy

class MySpider(scrapy.Spider):
    name = 'myspider'
    start_urls = ['http://scrapinghub.com']

    def parse(self, response):
        # method 1: every spider has a built-in logger named after the spider
        self.logger.info('parse function called on %s', response.url)
        # method 2: use the module-level logger defined with getLogger() above
        logger.info('parse function called on %s', response.url)

'''
Scrapy logging settings:
- LOG_FILE
- LOG_ENABLED
- LOG_ENCODING
- LOG_LEVEL
- LOG_FORMAT
- LOG_DATEFORMAT
Command-line overrides:
--logfile FILE       overrides LOG_FILE
--loglevel/-L LEVEL  overrides LOG_LEVEL
--nolog              sets LOG_ENABLED to False
'''

import logging
from scrapy.utils.log import configure_logging

# stop Scrapy from installing its default root handler
# so we can define the logging attributes ourselves
configure_logging(install_root_handler=False)

# basicConfig opens the log file in append mode by default
logging.basicConfig(
    filename='/Users/similarface/PycharmProjects/FluentPython/log.txt',
    format='%(levelname)s: %(message)s',
    level=logging.INFO
)
logging.info('Log file')
logger = logging.getLogger('similarface')
logger.warning("Log files also required")
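To see the severity hierarchy in action, here is a small standalone sketch (not part of the original script) showing how the level threshold filters messages:

import logging

# with the threshold at INFO, only INFO and above are emitted
logging.basicConfig(level=logging.INFO)
logging.debug('suppressed: DEBUG is below the INFO threshold')
logging.info('shown: INFO meets the threshold')
logging.warning('shown: WARNING is above the threshold')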
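The settings listed above can also be applied per spider through the custom_settings class attribute instead of settings.py. The following is a minimal sketch; the spider name, URL and log file name are illustrative, not from the original:

import scrapy

class QuietSpider(scrapy.Spider):
    # hypothetical spider used only to demonstrate per-spider settings
    name = 'quietspider'
    start_urls = ['http://scrapinghub.com']

    custom_settings = {
        'LOG_LEVEL': 'WARNING',         # overrides the project-wide LOG_LEVEL
        'LOG_FILE': 'quietspider.log',  # same effect as --logfile quietspider.log
        'LOG_FORMAT': '%(asctime)s [%(name)s] %(levelname)s: %(message)s',
    }

    def parse(self, response):
        # INFO is below WARNING, so this line never reaches the log file
        self.logger.info('parse function called on %s', response.url)
        self.logger.warning('only WARNING and above are written')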
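Because configure_logging(install_root_handler=False) leaves the root logger untouched, any standard handler can be attached in place of basicConfig. Below is one possible variation using RotatingFileHandler from the standard library; the file name and size limits are assumptions for illustration:

import logging
from logging.handlers import RotatingFileHandler
from scrapy.utils.log import configure_logging

# keep Scrapy from installing its own root handler
configure_logging(install_root_handler=False)

# rotate the log at 1 MB, keeping 3 old files (values are illustrative)
handler = RotatingFileHandler('scrapy.log', maxBytes=1024 * 1024, backupCount=3)
handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
handler.setLevel(logging.INFO)

root = logging.getLogger()
root.addHandler(handler)
root.setLevel(logging.INFO)

logging.info('goes to scrapy.log, rotated at 1 MB')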