# Scrapy settings for the python_city_data project.
#
# Only a handful of settings are configured here; for the full list of
# available settings and their defaults, see:
# https://docs.scrapy.org/en/latest/topics/settings.html

BOT_NAME = 'python_city_data'

SPIDER_MODULES = ['python_city_data.spiders']
NEWSPIDER_MODULE = 'python_city_data.spiders'

# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'python_city_data (+http://www.yourdomain.com)'
# Use a spoofed browser User-Agent instead of the default Scrapy one, so the
# target site serves normal browser responses.
USER_AGENT = "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36"

# Obey robots.txt rules
# NOTE: robots.txt compliance is deliberately disabled for this crawl.
ROBOTSTXT_OBEY = False