I am using apscheduler for the scheduled job and Scrapy for the crawler, but the spider only runs once and never crawls again afterwards, even though the aqi function keeps being called. Here is the code:
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger
import time
from log.make_log import make_log_file
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings
from spider.spiders.aqi import AqiSpider
def aqi(crawler, spider):
    try:
        crawler.crawl(spider)
        crawler.start()
    except Exception, e:
        make_log_file(str(e), 'scrapy')
if __name__ == '__main__':
    settings = get_project_settings()
    crawler = CrawlerProcess(settings)
    spider = AqiSpider()
    scheduler = BackgroundScheduler()
    scheduler.daemonic = False
    cron = CronTrigger(second='*/30')
    scheduler.add_job(aqi, cron, args=[crawler, spider])
    scheduler.start()
    while True:
        time.sleep(1000)
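From the symptoms, the likely cause is that CrawlerProcess.start() starts the Twisted reactor, which cannot be restarted within the same process; the second scheduled run then probably raises ReactorNotRestartable, and the try/except only writes that to the log instead of surfacing it. Below is a minimal sketch of one common workaround, assuming that diagnosis is right: launch each crawl in a fresh child process so every run gets its own reactor. The run_spider helper and the use of multiprocessing are illustrative additions, not part of the original code.

from multiprocessing import Process
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings

from spider.spiders.aqi import AqiSpider


def run_spider():
    # Runs inside a child process, so a brand-new Twisted reactor
    # is created and started on every scheduled run.
    settings = get_project_settings()
    process = CrawlerProcess(settings)
    process.crawl(AqiSpider)
    process.start()


def aqi():
    # Hypothetical replacement for the original aqi(): spawn one
    # child process per run instead of reusing a single
    # CrawlerProcess in the main process.
    p = Process(target=run_spider)
    p.start()
    p.join()

With this shape, the scheduler setup stays the same except that aqi no longer needs the crawler/spider arguments: scheduler.add_job(aqi, cron).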