# -- Scraped page residue (not code), preserved as a comment so the file parses --
# 37,719 | 社区成员 (community members) | 发帖 (post) | 与我相关 (related to me)
# 我的任务 (my tasks) | 分享 (share)
def aqi(crawler, spider):
    """Run one crawl of CSpider against a fixed rule row from the database.

    Fetches the TSpiderC rule with a hard-coded uuid, schedules CSpider on a
    fresh CrawlerRunner, and drives the Twisted reactor until the crawl ends.

    :param crawler: unused here; kept so the scheduler call site stays valid.
    :param spider: ignored — immediately overwritten by the DB lookup below.
    """
    try:
        # NOTE(review): relies on the module-global `settings` assigned in
        # the __main__ block — confirm import/scheduling order keeps it set.
        runner = CrawlerRunner(settings)
        db = myDbConnect()
        # The incoming `spider` argument is deliberately shadowed: the rule
        # is always the row with this fixed uuid.  TODO(review): parameterize
        # the uuid instead of hard-coding it.
        spider = db.query(TSpiderC).filter(
            TSpiderC.uuid == u'1e627cd3c6ee8c540318006de209983b').one()
        d = runner.crawl(CSpider, rule=spider)
        d = runner.join()
        d.addBoth(lambda _: reactor.stop())
        try:
            reactor.run()
        except Exception as e:
            # reactor.run() can raise (e.g. reactor already running when the
            # scheduler fires again); log and continue rather than crash.
            print(e)
    except Exception as e:
        # Broad catch so one failed crawl does not kill the scheduler job
        # thread — but log the error instead of swallowing it silently.
        print(e)
if __name__ == '__main__':
    # Load project settings once (the original fetched them twice) and
    # configure logging BEFORE the scheduler starts firing jobs.
    settings = get_project_settings()
    configure_logging(settings)
    crawler = CrawlerProcess(settings)

    scheduler = BackgroundScheduler()
    scheduler.daemonic = False  # keep scheduler threads alive with the process
    # Trigger the crawl job every 20 seconds.
    cron = CronTrigger(second='*/20')
    scheduler.add_job(aqi, cron, args=[crawler, None])
    scheduler.start()

    # Keep the main thread alive so the background scheduler keeps running;
    # shut the scheduler down cleanly on Ctrl-C instead of looping forever.
    try:
        while True:
            time.sleep(1)
            print('sleep..................')
    except (KeyboardInterrupt, SystemExit):
        scheduler.shutdown()