Skip to content

Commit 67d6938

Browse files
committed
fix bugs in logger
1 parent 001585a commit 67d6938

File tree

4 files changed

+8
-8
lines changed

4 files changed

+8
-8
lines changed

crawler/redis_spiders.py

Lines changed: 4 additions & 5 deletions
Original file line number · Diff line number · Diff line change
@@ -8,8 +8,7 @@
88
Spider, CrawlSpider)
99
from scrapy_splash import SplashRequest
1010

11-
from logger import (
12-
crawler_logger, validator_logger)
11+
from logger import crawler_logger
1312
from utils import get_redis_conn
1413
from config.settings import (
1514
VALIDATOR_FEED_SIZE, SPIDER_FEED_SIZE)
@@ -49,7 +48,7 @@ def next_requests(self):
4948
yield req
5049
found += 1
5150

52-
crawler_logger.debug('Read {} requests from {}'.format(found, self.task_queue))
51+
crawler_logger.info('Read {} requests from {}'.format(found, self.task_queue))
5352

5453
def schedule_next_requests(self):
5554
for req in self.next_requests():
@@ -97,7 +96,7 @@ def next_requests(self):
9796
yield req
9897
found += 1
9998

100-
crawler_logger.debug('Read {} requests from {}'.format(found, self.task_queue))
99+
crawler_logger.info('Read {} requests from {}'.format(found, self.task_queue))
101100

102101

103102
class ValidatorRedisSpider(RedisSpider):
@@ -122,7 +121,7 @@ def next_requests_process(self, task_queue):
122121
callback=self.parse, errback=self.parse_error)
123122
yield req
124123
found += 1
125-
validator_logger.debug('Read {} ip proxies from {}'.format(found, task_queue))
124+
crawler_logger.info('Read {} ip proxies from {}'.format(found, task_queue))
126125

127126
def parse_error(self, failure):
128127
raise NotImplementedError

crawler/validators/base.py

Lines changed: 2 additions & 2 deletions
Original file line number · Diff line number · Diff line change
@@ -6,7 +6,7 @@
66
from twisted.internet.error import (
77
TimeoutError, TCPTimedOutError)
88

9-
from logger import validator_logger
9+
from logger import crawler_logger
1010
from ..items import (
1111
ProxyScoreItem, ProxyVerifiedTimeItem,
1212
ProxySpeedItem)
@@ -58,7 +58,7 @@ def is_transparent(self, response):
5858
def parse_error(self, failure):
5959
request = failure.request
6060
proxy = request.meta.get('proxy')
61-
validator_logger.error('proxy {} has been failed,{} is raised'.format(proxy, failure))
61+
crawler_logger.error('proxy {} has been failed,{} is raised'.format(proxy, failure))
6262
if failure.check(TimeoutError, TCPTimedOutError):
6363
decr = -1
6464
else:

logger/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line number · Diff line number · Diff line change
@@ -56,7 +56,7 @@
5656
},
5757
'other_logger': {
5858
'handlers': ['file'],
59-
'level': 'info',
59+
'level': 'INFO',
6060
}
6161
}
6262
}

scheduler/scheduler.py

Lines changed: 1 addition & 0 deletions
Original file line number · Diff line number · Diff line change
@@ -205,6 +205,7 @@ def crawler_start(usage, tasks):
205205
for case in cases:
206206
if case.check(task, maps):
207207
spiders.append(case.spider)
208+
break
208209
else:
209210
crawler_logger.warning('spider task {} is invalid task, the allowed tasks are {}'.format(
210211
task, list(maps.keys())))

0 commit comments

Comments (0)