6 files changed in proxypool/crawlers/public: +6 −6. Each public crawler's MAX_PAGE is lowered to 3 (from 50, 8, 10, 300, 5, and 50 respectively), cutting the maximum number of listing pages fetched per run from 423 to 18.
Daili66Crawler (66ip.cn):
 BASE_URL = 'http://www.66ip.cn/{page}.html'
-MAX_PAGE = 50
+MAX_PAGE = 3


 class Daili66Crawler(BaseCrawler):
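For context, a minimal sketch of how these constants are consumed, assuming the repository's usual pattern in which each BaseCrawler subclass builds its urls list from BASE_URL and MAX_PAGE; the import paths, the Proxy schema, and the CSS selectors below are illustrative assumptions, not part of this diff. With MAX_PAGE = 3, only pages 1-3 are enumerated.

from pyquery import PyQuery as pq

from proxypool.crawlers.base import BaseCrawler
from proxypool.schemas.proxy import Proxy

BASE_URL = 'http://www.66ip.cn/{page}.html'
MAX_PAGE = 3


class Daili66Crawler(BaseCrawler):
    # MAX_PAGE bounds this comprehension, so the change above shrinks the
    # list from 50 page URLs to 3
    urls = [BASE_URL.format(page=page) for page in range(1, MAX_PAGE + 1)]

    def parse(self, html):
        # extract host/port pairs from the listing table
        # (selectors are an assumption about the page layout)
        doc = pq(html)
        for tr in doc('.containerbox table tr:gt(0)').items():
            host = tr.find('td:nth-child(1)').text()
            port = int(tr.find('td:nth-child(2)').text())
            yield Proxy(host=host, port=port)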
ip3366.net crawler:
 import re

-MAX_PAGE = 8
+MAX_PAGE = 3

 BASE_URL = 'http://www.ip3366.net/free/?stype={stype}&page={page}'
JiangxianliCrawler (ip.jiangxianli.com):
 BASE_URL = 'https://ip.jiangxianli.com/api/proxy_ips?page={page}'

-MAX_PAGE = 10
+MAX_PAGE = 3


 class JiangxianliCrawler(BaseCrawler):
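The jiangxianli source is an HTTP API rather than an HTML listing, so its parse step reads JSON. The sketch below assumes the endpoint returns an object whose data.data list carries ip and port fields (an assumption about the API shape, not confirmed by this diff); the MAX_PAGE change affects it the same way, limiting the crawl to the first three API pages.

import json

from proxypool.crawlers.base import BaseCrawler
from proxypool.schemas.proxy import Proxy

BASE_URL = 'https://ip.jiangxianli.com/api/proxy_ips?page={page}'
MAX_PAGE = 3


class JiangxianliCrawler(BaseCrawler):
    # as above, MAX_PAGE = 3 limits this to three API pages
    urls = [BASE_URL.format(page=page) for page in range(1, MAX_PAGE + 1)]

    def parse(self, html):
        # response body is JSON; field names are assumptions about the API
        result = json.loads(html)
        if result.get('code') != 0:
            return
        for item in result.get('data', {}).get('data', []):
            yield Proxy(host=item['ip'], port=int(item['port']))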
KuaidailiCrawler (kuaidaili.com):
 BASE_URL = 'https://www.kuaidaili.com/free/{type}/{page}/'
-MAX_PAGE = 300
+MAX_PAGE = 3


 class KuaidailiCrawler(BaseCrawler):
TaiyangdailiCrawler (taiyanghttp.com):
 from pyquery import PyQuery as pq

 BaseUrl = 'http://www.taiyanghttp.com/free/page{num}'
-MAX_PAGE = 5
+MAX_PAGE = 3


 class TaiyangdailiCrawler(BaseCrawler):
XiaoShuCrawler (xsdaili.cn):
 BASE_URL = "http://www.xsdaili.cn/"
 PAGE_BASE_URL = "http://www.xsdaili.cn/dayProxy/ip/{page}.html"
-MAX_PAGE = 50
+MAX_PAGE = 3


 class XiaoShuCrawler(BaseCrawler):
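A hypothetical quick check of the new limit: the import path below mirrors the proxypool/crawlers/public directory touched by this diff, but the exact module name and the urls attribute follow the sketches above and are guesses for illustration.

# count the page URLs a crawler would now enumerate
from proxypool.crawlers.public.daili66 import Daili66Crawler  # module name is an assumption

crawler = Daili66Crawler()
print(len(crawler.urls))  # expected: 3 after this change (was 50)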