|
# --- Auto Website Visitor by khalsalabs ---
try:
    # Python 2
    import urllib2
except ImportError:
    # Python 3: urllib2 was split into urllib.request / urllib.error
    import urllib.request as urllib2
import random
import sys
import threading
import time
class Connect:
    """Visit URLs through rotating proxies with rotating user agents.

    On construction, reads three newline-separated config files from the
    working directory: ``proxy.txt``, ``url.txt`` and ``agent.txt``.
    ``make_con()`` then walks the proxy list, spawning one worker thread
    per proxy; each worker picks a random URL + user agent and opens it.

    NOTE(review): the methods below were dedented to module level in the
    original paste; they all take ``self`` and ``main`` calls
    ``cnct.make_con()``, so they belong inside this class.
    """

    # Placeholder request; prep_con() replaces it with a random target.
    req = urllib2.Request('http://your-website-name.com')
    # Shared connection counters, updated by worker threads.
    # ('con_sucess' spelling kept as-is for backward compatibility.)
    con_sucess, con_failed, con_total = 0, 0, 0
    url_total, proxy_total = 0, 0
    url_list = []
    proxy_list = []
    agent_list = []

    def __init__(self):
        """Load proxies, URLs and user agents from their config files.

        Raises:
            IOError/OSError: if any of the three files is missing.
        """
        # Guards the shared counters that worker threads update.
        self._lock = threading.Lock()
        # splitlines() fixes the original split('n') typo (the backslash
        # was lost, so lines were split on the letter 'n'), and 'with'
        # closes each file instead of leaking the handle.
        with open('proxy.txt', 'r') as pf:
            self.proxy_list = pf.read().splitlines()
        with open('url.txt', 'r') as uf:
            self.url_list = uf.read().splitlines()
        with open('agent.txt', 'r') as af:
            self.agent_list = af.read().splitlines()

    def prep_con(self):
        """Rebuild self.req for a random URL with a random User-agent."""
        self.req = urllib2.Request(random.choice(self.url_list))
        self.req.add_header('User-agent', random.choice(self.agent_list))

    def make_con(self):
        """Spawn one visitURL() worker per proxy, pacing the launches.

        Every 4th proxy the inter-launch delay is re-tuned: back off to
        6s when completed connections lag behind 2*count, else 3s.
        """
        count, time_stamp = 0, 0
        for proxy in self.proxy_list:
            self.req.set_proxy(proxy, 'http')
            if count % 4 == 0:
                # Heuristic preserved from the original: con_total lags
                # 2*count when workers are slow, so slow down launches.
                time_stamp = 6 if self.con_total < 2 * count else 3
            threading.Thread(target=self.visitURL).start()
            time.sleep(time_stamp)
            count += 1

    def visit(self):
        """Open self.req once, then update and report the counters."""
        try:
            response = urllib2.urlopen(self.req)
            response.close()  # don't leak the socket
            ok = True
        except Exception:
            # Narrowed from a bare 'except:' (which also swallowed
            # KeyboardInterrupt/SystemExit); still best-effort.
            ok = False
        # Lock: this runs on many worker threads at once.
        with self._lock:
            if ok:
                self.con_sucess += 1
            else:
                self.con_failed += 1
            self.con_total += 1
            # Single-argument print() is valid on Python 2 and 3 alike
            # (the original 'print a, b, ...' statement was Py2-only).
            print("%d total connections, success = %d failed= %d"
                  % (self.con_total, self.con_sucess, self.con_failed))

    def visitURL(self):
        """Thread target: pick a random target, then visit it."""
        self.prep_con()
        self.visit()
| 59 | + |
if __name__ == "__main__":
    # Script entry point: load the config files, then start visiting.
    visitor = Connect()
    visitor.make_con()