当我们对目标网络进行漏洞扫描,或者使用sqlmap对目标网站进行注入攻击时,常常因为请求频率过高导致ip被ban,这个时候我们可以通过代理池来避免这种情况的发生,同时增加我们的隐蔽性。

构建IP代理池首先需要获取多个可靠的代理IP,思路如下

  1. 到免费的代理网站爬取代理IP
  2. 验证IP的有效性和匿名性
  3. 将有效的IP地址记录保存下来

具体实现:

到各个ip代理网站爬取代理IP,以快代理为例,并将爬取的IP地址保存到csv文件中,主要代码如下:

import requests
from bs4 import BeautifulSoup
from time import sleep
import csv

def kuaidaili_ip_spider(start, end):
    """Scrape free proxy IPs from kuaidaili.com pages start..end (inclusive)
    and append each row's IP, port and anonymity columns to kuaidaili.csv.

    Pages that fail to download are skipped.  Original code used the
    Python-2-only ``file()`` and ``xrange`` and could reference an undefined
    ``request`` after a failed download; both are fixed here.
    """
    url = 'https://www.kuaidaili.com/free/inha/'
    print("The result will be saved in kuaidaili.csv")
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36',
        'Connection': 'close',
    }
    # newline='' keeps the csv module from emitting blank rows on Windows;
    # `with` guarantees the file is flushed and closed.
    with open('kuaidaili.csv', 'w', newline='') as csvfile:
        writer = csv.writer(csvfile)
        for num in range(start, end + 1):
            sleep(2)  # throttle requests so the site doesn't ban us
            print('Now downloading the page ' + str(num) + ' ips')
            try:
                response = requests.get(url + str(num), headers=headers, timeout=12)
            except Exception as e:
                print(e)
                continue  # skip this page; original fell through with `request` undefined
            soup = BeautifulSoup(response.text, 'html.parser')
            for item in soup.find_all('tr'):
                tds = item.find_all('td')
                try:
                    # column 0 = IP, 1 = port, 3 = anonymity level
                    # (no .encode(): Python 3 csv wants str, not bytes)
                    writer.writerow([tds[0].text, tds[1].text, tds[3].text])
                except IndexError:
                    pass  # header rows use <th> and have no <td> cells

通过检测icanhazip的回显验证IP的有效性和匿名性:

#coding=utf-8

import requests
import sys
import threading
from time import sleep
import csv

# Shared output file for validated proxies, written by test_thread workers.
# newline='' is required by the csv module to avoid blank rows on Windows.
csvfile = open('valid_ip.csv', 'w', newline='')
writer = csv.writer(csvfile)

class test_thread(threading.Thread):
    """Validate one proxy in a worker thread.

    Fetches http://icanhazip.com/ through the proxy; if the echoed address
    equals the proxy's own IP, the proxy is working *and* anonymous, so it is
    recorded in valid_ip.csv via the module-level ``writer``.
    """

    # csv.writer is not thread-safe: concurrent writerow() calls from many
    # workers can interleave characters within a row.  Serialize writes.
    _write_lock = threading.Lock()

    def __init__(self, ip, port):
        threading.Thread.__init__(self)
        self.ip = ip      # proxy host as a string
        self.port = port  # proxy port (str or int; formatted via str())

    def run(self):
        # NOTE(review): this mutates a requests global from every thread;
        # presumably intended to make flaky proxies retry — kept as-is so
        # retry behavior doesn't change, but it only needs to be set once.
        requests.adapters.DEFAULT_RETRIES = 5
        proxies = {"http": "http://" + self.ip + ":" + str(self.port)}
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36',
            'Connection': 'close'
        }
        try:
            maskedIP = str(requests.get("http://icanhazip.com/", headers=headers, timeout=8, proxies=proxies).text).replace("\n", "")
        except Exception:
            return  # dead or too-slow proxy: discard silently
        if maskedIP == self.ip:
            with self._write_lock:
                writer.writerow([self.ip, self.port])
            print(self.ip + " " + self.port)

def test_proxy(filename):
    """Read (ip, port) rows from the csv file *filename* and start one
    test_thread per proxy, keeping at most ~30 checker threads alive.

    Blank or malformed rows (fewer than two columns) are skipped — csv files
    written without newline='' on Windows contain empty rows that crashed
    the original with IndexError.  The file is now closed deterministically.
    """
    with open(filename, newline='') as f:
        for row in csv.reader(f):
            if len(row) < 2:
                continue  # skip blank/short rows instead of crashing
            ip, port = row[0], row[1]
            # Crude back-pressure: wait for the thread pool to drain.
            # active_count() is the non-deprecated spelling of activeCount().
            while threading.active_count() > 30:
                sleep(2)
            test_thread(ip, port).start()

def main():
    """Entry point: validate every proxy listed in the csv file named by the
    first command-line argument; working proxies land in valid_ip.csv."""
    input_file = sys.argv[1]
    print("The result will be saved in valid_ip.csv")
    test_proxy(input_file)


if __name__ == '__main__':
    main()

++完整代码地址++