sqlmap自动爬取Ip建立IP池绕过拉黑
@xf555we 之前在网上看到了一篇 sqlmap 绕过拉黑 IP 的文章,文中附带了本地代理 IP 池的 python 源码,但需要自己去找可用的 IP。于是写了一个爬取 IP 的脚本,生成可用 IP 后放入代理池。如果觉得 IP 数量不够,可以在 daili.py 中自行添加获取方法。
使用方法
1、先运行daili.py,然后再运行proxy.py。 2、启动sqlmap,--proxy=http://127.0.0.1:9999
daili.py
import random
import requests
from fake_useragent import UserAgent
from retrying import retry
import lxml.html
from pyquery import PyQuery as pq
class ReptileIp(object):
    """Crawl several free-proxy listing sites and persist the harvested
    proxies to ip.txt as '!'-separated "scheme://host:port" entries."""

    def __init__(self, init_url):
        """init_url: the listing page scraped by run() via filter_html()."""
        ua = UserAgent()
        self.init_url = init_url
        # Random desktop User-Agent so listing sites are less likely to block us.
        self.headers = {'User-Agent': ua.random}

    @retry(stop_max_attempt_number=3)
    def download_html(self, url_str, data, method, proxies):
        """Fetch url_str (POST when method == 'POST', else GET).

        Retried up to 3 times by @retry; raises on network errors or a
        non-200 status (the assert trips the retry).
        """
        if method == 'POST':
            result = requests.post(url_str, data=data, headers=self.headers, proxies=proxies)
        else:
            result = requests.get(url_str, headers=self.headers, timeout=3, proxies=proxies)
        assert result.status_code == 200
        return result.content

    def download_url(self, url_str, data=None, method='GET', proxies=None):
        """Best-effort wrapper around download_html().

        Returns the page bytes, or None when all retries failed.
        proxies defaults to None (the original used a mutable {} default).
        """
        if proxies is None:
            proxies = {}
        try:
            result = self.download_html(url_str, data, method, proxies)
        except Exception as e:
            print(e)
            result = None
        return result

    def filter_html(self, content):
        """Extract "scheme://host:port" strings from a xicidaili-style table.

        Returns [] for empty/None content (e.g. when the download failed)
        instead of crashing inside lxml.
        """
        if not content:
            return []
        html = lxml.html.fromstring(content)
        data_host = html.xpath('//table/tr/td[last()-8]/text()')
        data_port = html.xpath('//table/tr/td[last()-7]/text()')
        data_http = html.xpath('//table/tr/td[last()-4]/text()')
        proxies = []
        for scheme, host, port in zip(data_http, data_host, data_port):
            proxies.append(scheme + '://' + host + ':' + port)
        return proxies

    def kuaidaili(self):
        """Scrape kuaidaili.com/free; returns "http://ip:port" strings
        ([] on network failure instead of raising)."""
        try:
            html = requests.get("https://www.kuaidaili.com/free").text
        except Exception as e:
            print(e)
            return []
        proxies = []
        ips = []
        ports = []
        if html:
            doc = pq(html)
            items = doc("#content .con-body table tr").items()
            for item in items:
                for td in item("td").items():
                    if td.attr("data-title") == "IP":
                        ips.append(td.text())
                    if td.attr("data-title") == "PORT":
                        ports.append(td.text())
            for ip, port in zip(ips, ports):
                proxies.append("http://" + ip + ":" + port)
        return proxies

    def daili89(self):
        """Scrape page 1 of 89ip.cn; returns "http://ip:port" strings
        ([] on network failure instead of raising)."""
        url = "http://www.89ip.cn/index_1.html"
        try:
            html = requests.get(url).text
        except Exception as e:
            print(e)
            return []
        doc = pq(html)
        ips = [ip.text() for ip in doc(
            'body > div.layui-row.layui-col-space15 > div.layui-col-md8 > div > div.layui-form > table > tbody > tr > td:nth-child(1)').items()]
        ports = [port.text() for port in doc(
            'body > div.layui-row.layui-col-space15 > div.layui-col-md8 > div > div.layui-form > table > tbody > tr > td:nth-child(2)').items()]
        proxies = []
        for ip, port in zip(ips, ports):
            proxies.append("http://" + ip + ":" + port)
        return proxies

    def run(self):
        """Crawl all sources and overwrite ip.txt with the '!'-joined pool.

        Empty per-source results are simply absent from the join, so the
        file never contains empty entries (the original concatenation
        could emit leading or doubled '!').
        """
        html_content = self.download_url(self.init_url)
        pool = self.filter_html(html_content) + self.kuaidaili() + self.daili89()
        with open('ip.txt', 'w') as fp:
            fp.write('!'.join(pool))

    def verify_ip(self):
        """Probe up to 4 random pool entries against baidu, drop failures,
        and return the surviving pool.

        Fixes vs. the original: tolerates a pool smaller than 4 entries
        (random.sample used to raise ValueError), skips empty entries from
        the split, and a dead proxy is treated as invalid instead of
        letting the requests exception propagate.
        """
        with open('ip.txt', 'r') as fp:
            proxies_list = [p for p in fp.read().split('!') if p]
        p_list = random.sample(proxies_list, min(4, len(proxies_list)))
        invalid_ip = []
        for url in p_list:
            try:
                req = requests.get('https://www.baidu.com', headers=self.headers,
                                   proxies={'http': url}, timeout=3)
                if req.status_code != 200:
                    invalid_ip.append(url)
            except Exception:
                invalid_ip.append(url)
        for i in invalid_ip:
            proxies_list.remove(i)
        return proxies_list
if __name__ == '__main__':
    # Crawl the proxy sites once; run() only needs to be executed a single
    # time to (re)build ip.txt.
    crawler = ReptileIp('https://www.xicidaili.com/nn/2')
    crawler.run()
    # To validate a sample of the pool, uncomment:
    # print(crawler.verify_ip())
proxy.py
import socket
from socket import error
import threading
import random
import time
from daili import ReptileIp
# Human-readable timestamp used in the log lines below.
# NOTE(review): computed once at import time, so every log line shows the
# same (start-up) time rather than the time of each event — confirm intent.
localtime = time.asctime(time.localtime(time.time()))
class ProxyServerTest():
    """Local forwarding proxy: listens on 127.0.0.1:9999 and relays each
    client request through a randomly chosen upstream proxy from the pool
    (point sqlmap at it with --proxy=http://127.0.0.1:9999)."""

    def __init__(self, proxyip):
        # Local socket server.
        self.ser = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # proxyip: list of upstream proxies — (host, port) tuples or
        # "scheme://host:port" strings (the form ReptileIp.verify_ip returns).
        self.proxyip = proxyip

    @staticmethod
    def _parse_proxy(entry):
        """Normalize a pool entry to a (host, port:int) pair.

        Accepts (host, port) tuples/lists as well as "scheme://host:port"
        or "host:port" strings.  BUGFIX: the original indexed
        entry[0]/entry[1] directly, which for the string entries produced
        by ReptileIp yielded the characters 'h' and 't', so every
        upstream connect() failed.
        """
        if isinstance(entry, (tuple, list)):
            return entry[0], int(entry[1])
        addr = entry.split('://')[-1]
        host, _, port = addr.rpartition(':')
        return host, int(port)

    def run(self):
        """Accept loop: receive a client request, forward it through a
        random working upstream proxy, and relay the response back."""
        try:
            # Local service IP and port.
            self.ser.bind(('127.0.0.1', 9999))
            # Maximum backlog of pending connections.
            self.ser.listen(5)
        except error as e:
            print("[-]The local service : " + str(e))
            return "[-]The local service : " + str(e)
        while True:
            try:
                # Receive the client's request data.
                client, addr = self.ser.accept()
                print('[*]accept %s connect' % (addr,))
                data = client.recv(1024)
                if not data:
                    break
                print('[*' + localtime + ']: Accept data...')
            except error as e:
                print("[-]Local receiving client : " + str(e))
                return "[-]Local receiving client : " + str(e)
            while True:
                # Pick a random upstream proxy; retry with another one
                # until a connection succeeds.
                mbsocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                iplen = len(self.proxyip)
                proxyip = self.proxyip[random.randint(0, iplen - 1)]
                print("[!]Now proxy ip:" + str(proxyip))
                prip, prpo = self._parse_proxy(proxyip)
                try:
                    mbsocket.settimeout(3)
                    mbsocket.connect((prip, prpo))
                except:
                    print("[-]RE_Connect...")
                    continue
                break
            try:
                # Forward the client data to the upstream proxy.
                mbsocket.send(data)
            except error as e:
                print("[-]Sent to the proxy server : " + str(e))
                return "[-]Sent to the proxy server : " + str(e)
            while True:
                try:
                    # Relay the upstream response back to the client.
                    data_1 = mbsocket.recv(1024)
                    if not data_1:
                        break
                    print('[*' + localtime + ']: Send data...')
                    client.send(data_1)
                except socket.timeout as e:
                    print(proxyip)
                    print("[-]Back to the client : " + str(e))
                    # BUGFIX: was `continue`, which retried recv() on the
                    # 3s-timeout socket forever; give up on this exchange.
                    break
            # Close the per-request connections.
            client.close()
            mbsocket.close()
def main():
    """Build the proxy pool, verify a sample of it, and serve the local
    forwarding proxy on 127.0.0.1:9999 in a worker thread."""
    # Banner kept verbatim from the original author.
    print('''*Atuhor : V@1n3R.
*Blog :http://www.Lz1y.cn
*date: 2017.7.17
*http://www.Lz1y.cn/wordpress/?p=643
 __ __ _ _____ ____
 / /_ _/ |_ __ |___ /| _
 / / _` | | '_ |_ | |_) |
 V / (_| | | | | |___) | _ < _
\_/ \__,_|_|_| |_|____/|_| \_(_)
''')
    crawler = ReptileIp('https://www.xicidaili.com/nn/2')
    pool = crawler.verify_ip()
    # pool = [('118.89.148.92',8088)]
    # pool = tuple(pool)
    try:
        server = ProxyServerTest(pool)
        # Run the accept loop on a worker thread and wait for it.
        worker = threading.Thread(target=server.run, name='LoopThread')
        print('[*]Waiting for connection...')
        worker.start()
        worker.join()
    except Exception as e:
        print("[-]main : " + str(e))
        return "[-]main : " + str(e)


if __name__ == '__main__':
    main()