config.txt是配置文件
url.txt是要扫描的url
内部配置相关的常见编辑器漏洞和svn源码泄露漏洞
多线程运行
程序的思路是先检测漏洞,再扫描备份,扫描结果自动保存在当前目录的文件中
附上config.txt
/a.zip
/web.zip
/web.rar
/1.rar
/bbs.rar
/www.root.rar
/123.rar
/data.rar
/bak.rar
/oa.rar
/admin.rar
/www.rar
/2014.rar
/2015.rar
/2016.rar
/2014.zip
/2015.zip
/2016.zip
/1.zip
/1.gz
/1.tar.gz
/2.zip
/2.rar
/123.rar
/123.zip
/a.rar
/a.zip
/admin.rar
/back.rar
/backup.rar
/bak.rar
/bbs.rar
/bbs.zip
/beifen.rar
/beifen.zip
/beian.rar
/data.rar
/data.zip
/db.rar
/db.zip
/flashfxp.rar
/flashfxp.zip
/fdsa.rar
/ftp.rar
/gg.rar
/hdocs.rar
/hdocs.zip
/HYTop.mdb
/root.rar
/Release.rar
/Release.zip
/sql.rar
/test.rar
/template.rar
/template.zip
/upfile.rar
/vip.rar
/wangzhan.rar
/wangzhan.zip
/web.rar
/web.zip
/website.rar
/www.rar
/www.zip
/wwwroot.rar
/wwwroot.zip
/wz.rar
/备份.rar
/网站.rar
/新建文件夹.rar
/新建文件夹.zip
附上源代码
#coding = gbk
# NOTE(review): malformed encoding declaration — Python only honours
# "# -*- coding: gbk -*-" (no spaces around the separator) on line 1 or 2,
# so this line is ignored and the file is read with the default encoding.
import requests
import time
import Queue          # Python 2 stdlib queue (renamed "queue" in Python 3)
import threading
import urllib2        # Python 2 only: this whole script targets Python 2
import socket
timeout=3
socket.setdefaulttimeout(timeout)   # global fallback timeout for urllib2 calls
q = Queue.Queue()     # thread-safe queue of live base URLs, consumed by starta()
# ASCII-art banner (raw string: runtime output, left byte-identical).
print r'''
'&#$' '|!|:
!%|!;$; `;||%$|!:. ;!':';|'
;|:!!:%@%:``````...'!:`:::'!!`
'!;|%:```.````````````.`':':!'
!%'```````''```````''`````'|;
. :|:.```'!
!'`;$@###&|'``````.;|'
. :%'.```;|;:;!:;%%!::''::'``````.`|; .
`!:.`````:|@$;````'!|;:'``````````'!: .
. :!```...``':''````''```````````````;;. .
!; .''```. .:' ...`.:!' .
':;; .:;;&@@&;. `||' .'!' .
:|:;|' .`. .``` .:!` .
'$%:!!` ..`;:``...`'` .!! .
'%%':!|!' '%
!'`;$@###&|'``````.;|' . :%'.```;|;:;!:;%%!::''::'``````.`|; . `!:.`````:|@$;````'!|;:'``````````'!: . . :!```...``':''````''```````````````;;. . !; .''```. .:' ...`.:!' . ':;; .:;;&@@&;. `||' .'!' . :|:;|' .`. .``` .:!` . '$%:!!` ..`;:``...`'` .!! . '%%':!|!' '%
|'. .`. .;!. .
';. .:' ...;|` .
.;: !&!'.. ....````.``!; .
'!` ;; ..``. ..``!: .
.;: .```..:;. .
.!;. ;%:.```````.`;' .
;&@$||' .!!````````````.|! .
'!' ;! .;;..`:%&$!````..!; .
'''
# Usage / about text, GBK-encoded Chinese (runtime string, left untranslated).
print unicode('''
本软件可同步实现编辑器漏洞&svn源码泄露&常见备份扫描
自动甄别无法连接的网站,自动处理
作者:
QQ:982722261
完成时间:2017年2月9日11:04:53
''','gbk')
time.sleep(5)         # pause so the operator can read the banner
# Desktop-browser User-Agent so probes look like ordinary traffic.
headers={'User-Agent':'Mozilla/5.0 (Windows NT 6.3; WOW64)AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102Safari/537.36'}
f1 = open('url.txt','r')            # targets: one base URL per line
f2 = f1.readlines()
f1.close()
f3 = open('result.txt','a+')        # findings are appended here; closed at exit
f9 = open('config.txt','r') # load the backup-path dictionary (config.txt)
f4 = f9.readlines()
f9.close()
def rarzip(): #开始构建扫描函数
try: #域名rarzip扫描
print yumingrar
reqryumingrar = urllib2.Request(url=yumingrar,headers=headers)
ryumingrar = urllib2.urlopen(reqryumingrar)
if ryumingrar.code == 200:
metarar = ryumingrar.info()
sizerar = str(metarar.getheaders("Content-Length")[0]) #文件大小
sizerar1 = int(metarar.getheaders("Content-Length")[0])
if sizerar1 > 8888:
print '★★★★★Found A Success Url Maybe backups★★★★★'
print yumingrar
print 'Size:' + sizerar + 'Kbs'
f3.write(yumingrar + '----------' + sizerar + 'Kbs' + '\n')
else:
print '888 Safe Dog I Fuck You 888'
else:
print '[+]Pass.........................'
except:
pass
try:
print yumingzip
reqryumingzip = urllib2.Request(url=yumingzip,headers=headers)
ryumingzip = urllib2.urlopen(reqryumingrar)
if ryumingzip.code == 200:
metazip = ryumingrar.info()
sizezip = str(metazip.getheaders("Content-Length")[0])
sizezip1 = int(metazip.getheaders("Content-Length")[0])
if sizezip1 > 8888:
print '★★★★★Found A Success Url Maybe backups★★★★★'
print yumingzip
print 'Size:' + sizezip + 'Kbs'
f3.write(yumingzip + '----------' + sizezip + 'Kbs' + '\n')
else:
print '888 Safe Dog I Fuck You 888'
else:
print '[+]Pass.........................'
except:
pass
def svn():
try: #svn漏洞扫描
print yumingsvn
ryumingsvn = requests.get(url=yumingsvn,headers=headers,allow_redirects=False,timeout=3)
if ryumingsvn_status == 200:
print '★★★★★Found A Success Url Maybe Vulnerability★★★★★'
f3.write(yumingsvn + ' 【SVN源码泄露漏洞】' + '\n')
else:
print '[+]Pass.........................'
except:
print "[+]Can not connect url"
pass
def eweb():
try: #ewebeditor漏洞扫描
print '---------------Ewebeditor Vulnerability Scan---------------'
print eweb1
reweb1 = requests.get(url=eweb1,headers=headers,allow_redirects=False,timeout=3)
if reweb1_status == 200:
print '★★★★★Found A Success Url Maybe Vulnerability★★★★★'
f3.write(eweb1 + ' 【Ewebeditor编辑器漏洞】' + '\n')
else:
print '[+]Pass.........................'
except:
print "[+]Can not connect url"
pass
try:
print eweb2
reweb2 = requests.get(url=eweb2,headers=headers,allow_redirects=False,timeout=3)
if reweb2_status == 200:
print '★★★★★Found A Success Url Maybe Vulnerability★★★★★'
f3.write(eweb2 + ' 【Ewebeditor编辑器漏洞】' + '\n')
else:
print '[+]Pass.........................'
except:
print "[+]Can not connect url"
pass
try:
print eweb3
reweb3 = requests.get(url=eweb3,headers=headers,allow_redirects=False,timeout=3)
if reweb3_status == 200:
print '★★★★★Found A Success Url Maybe Vulnerability★★★★★'
f3.write(eweb3 + ' 【Ewebeditor编辑器漏洞】' + '\n')
else:
print '[+]Pass.........................'
except:
print "[+]Can not connect url"
pass
try:
print eweb4
reweb4 = requests.get(url=eweb4,headers=headers,allow_redirects=False,timeout=3)
if reweb4_status == 200:
print '★★★★★Found A Success Url Maybe Vulnerability★★★★★'
f3.write(eweb4 + ' 【Ewebeditor编辑器漏洞】' + '\n')
else:
print '[+]Pass.........................'
except:
print "[+]Can not connect url"
pass
try:
print eweb5
reweb5 = requests.get(url=eweb5,headers=headers,allow_redirects=False,timeout=3)
if reweb5_status == 200:
print '★★★★★Found A Success Url Maybe Vulnerability★★★★★'
f3.write(eweb5 + ' 【Ewebeditor编辑器漏洞】' + '\n')
else:
print '[+]Pass.........................'
except:
print "[+]Can not connect url"
pass
try:
print eweb6
reweb6 = requests.get(url=eweb6,headers=headers,allow_redirects=False,timeout=3)
if reweb6_status == 200:
print '★★★★★Found A Success Url Maybe Vulnerability★★★★★'
f3.write(eweb6 + ' 【Ewebeditor编辑器漏洞】' + '\n')
else:
print '[+]Pass.........................'
except:
print "[+]Can not connect url"
pass
def fck():
try: #fckeditor漏洞扫描
print '---------------Fckeditor Vulnerability Scan---------------'
print fck1
rfck1 = requests.get(url=fck1,headers=headers,allow_redirects=False,timeout=3)
if rfck1_status == 200:
print '★★★★★Found A Success Url Maybe Vulnerability★★★★★'
f3.write(fck1 + ' 【Fckeditor编辑器漏洞】' + '\n')
else:
print '[+]Pass.........................'
except:
print "[+]Can not connect url"
pass
try:
print fck2
rfck2 = requests.get(url=fck2,headers=headers,allow_redirects=False,timeout=3)
if rfck2_status == 200:
print '★★★★★Found A Success Url Maybe Vulnerability★★★★★'
f3.write(fck2 + ' 【Fckeditor编辑器漏洞】' + '\n')
else:
print '[+]Pass.........................'
except:
print "[+]Can not connect url"
pass
try:
print fck3
rfck3 = requests.get(url=fck3,headers=headers,allow_redirects=False,timeout=3)
if rfck3_status == 200:
print '★★★★★Found A Success Url Maybe Vulnerability★★★★★'
f3.write(fck3 + ' 【Fckeditor编辑器漏洞】' + '\n')
else:
print '[+]Pass.........................'
except:
print "[+]Can not connect url"
pass
try:
print fck4
rfck4 = requests.get(url=fck4,headers=headers,allow_redirects=False,timeout=3)
if rfck4_status == 200:
print '★★★★★Found A Success Url Maybe Vulnerability★★★★★'
f3.write(fck4 + ' 【Fckeditor编辑器漏洞】' + '\n')
else:
print '[+]Pass.........................'
except:
print "[+]Can not connect url"
pass
try:
print fck5
rfck5 = requests.get(url=fck5,headers=headers,allow_redirects=False,timeout=3)
if rfck5_status == 200:
print '★★★★★Found A Success Url Maybe Vulnerability★★★★★'
f3.write(fck5 + ' 【Fckeditor编辑器漏洞】' + '\n')
else:
print '[+]Pass.........................'
except:
print "[+]Can not connect url"
pass
try:
print fck6
rfck6 = requests.get(url=fck6,headers=headers,allow_redirects=False,timeout=3)
if rfck6_status == 200:
print '★★★★★Found A Success Url Maybe Vulnerability★★★★★'
f3.write(fck6 + ' 【Fckeditor编辑器漏洞】' + '\n')
else:
print '[+]Pass.........................'
except:
print "[+]Can not connect url"
pass
# Pass 1: probe every target for editor/SVN vulnerabilities and name-based
# backups.  The probe functions take no arguments: they read the module-level
# yuming*/eweb*/fck* names assigned here (top-level assignment = global).
for i in f2:
    c = i.strip('\n')
    print c
    try:
        ceshi = requests.get(url=c,headers=headers,allow_redirects=False,timeout=3)
        if ceshi.status_code == 200:
            a = c.split(".",2)[1] # main domain label, e.g. "foo" from "http://www.foo.com"
            # NOTE(review): this indexing assumes at least two dots in the
            # URL; an IP or bare domain raises IndexError, silently eaten by
            # the except below — TODO confirm the url.txt format.
            yumingrar = c + '/' + a + '.rar' # guessed <site>/<label>.rar backup
            yumingzip = c + '/' + a + '.zip'
            rarzip()
            # Build the special vulnerability URLs for this site.
            yumingsvn = c + '/.svn/entries' # exposed SVN metadata file
            svn()
            eweb1 = c + '/editor/editor/filemanager/browser/default/connectors/test.html' # eWebEditor test pages / db files
            eweb2 = c + '/editor/editor/filemanager/connectors/test.html'
            eweb3 = c + '/editor/editor/filemanager/connectors/uploadtest.html'
            eweb4 = c + '/html/db/ewebeditor.mdb'
            eweb5 = c + '/db/ewebeditor.mdb'
            eweb6 = c + '/db/ewebeditor.asp'
            eweb()
            fck1 = c + '/fckeditor/editor/filemanager/browser/default/connectors/test.html' # FCKeditor test / upload pages
            fck2 = c + '/fckeditor/editor/filemanager/connectors/test.html'
            fck3 = c + '/FCKeditor/editor/filemanager/connectors/uploadtest.html'
            fck4 = c + '/FCKeditor/editor/filemanager/upload/test.html'
            fck5 = c + '/fckeditor/editor/filemanager/browser/default/browser.html'
            fck6 = c + '/FCKeditor/editor/fckeditor.html'
            fck()
        else:
            pass # reachable but not 200: skip this site
    except:
        print "NO USE URL WHAT FUCK A BIG URL"
        pass
for i in f2:
c = i.strip('\n')
try:
ce = requests.get(url=c,headers=headers,allow_redirects=False,timeout=3)
if ce.status_code == 200:
q.put(c)
else:
pass
except:
print "NO USE URL WHAT FUCK A BIG URL"
pass
def starta():
    """Dictionary-based backup scan: try every path from config.txt against
    every live base URL queued in ``q`` and log large 200 responses.

    Runs until the queue is empty; intended to be the target of a worker
    thread.  Reads module globals ``q``, ``f4``, ``headers`` and ``f3``.
    """
    print '---------------Start Backups Scan---------------' # start loading paths from the dictionary
    while not q.empty():
        zhaohan = q.get() # next live base URL from the queue
        for f5 in f4:
            f6 = f5.strip('\n') # one candidate backup path from config.txt
            urlx = zhaohan + f6 # live base URL + candidate backup path
            print urlx
            try:
                req = urllib2.Request(url=urlx,headers=headers)
                response = urllib2.urlopen(req)
                if response.code == 200:
                    meta = response.info()
                    sizes = str(meta.getheaders("Content-Length")[0]) # reported byte count
                    sizess = int(meta.getheaders("Content-Length")[0])
                    # Small 200 bodies are assumed to be WAF / soft-404 pages.
                    if sizess < 8888:
                        print '888 Safe Dog I Fuck You 888'
                    else:
                        print '★★★★★Found A Success Url Maybe backups★★★★★'
                        print 'Size:' + sizes + 'Kbs'
                        # NOTE(review): unlike rarzip(), this log line omits the
                        # 'Kbs' suffix — presumably an oversight; confirm before
                        # relying on the result.txt format.
                        f3.write(urlx + '----------' + sizes + '\n')
                else:
                    print '[+]Pass.........................'
            except: # best-effort: skip unreachable/erroring candidates
                pass
thread1 = threading.Thread(target = starta())
thread1.start()
f3.close()
print '--------------------------------------------------------------------'
print '--------------------------------OVER--------------------------------'
print '--------------------------------------------------------------------'
time.sleep(10)
exit()