The Polar Bear scanner (北极熊扫描器) is a bit of a pain to use, so here is a simple multi-threaded web title/banner grabber written in Python 2.

import requests
import mechanize
import cookielib
import sys
import IPy
import threading
import Queue
class findTitle(object):
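    """Scan an IP range and print each host's HTTP title plus its Server / X-Powered-By headers."""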
    def __init__(self,host,port,threads):
        self.threads = threads
        self.lock = threading.Lock()
        self.queue = Queue.Queue()
        self.host = host
        self.findc()
        self.port = ['80','8080']
        if port.strip():
            self.port = ['80','8080']+port.split(',')
        self.into_queue()
    def findc(self):
        # Parse the host argument (e.g. 43.242.49.0/24) into an iterable IP range
        try:
            self.iplist = IPy.IP(self.host)
        except Exception,e:
            print e
            sys.exit(1)
        return
    def into_queue(self):
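        # Build an http://ip:port URL for every address/port combination and queue it for the workers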
        for ip in self.iplist:
            for port in self.port:
                url = "http://%s:%s"%(str(ip),str(port))
                self.queue.put(url)
    def find_title(self,url):
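        # Fetch the URL with a mechanize Browser and return its <title>, or '' on any error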
        try:
            br = mechanize.Browser()
            br.set_cookiejar(cookielib.LWPCookieJar()) # Cookie jar
            br.set_handle_equiv(True) # Browser Option
            br.set_handle_redirect(True)
            br.set_handle_referer(True)
            br.set_handle_robots(False)
            br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1)
            br.addheaders = [('User-agent', 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1')]
            br.open(url)
            t = br.title().decode('utf-8','ignore').encode('gb2312','ignore') # re-encode for a GBK console, dropping characters that do not convert
            return t
        except Exception,e:
            return ''
    def find_info(self,url):
        # Grab the Server and X-Powered-By response headers, if the host responds at all
        server = ''
        powered = ''
        try:
            r = requests.get(url,timeout=4)
            server = r.headers.get('Server','')
            powered = r.headers.get('X-Powered-By','')
        except Exception,e:
            #print e
            pass
        return server.strip()+"  "+powered.strip()
    def scan(self):
        while True:
            try:
                # get_nowait() avoids blocking forever if another thread empties the queue first
                url = self.queue.get_nowait()
            except Queue.Empty:
                break
            title = self.find_title(url)
            info = self.find_info(url)
            result = title+"  "+info
            if result.strip():
                self.lock.acquire()
                print url+"  "+result
                self.lock.release()
            self.queue.task_done()
        return
    def start(self):
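        # Spawn the worker threads and wait for all of them to finish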
        thread=[]
        for i in range(self.threads):
            t = threading.Thread(target=self.scan)
            t.setDaemon(True)
            t.start()
            thread.append(t)
        for t in thread:
            t.join()

if __name__ == "__main__":
    if len(sys.argv) < 2:
        print "Usage: %s ip/cidr (e.g. 127.0.0.0/24) [ports, comma-separated (default 80,8080)] [threads (default 256)]" % sys.argv[0]
        sys.exit()
    host = sys.argv[1]
    port = ''
    threads = 256
    try:
        port = sys.argv[2]
    except:
        pass
    try:
        threads = int(sys.argv[3])
    except:
        pass

    a = findTitle(host,port,threads)
    a.start()
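
For reference, a typical invocation might look like this (the script name, port list and thread count below are just illustrative examples, not from the original post):

python scan_title.py 43.242.49.0/24 8000,8888 100

With no extra arguments, only ports 80 and 8080 are scanned using the default 256 threads.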