KAPU  
Requirements:
Monitor traffic in real time.
Display only the problematic traffic.
For suspicious traffic, show which packet it appears in (a short sketch follows this list).
Save all traffic to pcap files.
Write one file for every 5,000 packets.
The third capture file is automatically downloaded to the local machine.
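A minimal sketch of the "show which packet" requirement (not the final tool): it numbers packets as they arrive and prints the index plus a one-line summary whenever the TCP payload contains "user" or "pass", the same placeholder heuristic used later in this post.

#coding=utf-8
from scapy.all import *

packet_index = 0

def flag_suspicious(packet):
    global packet_index
    packet_index += 1
    if packet.haslayer(TCP) and packet[TCP].payload:
        payload = bytes(packet[TCP].payload).lower()
        if b"user" in payload or b"pass" in payload:
            # report which packet the suspicious payload was seen in
            print("[!] packet #%d: %s" % (packet_index, packet.summary()))

sniff(filter="tcp", prn=flag_suspicious, store=0)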
 
    def sniff(count=0, store=1, offline=None, prn=None, filter=None, L2socket=None, timeout=None, opened_socket=None, stop_filter=None, iface=None, *args, **kargs)
 
    count: number of packets to capture; 0 means no limit
    store: keep the captured packets (1) or discard them (0)
    offline: read packets from a pcap file instead of sniffing; defaults to None
    prn: a function applied to every packet; whatever it returns is printed, e.g. prn=lambda x: x.summary() (packet.summary() returns a one-line summary of the packet)
    filter: a BPF capture-filter expression (the same capture-filter syntax used by tcpdump and Wireshark)
    L2socket: use the given L2socket
    timeout: stop sniffing after the given number of seconds; defaults to None
    opened_socket: read packets from the given object using its .recv() method
    stop_filter: a function that decides whether to stop capturing once a given packet is seen, e.g. stop_filter=lambda x: x.haslayer(TCP)
    iface: the interface to sniff on (a few short examples of these parameters follow this list)
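A few short, hedged examples of these parameters (the interface name "eth0" and the capture file "test.pcap" are assumptions for illustration; adjust them to your environment):

#coding=utf-8
from scapy.all import *

# stop after 10 packets or 30 seconds, whichever comes first, on a chosen interface
pkts = sniff(count=10, timeout=30, iface="eth0", filter="tcp", prn=lambda x: x.summary())

# stop as soon as the first TCP packet is seen
sniff(stop_filter=lambda x: x.haslayer(TCP))

# replay a saved capture through the same callback instead of sniffing live
sniff(offline="test.pcap", prn=lambda x: x.summary(), store=0)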
 
 
 
Sniffing a single packet
#coding=utf-8
from scapy.all import *

def packet_callback(packet):
    # show() already prints a full dissection of the packet
    packet.show()


sniff(prn=packet_callback, count=1)
 
 
Setting a capture filter
Capturing packets in real time
#coding=utf-8
from scapy.all import *
# packet callback: look for credentials in the TCP payload
def packet_callback(packet):
    if packet[TCP].payload:
        mail_packet = bytes(packet[TCP].payload).lower()
        if b"user" in mail_packet or b"pass" in mail_packet:
            print("[*] Server: %s" % packet[IP].dst)
            print("[*] %s" % packet[TCP].payload)
# start the sniffer
sniff(filter="tcp port 80", prn=packet_callback, store=0)
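To also cover the "save the problematic traffic" requirement, the callback above can be extended with a PcapWriter so that only the matching packets are written out. A sketch, assuming the output filename suspicious.pcap:

#coding=utf-8
from scapy.all import *

# incremental writer: each matching packet is appended and flushed immediately
suspicious_writer = PcapWriter("suspicious.pcap", append=True, sync=True)

def packet_callback(packet):
    if packet[TCP].payload:
        mail_packet = bytes(packet[TCP].payload).lower()
        if b"user" in mail_packet or b"pass" in mail_packet:
            print("[*] Server: %s" % packet[IP].dst)
            suspicious_writer.write(packet)  # keep only the suspicious packet

sniff(filter="tcp port 80", prn=packet_callback, store=0)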
 
 
Capturing for 30 seconds at a time and saving the result
#coding=utf-8
from scapy.all import *
# packet callback: print packets that look like they carry credentials
def packet_callback(packet):
    if packet[TCP].payload:
        mail_packet = bytes(packet[TCP].payload).lower()
        #print(packet)
        #print(mail_packet)
        if b"user" in mail_packet or b"pass" in mail_packet:
            print("[*] Server: %s" % packet[IP].dst)
            print("[*] %s" % packet[TCP].payload)

# start the sniffer: keep only TCP traffic and capture for 30 seconds at a time
package = sniff(filter="tcp", timeout=30, prn=packet_callback, store=1)

# After the 30-second capture finishes, decide on the filename. (The parsing
# example further down reads test.pcap, so make sure such a file exists in the
# working directory or that step will raise an error.) Under the "one file per
# 5,000 packets" scheme, the second capture file would simply get the next name.

j = 1
flowName = "test" + str(j) + ".pcap"
wrpcap(flowName, package)  # save the captured packets as test1.pcap
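To get closer to the "one file per 5,000 packets" requirement from the top of the post, the capture can be chunked by packet count instead of by time, with each chunk written to a numbered file. A sketch (three chunks and the test<N>.pcap naming are assumptions):

#coding=utf-8
from scapy.all import *

for j in range(1, 4):                        # three chunks as an example
    chunk = sniff(filter="tcp", count=5000)  # block until 5,000 packets arrive
    flowName = "test" + str(j) + ".pcap"
    wrpcap(flowName, chunk)
    print("[*] wrote %s" % flowName)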
 
Extracting information from the packets; the key line is
raw_http = p["TCP"].payload.original
#coding=utf-8
from scapy.all import *
# the HTTP layer must be loaded so that requests are dissected as HTTPRequest
from scapy.layers.http import HTTPRequest


def parse_http_pcap(pcap_path):
    pcap_infos = list()  # placeholder for any information you want to collect
    packets = rdpcap(pcap_path)
    for p in packets:
        print("----")
        # use haslayer to check whether a packet contains a given layer
        if p.haslayer("IP"):
            src_ip = p["IP"].src
            dst_ip = p["IP"].dst
            print("sip: %s" % src_ip)
            print("dip: %s" % dst_ip)
        if p.haslayer("TCP"):
            # .payload.original gives the raw bytes carried below a layer
            raw_http = p["TCP"].payload.original
            sport = p["TCP"].sport
            dport = p["TCP"].dport
            print("sport: %s" % sport)
            print("dport: %s" % dport)
            print("raw_http:\n%s" % raw_http)

        if p.haslayer("HTTPRequest"):
            host = p["HTTPRequest"].Host
            uri = p["HTTPRequest"].Path
            # .fields gives the already-parsed header fields as a dict
            http_fields = p["HTTPRequest"].fields
            http_payload = p["HTTPRequest"].payload.fields
            print("host: %s" % host)
            print("uri: %s" % uri)
            print("http_fields:\n%s" % http_fields)
            print("http_payload:\n%s" % http_payload)
    return pcap_infos


parse_http_pcap("test.pcap")
 
Sniffing login credentials
import re
from scapy.all import *


def ftpsniff(pkt):
    dest = pkt.getlayer(IP).dst
    raw = pkt.sprintf('%Raw.load%')
    user = re.findall('(?i)USER (.*)', raw)
    pswd = re.findall('(?i)PASS (.*)', raw)
    if user:
        print('[*] Detected FTP Login to ' + str(dest))
        print('[+] Username: ' + str(user[0]))
    elif pswd:
        print('[+] Password: ' + str(pswd[0]))
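The function above is only the callback. A hedged sketch of wiring it up: hand it to sniff() on the FTP control port (port 21 and store=0 are assumptions, not part of the original snippet).

sniff(filter="tcp port 21", prn=ftpsniff, store=0)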
 
 
 
 