Weibo blacklist batch operations

0. References

yu961549745/WeiboBlackList: batch-blocking Weibo users

1. Code: block.py

Updates: multithreaded; replaced urllib.request with requests + a Session.

Reading the cookie directly from Firefox or Chrome would be more convenient; I haven't bothered to make that change.
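Should you want that, a minimal sketch of what it could look like, using the third-party browsercookie package (assumptions on my part: browsercookie is pip-installed and the browser holds a logged-in weibo.com session; browser_cookie3 is its maintained fork with the same API):

import requests
import browsercookie

s = requests.Session()
# load the browser's cookie jar into the session (assumption: the Firefox
# profile on this machine carries the weibo.com login)
s.cookies.update(browsercookie.firefox())  # or browsercookie.chrome()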

#!/usr/bin/env python
# -*- coding: UTF-8 -*-

import requests
from requests.exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
                                ProxyError, RetryError, InvalidSchema)
import threading
import Queue  # Python 2 name; the module is called "queue" in Python 3
import traceback

import logging
def get_logger():
    logger = logging.getLogger("threading_example")
    logger.setLevel(logging.DEBUG)
    
    # fh = logging.FileHandler("d:/threading.log")
    fh = logging.StreamHandler()
    fmt = '%(asctime)s - %(threadName)-10s - %(levelname)s - %(message)s'
    formatter = logging.Formatter(fmt)
    fh.setFormatter(formatter)

    logger.addHandler(fh)
    return logger
    
logger = get_logger()

def block():
    while True:
        try:
            uid = task_queue.get()
            data = dict(payload)  # copy the shared payload so each thread sets its own uid
            data.update({'uid': uid})
            resp = s.post(url, data=data)
        except (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
                ProxyError, RetryError, InvalidSchema) as err:
            task_queue.put(uid)     # network hiccup: requeue the uid for another attempt
            task_queue.task_done()  # put() before task_done(), so the unfinished count
                                    # never touches zero and task_queue.join() cannot
                                    # return while a retry is still pending
        except Exception as err:
            logger.debug(u'\nuid: {}\nerr: {}\ntraceback: {}'.format(uid, err, traceback.format_exc()))
            task_queue.put(uid)
            task_queue.task_done()
        else:
            try:
                code = resp.json()['code']
                if code != '100000':  # '100000' is Weibo's success code
                    logger.debug(u'uid: {} code: {}'.format(uid, code))
                else:
                    logger.debug(u'uid: {}'.format(uid))
            except Exception as err:
                logger.debug(u'\nuid: {}\nresp: {}\nerr: {}\ntraceback: {}'.format(uid, resp.text, err, traceback.format_exc()))
            finally:
                task_queue.task_done()
                


if __name__ == '__main__':
    
    # lines: the raw request captured with Fiddler (this is what headers.txt holds)
    '''
    ['POST http://weibo.com/aj/filter/block?ajwvr=6 HTTP/1.1',
     'Host: weibo.com',
     'User-Agent: Mozilla/5.0 (Windows NT 6.1; WOW64; rv:56.0) Gecko/20100101 Firefox/56.0',
     'Accept: */*',
     'Accept-Language: zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3',
     'Accept-Encoding: gzip, deflate',
     'Content-Type: application/x-www-form-urlencoded',
     'X-Requested-With: XMLHttpRequest',
     'Referer: http://weibo.com/u/5471246591?is_hot=1',
     'Content-Length: 57',
     'Cookie: your cookie###########################################',
     'Connection: keep-alive',
     '',
     'uid=5471246591&filter_type=1&status=1&interact=1&follow=1']
    '''

    # Removing a uid from the blacklist:
    # In [317]: url2='http://weibo.com/aj/f/delblack?ajwvr=6'
    # In [318]: resp = s.post(url2, data={'uid':'5209943797'})

    # In [320]: resp.json()
    # Out[320]: {u'code': u'100000', u'data': {}, u'msg': u''}
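    # A reusable sketch of the reverse operation, built from the capture above
    # (an assumption: the session s created below still carries a valid cookie):
    def unblock(session, uid):
        resp = session.post('http://weibo.com/aj/f/delblack?ajwvr=6',
                            data={'uid': uid})
        return resp.json().get('code') == '100000'  # '100000' means success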
    
    with open('uids.txt') as f:
        uids = [uid.strip() for uid in f.readlines()]

    with open('headers.txt') as f:
        lines = [i.strip() for i in f.readlines()]
    
    url = lines[0].split()[1]  # request line: 'POST http://weibo.com/aj/filter/block?ajwvr=6 HTTP/1.1'
    
    headers = {}
    for line in lines[2:-2]:  # skip the request line, Host, the blank line and the body
        k, v = line.split(': ', 1)  # split on the first ': ' only; values may contain ':'
        if k.lower() == 'content-length':
            continue  # let requests compute Content-Length per request; the captured
                      # value (57) only fits the original body
        headers[k] = v
        
    data = lines[-1]  # the form body: 'uid=5471246591&filter_type=1&status=1&interact=1&follow=1'
    # an alternative: datas = [re.sub(r'uid=\d+', 'uid=%s' % uid, data) for uid in uids]
    payload = dict([i.split('=', 1) for i in data.split('&')])
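    # e.g. payload == {'uid': '5471246591', 'filter_type': '1', 'status': '1',
    #                  'interact': '1', 'follow': '1'}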
    # note: payload.update({'uid': uid}) returns None, so block() copies with
    # dict(payload) before setting each uid

    # Single-request sanity check:
    # r = requests.post(url, headers=headers, data=payload)
    # r.text
    # {"code":"100000","msg":"\u96b1\u85cf\u6210\u529f","data":{}}
    # In [287]: r.json()['code']
    # Out[287]: u'100000'


    s = requests.Session()
    s.headers.update(headers)  # captured headers (UA, Cookie, Referer, ...) ride on every request
    s.mount('http://', requests.adapters.HTTPAdapter(pool_connections=1000, pool_maxsize=1000))
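    # pool_maxsize just needs to cover the 100 worker threads started below, so
    # each thread reuses a pooled connection instead of opening a new one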
    

    task_queue = Queue.Queue()        
    for uid in uids:
        task_queue.put(uid)
    
    threads = []
    for i in range(100):
        t = threading.Thread(target=block)  # args= would take a tuple, e.g. args=(a,) for one argument
        threads.append(t)

    for t in threads:
        t.daemon = True  # daemon threads exit with the main thread after join() returns
        t.start()
      
    task_queue.join()  # blocks until task_done() has been called once per put()
    print 'task done'
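To run it, put one uid per line in uids.txt, export the captured request from Fiddler as raw text into headers.txt (exactly the lines shown above, ending with the form body), and then:

python block.py

Note that this is Python 2 code (Queue, the print statement); on Python 3 you would at least rename Queue to queue and call print() as a function.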

        

 

posted @ 2017-10-11 12:21  my8100