Learning Python by Doing -- BT Client Implementation (2): Getting Peers
After the torrent file has been parsed, the client needs to ask the tracker for the list of peers that hold the torrent's pieces.
The tracker is reached over HTTP, and the request is issued as a GET. Some trackers use UDP instead; I have not looked into those yet.
The GET request has the following form:
announce-url?info_hash=xxxxxxxxxxxxxxxxxxxxx&peer_id=xxxxxxxxxxxxxxxxxxxx&ip=x.x.x.x&port=xxxx&uploaded=xx&downloaded=xx&left=xx&event=x
Each parameter value in the URL must be URL-encoded (percent-escaped).
Here, info_hash is the SHA1 hash of the value of the info key in the torrent file (the value in its bencoded form); peer_id is a 20-byte ID the BT client randomly assigns to itself when the task starts; ip is this machine's external IP; port is the port the BT client listens on; uploaded, downloaded and left describe this node's transfer state; event is an optional field whose value can be started, completed or stopped.
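To make the parameters concrete, here is a minimal sketch (in the same Python 2 style as the code below) of how the query string could be assembled. The function name build_announce_query and its bencoded_info argument (the raw bencoded value of the info key) are illustrative, not part of the client code further down.

# Sketch: assembling the tracker GET query string (Python 2).
# build_announce_query and its arguments are illustrative names.
import hashlib
import urllib

def build_announce_query(bencoded_info, peer_id, ip, port,
                         uploaded, downloaded, left, event='started'):
    # info_hash is the SHA1 digest (raw 20 bytes) of the bencoded info value;
    # urlencode percent-escapes the binary string for the URL
    info_hash = hashlib.sha1(bencoded_info).digest()
    params = {
        'info_hash': info_hash,
        'peer_id': peer_id,          # 20-byte ID the client picked for itself
        'ip': ip,
        'port': port,
        'uploaded': uploaded,
        'downloaded': downloaded,
        'left': left,
        'event': event,              # optional: started / completed / stopped
    }
    return urllib.urlencode(params)

# usage: full_url = announce_url + '?' + build_announce_query(...)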
The tracker's reply is a bencoded dict. If the request fails, it contains a "failure reason" key whose value is a human-readable description of the error.
If the request succeeds, it contains the following entries (a small decoding sketch follows the list):
"interval" -- how long to wait before the next routine query to the tracker (I read this as the refresh period for peer information: the client must periodically report its download state to the tracker and fetch the latest peer list at this interval)
"peers" --为一个peer list,每个peer包含以下几项内容:
"peer id" --20字节的peer自分配id
"ip" --peer ip地址
"port" --peer 监听端口
As for the peers entry, most trackers today support the compact peer format defined in BEP 23 to shrink the response. In that format peers is a single string in which every 6 bytes describe one peer: the first 4 bytes are the peer's IP and the last 2 are its port, both in big-endian (network) byte order. There is no peer id, but that does not affect the download steps that follow.
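A minimal sketch of decoding that compact string (Python 2), using struct and socket from the standard library; decode_compact_peers is an illustrative name:

# Sketch: decoding BEP 23 compact peers (Python 2)
import socket
import struct

def decode_compact_peers(peers_str):
    peers = []
    # walk the string in complete 6-byte chunks: 4 bytes IPv4 + 2 bytes port
    for i in range(0, len(peers_str) - len(peers_str) % 6, 6):
        chunk = peers_str[i:i+6]
        ip = socket.inet_ntoa(chunk[0:4])           # network (big-endian) order
        (port,) = struct.unpack('>H', chunk[4:6])   # unsigned 16-bit, big-endian
        peers.append((ip, port))
    return peers

# decode_compact_peers('\x7f\x00\x00\x01\x1a\xe1') -> [('127.0.0.1', 6881)]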
Here is the tracker-handling code. The TrackersInfo class parses every announce URL in the torrent's announce-list, announces to each tracker in its own thread, and merges the peers returned by all of them:

'''
Created on 2012-10-2

@author: ddt
'''
import httplib
import urllib
import bcodec
import re
import torrent_file
import threading

class TrackersInfo(object):
    '''
    TODO: UDP tracker support
    TODO: tracker polling support
    '''

    __info_hash = ''

    __peer_id = None
    __host_ip = None
    __host_port = None

    __trackers = []
    __tracker_check_timeout = 0  # seconds
    __tracker_get_timeout = 0    # seconds
    __tracker_max_retrys = 0

    def __init__(self, announce_list, host_info, info_hash):
        '''
        Constructor
        '''
        self.__info_hash = info_hash
        self.__peer_id, self.__host_ip, self.__host_port = host_info

        self.__tracker_check_timeout = 3  # seconds
        self.__tracker_get_timeout = 10   # seconds
        self.__tracker_max_retrys = 2

        # init trackers: one state dict per usable announce URL
        self.__trackers = []   # per-instance list (the class-level default would be shared)
        for tier in announce_list:
            for announce in tier:
                tracker_addr = self.__get_tracker_addr(announce)
                if tracker_addr == None:
                    continue
                tracker = {}
                tracker['addr'] = tracker_addr
                tracker['rsp'] = None
                tracker['retrys'] = 0
                tracker['error'] = ''
                self.__trackers.append(tracker)

    def get_peers(self):
        # merge the (ip, port) pairs returned by all trackers, without duplicates
        peers = []
        for tracker in self.__trackers:
            rsp = tracker['rsp']
            if rsp != None:
                for peer in rsp['peers']:
                    if peer not in peers:
                        peers.append(peer)
        return peers

    def refresh_trackers(self, download_state, refresh_intvl):
        # count down each tracker's announce interval and re-announce
        # (in one thread per tracker) once it expires
        thread_list = []

        for tracker in self.__trackers:
            rsp = tracker['rsp']
            if rsp != None:
                rsp['interval'] -= refresh_intvl
                if rsp['interval'] > 0:
                    continue
                if rsp['interval'] < 0:
                    rsp['interval'] = 0

            if tracker['retrys'] < self.__tracker_max_retrys:
                thread = threading.Thread(target=TrackersInfo.__request_tracker,
                                          args=(self, tracker, download_state))
                thread.start()
                thread_list.append(thread)

        for thread in thread_list:
            thread.join()

    def __request_tracker(self, tracker, download_state):
        print "request_tracker: ", tracker['addr']

        web_addr, web_port, page_url = tracker['addr']
        tracker_con = httplib.HTTPConnection(web_addr, web_port, timeout=self.__tracker_get_timeout)
        piece_request = self.__generate_request(download_state)
        if not page_url:
            page_url = ''
        url = page_url + '?' + piece_request
        print 'http://' + web_addr + url
        try:
            tracker_con.request("GET", url)
            response = tracker_con.getresponse()
            print response.status, response.reason
            if response.reason.upper() != 'OK':
                print 'Get tracker info error:%s! tracker:%s' % (response.reason, tracker['addr'])
                tracker_con.close()
                tracker['error'] = response.reason
                tracker['retrys'] += 1
                return
            msg_encoded = response.read()
            tracker_con.close()
            rsp_msg = bcodec.bdecode(msg_encoded)
            if rsp_msg == None:
                print 'Get tracker info error:%s! tracker:%s' % (msg_encoded, tracker['addr'])
                tracker['error'] = msg_encoded
                tracker['retrys'] += 1
                return
        except Exception, e:
            tracker_con.close()
            print 'Get tracker info error:%s! tracker:%s' % (e.message, tracker['addr'])
            tracker['error'] = e.message
            tracker['retrys'] += 1
            return

        if 'failure reason' in rsp_msg.keys():
            print 'Get tracker info error:%s! tracker:%s' % (rsp_msg['failure reason'], tracker['addr'])
            tracker['error'] = rsp_msg['failure reason']
            tracker['retrys'] += 1
            return

        print rsp_msg
        # normalize the peers entry to a list of (ip, port) tuples
        peers_msg = rsp_msg['peers']
        peer_list = []
        if type(peers_msg) == type(''):
            # compact format (BEP 23): 6 bytes per peer, big-endian ip/port
            for i in range(0, len(peers_msg) - 5, 6):
                one_peer = peers_msg[i:i+6]
                ip = one_peer[0:4]
                port = one_peer[4:6]
                ip = '%d.%d.%d.%d' % (ord(ip[0]), ord(ip[1]), ord(ip[2]), ord(ip[3]))
                port = ord(port[0]) * 256 + ord(port[1])
                peer_list.append((ip, port))
            print peer_list
        elif type(peers_msg) == type([]):
            # dictionary format: list of {'peer id', 'ip', 'port'} dicts
            for peer in peers_msg:
                peer_list.append((peer['ip'], peer['port']))
        rsp_msg['peers'] = peer_list
        tracker['rsp'] = rsp_msg
        tracker['retrys'] = 0

    def __generate_request(self, download_state):
        downloaded = download_state['downloaded']
        uploaded = download_state['uploaded']
        left = download_state['left']
        event = download_state['event']

        request = {}
        request['info_hash'] = self.__info_hash
        request['peer_id'] = self.__peer_id
        request['ip'] = self.__host_ip
        request['port'] = self.__host_port
        request['uploaded'] = uploaded
        request['downloaded'] = downloaded
        request['left'] = left
        request['event'] = event
        request = urllib.urlencode(request)
        return request

    def __get_tracker_addr(self, announce):
        # split an announce URL into (host, port, path); only http:// is handled
        m = re.match(r'(http://)([^/,:]*)(:(\d*))?(/.*)?', announce)
        if m != None:
            web_addr = m.groups()[1]
            web_port = m.groups()[3]
            page_url = m.groups()[4]
        else:
            return None

        if web_port != None:
            web_port = int(web_port)
        else:
            web_port = 80

        return (web_addr, web_port, page_url)

    def __get_valid_tracker(self):
        # NOTE: unused leftover from the single-tracker version below;
        # __annouce_list is never set in this class
        tracker_addr = None

        if self.__annouce_list == None or len(self.__annouce_list) == 0:
            return None

        tracker_con = None
        found_current = False
        for tier in self.__annouce_list:
            for announce in tier:
                tracker_addr = self.__get_tracker_addr(announce)
                if tracker_addr == None:
                    continue

                (web_addr, web_port, page_url) = tracker_addr

                tracker_con = httplib.HTTPConnection(web_addr, web_port, timeout=self.__tracker_check_timeout)
                try:
                    tracker_con.connect()
                except Exception, e:
                    print e
                    continue

                tier.remove(announce)
                tier.insert(0, announce)
                tracker_con.close()
                tracker_addr = (web_addr, web_port, page_url)
                break

            ## tiers sorting, non standard
            if tracker_addr != None:
                self.__annouce_list.remove(tier)
                self.__annouce_list.insert(0, tier)
                break
        return tracker_addr

if __name__ == '__main__':
    import down_load_task
    filename = r".\narodo.torrent"
    torrent = torrent_file.TorrentFile()
    torrent.read_file(filename)
    info_hash = torrent.get_info_hash()
    print "info_hash: ", list(info_hash)
    announce_list = torrent.get_announces()
    peer_id = down_load_task._generate_peerid()
    listening_addr = down_load_task._get_listening_addr()
    host_info = (peer_id,) + listening_addr
    print 'host_info: ', host_info
    trackers = TrackersInfo(announce_list, host_info, info_hash)
    download_state = {}
    download_state['downloaded'] = 0
    download_state['uploaded'] = 0
    download_state['left'] = 364575
    download_state['event'] = 'started'
    trackers.refresh_trackers(download_state, 20)
    peers = trackers.get_peers()
    print peers
Below is the single-tracker version of the same code (class TrackerInfo); it announces only to the first reachable tracker in the announce-list:

'''
Created on 2012-10-2

@author: ddt
'''
import httplib
import urllib
import bcodec
import re

class TrackerInfo(object):
    '''
    TODO: multiple tracker support
    TODO: UDP tracker support
    TODO: tracker polling support
    '''
    __announce_list = None

    __piece_index = 0

    __peer_id = None
    __host_ip = None
    __host_port = None

    __tracker_addr = None
    __tracker_rsp = None

    __tracker_check_timeout = 0  # seconds
    __tracker_get_timeout = 0    # seconds

    def __init__(self, announce_list, host_info, piece_index):
        '''
        Constructor
        '''
        self.__announce_list = announce_list
        self.__piece_index = piece_index
        self.__peer_id, self.__host_ip, self.__host_port = host_info

        self.__tracker_check_timeout = 3  # seconds
        self.__tracker_get_timeout = 10   # seconds

    def refresh_tracker(self, piece_state, intvl):
        rsp_msg = None

        # count down the announce interval; only re-announce once it expires
        if self.__tracker_rsp != None:
            self.__tracker_rsp['interval'] -= intvl
            if self.__tracker_rsp['interval'] > 0:
                return
            else:
                self.__tracker_rsp['interval'] = 0

        if self.__tracker_addr == None:
            self.__tracker_addr = self.__get_valid_tracker()

        tracker_addr = self.__tracker_addr

        while tracker_addr != None:

            web_addr, web_port, page_url = self.__tracker_addr

            tracker_con = httplib.HTTPConnection(web_addr, web_port, timeout=self.__tracker_get_timeout)
            piece_request = self.__generate_request(piece_state)
            if not page_url:
                page_url = ''
            url = page_url + '?' + piece_request
            print 'http://' + web_addr + url
            try:
                tracker_con.connect()
                tracker_con.request("GET", url)
                response = tracker_con.getresponse()
                print response.status, response.reason
                if response.reason.upper() != 'OK':
                    print 'Get tracker info error:%s! piece_index:%d' % (response.reason, self.__piece_index)
                    tracker_con.close()
                    #tracker_addr = self.__get_valid_tracker(tracker_addr)
                    break
                msg_encoded = response.read()
                tracker_con.close()
                rsp_msg = bcodec.bdecode(msg_encoded)
                if rsp_msg == None:
                    print 'Get tracker info error:%s! piece_index:%d' % (msg_encoded, self.__piece_index)
                    #tracker_addr = self.__get_valid_tracker(tracker_addr)
                    break
            except Exception, e:
                tracker_con.close()
                print 'Get tracker info error:%s! piece_index:%d' % (e.message, self.__piece_index)
                #tracker_addr = self.__get_valid_tracker(tracker_addr)
                break

            if 'failure reason' in rsp_msg.keys():
                print 'Get tracker info error:%s! piece_index:%d' % (rsp_msg['failure reason'], self.__piece_index)
                #tracker_addr = self.__get_valid_tracker(tracker_addr)
                break

            # convert compact peers (BEP 23) into a list of dicts
            peers_msg = rsp_msg['peers']
            peer_list = []
            if type(peers_msg) == type(''):
                for i in range(0, len(peers_msg) - 5, 6):
                    one_peer = peers_msg[i:i+6]
                    peer_id = ''
                    ip = one_peer[0:4]
                    port = one_peer[4:6]
                    ip = '%d.%d.%d.%d' % (ord(ip[0]), ord(ip[1]), ord(ip[2]), ord(ip[3]))
                    port = ord(port[0]) * 256 + ord(port[1])   # big-endian port
                    peer_list.append({'peer_id': peer_id, 'ip': ip, 'port': port})
                rsp_msg['peers'] = peer_list
            self.__tracker_addr = tracker_addr
            self.__tracker_rsp = rsp_msg
            break

    def __generate_request(self, piece_state):
        downloaded = piece_state['downloaded']
        uploaded = piece_state['uploaded']
        left = piece_state['left']
        event = piece_state['event']

        request = {}
        request['info_hash'] = piece_state['info_hash']
        request['peer_id'] = self.__peer_id
        request['ip'] = self.__host_ip
        request['port'] = self.__host_port
        request['uploaded'] = uploaded
        request['downloaded'] = downloaded
        request['left'] = left
        request['event'] = event
        request = urllib.urlencode(request)
        return request

    def __get_valid_tracker(self, old_tracker=None):
        # return the first announce URL (after old_tracker, if given) that
        # accepts a TCP connection, and move it to the front of its tier
        tracker_addr = None

        if self.__announce_list == None or len(self.__announce_list) == 0:
            return None

        tracker_con = None
        found_current = False
        for tier in self.__announce_list:
            for announce in tier:
                m = re.match(r'(http://)([^/,:]*)(:(\d*))?(/.*)?', announce)
                if m != None:
                    web_addr = m.groups()[1]
                    web_port = m.groups()[3]
                    page_url = m.groups()[4]
                else:
                    continue

                if web_port != None:
                    web_port = int(web_port)
                else:
                    web_port = 80

                if old_tracker != None:
                    # skip every announce up to and including the old tracker
                    if not found_current:
                        if old_tracker == (web_addr, web_port, page_url):
                            found_current = True
                        continue

                tracker_con = httplib.HTTPConnection(web_addr, web_port, timeout=self.__tracker_check_timeout)
                try:
                    tracker_con.connect()
                except Exception, e:
                    print e
                    continue

                tier.remove(announce)
                tier.insert(0, announce)
                tracker_con.close()
                tracker_addr = (web_addr, web_port, page_url)
                break

            ## tiers sorting, non standard
            if tracker_addr != None:
                self.__announce_list.remove(tier)
                self.__announce_list.insert(0, tier)
                break
        return tracker_addr
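For completeness, here is a small usage sketch for this older class, mirroring the __main__ block of the newer version (torrent_file and down_load_task are modules from the other parts of this series; the piece_state values are illustrative). Note that the class keeps the tracker response in a private attribute, so this sketch only performs the announce itself:

# Sketch: exercising the single-tracker TrackerInfo class (Python 2)
import torrent_file
import down_load_task

torrent = torrent_file.TorrentFile()
torrent.read_file(r".\narodo.torrent")
host_info = (down_load_task._generate_peerid(),) + down_load_task._get_listening_addr()

tracker = TrackerInfo(torrent.get_announces(), host_info, 0)   # piece_index 0
piece_state = {'info_hash': torrent.get_info_hash(),
               'downloaded': 0, 'uploaded': 0, 'left': 364575, 'event': 'started'}
tracker.refresh_tracker(piece_state, 20)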
posted on 2012-10-02 20:09 duandetao