Python urllib2 module
References:
http://blog.csdn.net/wklken/article/details/7364390
http://hankjin.blog.163.com/blog/static/3373193720105140583594/
1. Basic usage:
res = urllib2.urlopen(url)
print res.read()
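The object returned by urlopen also exposes the status code, final URL, and response headers. A minimal sketch, assuming a reachable placeholder URL:

import urllib2

url = "http://www.example.com"   # placeholder URL
res = urllib2.urlopen(url)
print res.getcode()   # HTTP status code, e.g. 200
print res.geturl()    # final URL after any redirects
print res.info()      # response headers
print res.read()      # body as a string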
2. Adding data for GET or POST
data = {"name": "hank", "passwd": "hjz"}
urllib2.urlopen(url, urllib.urlencode(data))
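Passing a data argument turns the request into a POST; for a GET the encoded parameters are appended to the URL instead. A short sketch of both, using a placeholder URL:

import urllib
import urllib2

url = "http://www.example.com/search"   # placeholder URL
data = {"name": "hank", "passwd": "hjz"}

# POST: the encoded parameters go in the data argument
urllib2.urlopen(url, urllib.urlencode(data))

# GET: the encoded parameters are appended to the URL as a query string
urllib2.urlopen(url + "?" + urllib.urlencode(data))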
3. Adding HTTP headers
Note that urlopen's third positional argument is a timeout, not headers; headers go on a Request object:
header = {"User-Agent": "Mozilla-Firefox5.0"}
req = urllib2.Request(url, urllib.urlencode(data), header)
urllib2.urlopen(req)
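Headers can also be attached to an existing Request one at a time with add_header; a brief sketch using the same placeholder data:

import urllib
import urllib2

url = "http://www.example.com"          # placeholder URL
data = {"name": "hank", "passwd": "hjz"}

req = urllib2.Request(url, urllib.urlencode(data))
# add_header replaces any existing header with the same name
req.add_header("User-Agent", "Mozilla-Firefox5.0")
res = urllib2.urlopen(req)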
4. Adding a session (cookie handling)
cj = cookielib.CookieJar()
cjhandler = urllib2.HTTPCookieProcessor(cj)
opener = urllib2.build_opener(cjhandler)
urllib2.install_opener(opener)
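After install_opener, cookies set by the server are kept in the jar and sent back automatically on later requests. A minimal sketch; the login URL and form fields are placeholders:

import cookielib
import urllib
import urllib2

cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
urllib2.install_opener(opener)

# first request: the server's Set-Cookie headers populate the jar
urllib2.urlopen("http://www.example.com/login",
                urllib.urlencode({"username": "user", "password": "111111"}))

# inspect what was stored
for item in cj:
    print item.name, item.value

# later requests through the installed opener send the cookies back
urllib2.urlopen("http://www.example.com/profile")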
5. Adding Basic authentication
password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
top_level_url = "http://www.163.com/"
password_mgr.add_password(None, top_level_url, username, password)
handler = urllib2.HTTPBasicAuthHandler(password_mgr)
opener = urllib2.build_opener(handler)
urllib2.install_opener(opener)
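Once the opener is installed, a request to any URL under top_level_url answers the server's 401 challenge with the stored credentials. A short sketch with placeholder credentials (the URL is only illustrative and may not actually require Basic auth):

import urllib2

top_level_url = "http://www.163.com/"
username, password = "user", "secret"   # placeholders

password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
password_mgr.add_password(None, top_level_url, username, password)
opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(password_mgr))
urllib2.install_opener(opener)

# credentials are sent automatically when the server returns 401
print urllib2.urlopen(top_level_url).getcode()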
6. Using a proxy
proxy_support = urllib2.ProxyHandler({"http": "http://1.2.3.4:3128/"})
opener = urllib2.build_opener(proxy_support)
urllib2.install_opener(opener)
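Instead of installing the opener globally, it can be used directly so the proxy only applies to requests made through it. A minimal sketch with the same placeholder proxy address:

import urllib2

proxy_support = urllib2.ProxyHandler({"http": "http://1.2.3.4:3128/"})
opener = urllib2.build_opener(proxy_support)

# use the opener directly instead of install_opener;
# other urllib2.urlopen calls remain unaffected
res = opener.open("http://www.example.com")
print res.getcode()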
7. Setting a timeout
socket.setdefaulttimeout(5)
or
urllib2.urlopen(url, timeout=5)
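A timeout surfaces as an exception: urlopen usually raises urllib2.URLError with a socket.timeout reason, and a later read() can raise socket.timeout directly. A brief sketch of handling both:

import socket
import urllib2

url = "http://www.example.com"   # placeholder URL
try:
    res = urllib2.urlopen(url, timeout=5)
    print len(res.read())
except urllib2.URLError, e:
    # connection-level timeouts are reported as URLError with a socket.timeout reason
    print "request failed:", e.reason
except socket.timeout:
    # read() itself can also time out
    print "request timed out"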
A longer example for reference:
#!/usr/bin/python
# -*- coding:utf-8 -*-
# urllib2_test.py
# author: wklken
# 2012-03-17 wklken@yeah.net


import urllib, urllib2, cookielib, socket

url = "http://www.testurl....."  # change yourself

# simplest way
def use_urllib2():
    try:
        f = urllib2.urlopen(url, timeout=5).read()
        print len(f)
    except urllib2.URLError, e:
        print e.reason

# using Request
def get_request():
    # a timeout can be set
    socket.setdefaulttimeout(5)
    # parameters can be added [no data -> GET; with data as below -> POST]
    params = {"wd": "a", "b": "2"}
    # request headers can be added so the client identifies itself
    i_headers = {"User-Agent": "Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.1) Gecko/20090624 Firefox/3.5",
                 "Accept": "text/plain"}
    # use post, have some params post to server, if not supported, will throw exception
    #req = urllib2.Request(url, data=urllib.urlencode(params), headers=i_headers)
    req = urllib2.Request(url, headers=i_headers)

    # after creating the request, more headers can be added; for duplicate keys the later one wins
    #req.add_header('Accept', 'application/json')
    # the HTTP method can also be overridden
    #req.get_method = lambda: 'PUT'
    try:
        page = urllib2.urlopen(req)
        print len(page.read())
        # like get
        #url_params = urllib.urlencode({"a": "1", "b": "2"})
        #final_url = url + "?" + url_params
        #print final_url
        #data = urllib2.urlopen(final_url).read()
        #print "Method:get ", len(data)
    except urllib2.HTTPError, e:
        print "Error Code:", e.code
    except urllib2.URLError, e:
        print "Error Reason:", e.reason

def use_proxy():
    enable_proxy = False
    proxy_handler = urllib2.ProxyHandler({"http": "http://proxyurlXXXX.com:8080"})
    null_proxy_handler = urllib2.ProxyHandler({})
    if enable_proxy:
        opener = urllib2.build_opener(proxy_handler, urllib2.HTTPHandler)
    else:
        opener = urllib2.build_opener(null_proxy_handler, urllib2.HTTPHandler)
    # this sets the global opener for urllib2
    urllib2.install_opener(opener)
    content = urllib2.urlopen(url).read()
    print "proxy len:", len(content)

class NoExceptionCookieProcesser(urllib2.HTTPCookieProcessor):
    def http_error_403(self, req, fp, code, msg, hdrs):
        return fp
    def http_error_400(self, req, fp, code, msg, hdrs):
        return fp
    def http_error_500(self, req, fp, code, msg, hdrs):
        return fp

def hand_cookie():
    cookie = cookielib.CookieJar()
    #cookie_handler = urllib2.HTTPCookieProcessor(cookie)
    # after adding the error-tolerant handler above
    cookie_handler = NoExceptionCookieProcesser(cookie)
    opener = urllib2.build_opener(cookie_handler, urllib2.HTTPHandler)
    url_login = "https://www.yourwebsite/?login"
    params = {"username": "user", "password": "111111"}
    opener.open(url_login, urllib.urlencode(params))
    for item in cookie:
        print item.name, item.value
    #urllib2.install_opener(opener)
    #content = urllib2.urlopen(url).read()
    #print len(content)

# get the final page URL after N redirects
def get_request_direct():
    import httplib
    httplib.HTTPConnection.debuglevel = 1
    request = urllib2.Request("http://www.google.com")
    request.add_header("Accept", "text/html,*/*")
    request.add_header("Connection", "Keep-Alive")
    opener = urllib2.build_opener()
    f = opener.open(request)
    print f.url
    print f.headers.dict
    print len(f.read())

if __name__ == "__main__":
    use_urllib2()
    get_request()
    get_request_direct()
    use_proxy()
    hand_cookie()
A small example of testing a WSGI interface:
url = 'http://192.168.33.11:9008/getActivityInfo'
data = '''{"userid":"123","type":"1","flag":"t32"}'''
# urllib.urlencode raises an error here, since data is already a JSON string rather than a dict
# response = urllib2.urlopen(url, urllib.urlencode(data))
response = urllib2.urlopen(url, data)
print response.getcode()
print response.read()
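urllib.urlencode expects a dict or a sequence of pairs, which is why calling it on the JSON string above fails; the raw string can be posted as-is, optionally with an explicit Content-Type header. A sketch reusing the values from the snippet above:

import urllib2

url = 'http://192.168.33.11:9008/getActivityInfo'
data = '''{"userid":"123","type":"1","flag":"t32"}'''

# post the raw JSON body and declare its content type explicitly
req = urllib2.Request(url, data, {"Content-Type": "application/json"})
response = urllib2.urlopen(req)
print response.getcode()
print response.read()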