.NET中HttpWebRequest模拟请求保持登录状态
最近碰到这样一个需求,ASP项目里需要从另一个API项目的接口中取数据。
我想了想,最方便也是最快速的方法就是使用HttpWebRequest请求接口。
而这里有一个问题,就是API那边接口需要登录之后才能拿到完整的数据。
未登录价格显示不全:
由于之前未曾使用过HttpWebRequest,所以并不知道登录完成之后要怎样保持登录状态,进行接下来的请求;
花费了我两天时间才研究明白(本人菜鸡,大神轻喷):HttpWebRequest每次请求时,服务端默认会认为是不同的Session,所以对于需要登录并保持Session才能操作的页面,就无法正常请求。这就要求程序的每次请求都落在同一个Session中。
服务端每次识别客户端是否在同一个Session中访问,是基于cookie中的SessionID值。每次访问服务端时,服务端通过识别客户请求cookie中的SessionID值,来判断是否在同一个Session中。那么就需要在拿列表数据之前,先请求一次登录接口,然后拿到这次请求的Cookie。
HttpWebRequest帮助类:
/// <summary>
/// Thin wrapper around <see cref="HttpWebRequest"/> that keeps cookies (and
/// therefore the server-side session) alive across successive GET/POST calls.
/// <para>
/// Two cookie strategies are supported:
/// <c>OpenRequest</c> uses a shared <see cref="CookieContainer"/>, while
/// <c>OpenRequestNew</c> sends raw cookie strings added via <see cref="SetCookieheader"/>.
/// </para>
/// </summary>
public class HttpRequest
{
    // Shared container so every OpenRequest call carries the same session cookies.
    protected CookieContainer craboCookie = new CookieContainer();
    protected string _Uri = null;               // URI of the Internet resource
    protected string _Referer = null;           // Referer header for the request
    protected string _Headers = null;           // raw header string (currently unused; kept for subclasses)
    // Accept header sent with every request.
    protected string _Accept = "image/gif, image/x-xbitmap, image/jpeg, image/pjpeg, application/x-shockwave-flash, application/vnd.ms-excel, application/vnd.ms-powerpoint, application/msword, */*";
    protected List<string> cookiesstrs = null;  // raw "name=value" cookie strings supplied by the caller
    protected string cookiesstr = null;         // last Set-Cookie header (OpenRequest) or cookie header string (OpenRequestNew)
    protected CookieCollection cookieheader = null; // cookies captured from the last OpenRequest response

    protected string _Method = null;            // HTTP method: "GET" or "POST"
    protected string _Data = null;              // request body for POST
    protected string _CharacterSet = null;      // character set used to encode the POST body and decode the response
    protected HttpStatusCode _StatusCode;       // status code of the last response
    protected StringBuilder _Html_Text = new StringBuilder(); // body (or exception message) of the last response
    public bool IsException = false;            // true when the last request threw an exception
    public int TimeOut = 20000;                 // request timeout in milliseconds

    /// <summary>
    /// Creates a helper using the given character set (default GB2312).
    /// </summary>
    /// <param name="encode">Character set name, e.g. "UTF-8" or "GB2312".</param>
    public HttpRequest(String encode = "GB2312")
    {
        cookiesstrs = new List<string>();
        _CharacterSet = encode;
    }

    /// <summary>
    /// Issues a GET request against an Internet resource.
    /// </summary>
    /// <param name="requestUriString">URI of the Internet resource.</param>
    /// <param name="requestReferer">Referer header to send (may be empty).</param>
    /// <returns>true when the response status is 200 OK; false otherwise.</returns>
    public bool OpenRequest(string requestUriString, string requestReferer)
    {
        _Uri = requestUriString;
        _Referer = requestReferer;
        _Method = "GET";
        return OpenRequest();
    }

    /// <summary>
    /// Issues a POST request against an Internet resource using the shared cookie container.
    /// </summary>
    /// <param name="requestUriString">URI of the Internet resource.</param>
    /// <param name="requestReferer">Referer header to send (may be empty).</param>
    /// <param name="requestData">URL-encoded form data to POST.</param>
    /// <returns>true when the response status is 200 OK; false otherwise.</returns>
    public bool OpenRequest(string requestUriString, string requestReferer, string requestData)
    {
        _Uri = requestUriString;
        _Referer = requestReferer;
        _Data = requestData;
        _Method = "POST";
        return OpenRequest();
    }

    /// <summary>
    /// Issues a POST request, sending the cookies registered via <see cref="SetCookieheader"/>
    /// as a raw Cookie header instead of using the cookie container.
    /// </summary>
    /// <param name="requestUriString">URI of the Internet resource.</param>
    /// <param name="requestReferer">Referer header to send (may be empty).</param>
    /// <param name="requestData">URL-encoded form data to POST.</param>
    /// <returns>true when the response status is 200 OK; false otherwise.</returns>
    public bool OpenRequestNew(string requestUriString, string requestReferer, string requestData)
    {
        _Uri = requestUriString;
        _Referer = requestReferer;
        _Data = requestData;
        _Method = "POST";
        return OpenRequestNew();
    }

    /// <summary>
    /// Registers a raw "name=value" cookie string to be sent by <see cref="OpenRequestNew"/>.
    /// </summary>
    public void SetCookieheader(string Cookie)
    {
        cookiesstrs.Add(Cookie);
    }

    /// <summary>
    /// Returns the cookie string captured by the last request
    /// (Set-Cookie header for OpenRequest; cookie-header string for OpenRequestNew).
    /// </summary>
    public string GetCookieheader()
    {
        return cookiesstr;
    }

    /// <summary>
    /// Adds cookies to the shared cookie container used by <see cref="OpenRequest"/>.
    /// </summary>
    public void SetCookie(CookieCollection Cookie)
    {
        craboCookie.Add(Cookie);
    }

    /// <summary>
    /// Returns the cookies collected from the last <see cref="OpenRequest"/> response.
    /// </summary>
    public CookieCollection GetCookie()
    {
        return cookieheader;
    }

    /// <summary>
    /// Core request path using the shared cookie container, so successive
    /// requests are recognized by the server as the same session.
    /// </summary>
    /// <returns>true when the response status is 200 OK; false otherwise.</returns>
    private bool OpenRequest()
    {
        // Reset the buffered response text from any previous call.
        _Html_Text.Remove(0, _Html_Text.Length);

        HttpWebRequest Request = (HttpWebRequest)WebRequest.Create(_Uri);
        Request.AllowAutoRedirect = true;

        // SECURITY: trusts every HTTPS certificate. Acceptable only for
        // internal/test endpoints; use proper validation in production.
        if (Regex.IsMatch(_Uri, "^https://", RegexOptions.IgnoreCase))
            ServicePointManager.ServerCertificateValidationCallback = (sender, cert, chain, errors) => true;

        Request.Timeout = TimeOut;
        Request.Method = _Method;
        Request.Accept = _Accept;
        Request.UserAgent = "Mozilla/5.0 (X11; U; Linux i686; zh-CN; rv:1.9.1.2) Gecko/20090803 Fedora/3.5.2-2.fc11 Firefox/3.5.2";
        Request.Referer = _Referer;
        Request.Proxy = null;
        Request.CookieContainer = craboCookie;
        // Let the framework decompress gzip/deflate bodies; the previous code
        // silently discarded gzip responses (empty switch case).
        Request.AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate;

        if (_Method == "POST")
        {
            Request.ContentType = "application/x-www-form-urlencoded";
            byte[] bytes = Encoding.GetEncoding(_CharacterSet).GetBytes(_Data);
            Request.ContentLength = bytes.Length;
            using (Stream writer = Request.GetRequestStream())
            {
                writer.Write(bytes, 0, bytes.Length);
            }
        }

        bool bResult = true;
        try
        {
            // using ensures the response/connection is released on every path
            // (the previous code leaked the HttpWebResponse).
            using (HttpWebResponse Response = (HttpWebResponse)Request.GetResponse())
            {
                cookiesstr = Response.Headers["Set-Cookie"];
                cookieheader = Request.CookieContainer.GetCookies(new Uri(_Uri));
                _StatusCode = Response.StatusCode;
                if (_StatusCode == HttpStatusCode.OK)
                {
                    using (Stream reader = Response.GetResponseStream())
                    using (StreamReader sr = new StreamReader(reader, Encoding.GetEncoding(CharacterSet)))
                    {
                        _Html_Text.Append(sr.ReadToEnd());
                    }
                    try
                    {
                        // Best effort: the payload may not be XML at all.
                        _doc.RemoveAll();
                        _doc.LoadXml(_Html_Text.ToString().Trim());
                    }
                    catch { /* non-XML responses remain available via HtmlDocument */ }
                }
                else
                {
                    bResult = false;
                }
            }
        }
        catch (Exception pEx)
        {
            IsException = true;
            _Html_Text.Append(pEx.Message);
            return false;
        }
        return bResult;
    }

    /// <summary>
    /// Core request path that sends caller-supplied raw cookie strings
    /// (added via <see cref="SetCookieheader"/>) in a single Cookie header.
    /// </summary>
    /// <returns>true when the response status is 200 OK; false otherwise.</returns>
    private bool OpenRequestNew()
    {
        // Reset the buffered response text from any previous call.
        _Html_Text.Remove(0, _Html_Text.Length);

        HttpWebRequest Request = (HttpWebRequest)WebRequest.Create(_Uri);
        Request.AllowAutoRedirect = true;

        // SECURITY: trusts every HTTPS certificate. Acceptable only for
        // internal/test endpoints; use proper validation in production.
        if (Regex.IsMatch(_Uri, "^https://", RegexOptions.IgnoreCase))
            ServicePointManager.ServerCertificateValidationCallback = (sender, cert, chain, errors) => true;

        Request.Timeout = TimeOut;
        Request.Method = _Method;
        Request.Accept = _Accept;
        Request.UserAgent = "Mozilla/5.0 (X11; U; Linux i686; zh-CN; rv:1.9.1.2) Gecko/20090803 Fedora/3.5.2-2.fc11 Firefox/3.5.2";
        Request.Referer = _Referer;
        Request.Proxy = null;
        Request.AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate;

        // Cookies must be joined with "; " into ONE Cookie header. Calling
        // Headers.Add("Cookie", ...) in a loop comma-joins the values, which
        // servers do not parse as separate cookies.
        if (cookiesstrs.Count > 0)
            Request.Headers.Add("Cookie", string.Join("; ", cookiesstrs));

        if (_Method == "POST")
        {
            Request.ContentType = "application/x-www-form-urlencoded";
            byte[] bytes = Encoding.GetEncoding(_CharacterSet).GetBytes(_Data);
            Request.ContentLength = bytes.Length;
            using (Stream writer = Request.GetRequestStream())
            {
                writer.Write(bytes, 0, bytes.Length);
            }
        }

        bool bResult = true;
        try
        {
            using (HttpWebResponse Response = (HttpWebResponse)Request.GetResponse())
            {
                _StatusCode = Response.StatusCode;
                if (_StatusCode == HttpStatusCode.OK)
                {
                    using (Stream reader = Response.GetResponseStream())
                    using (StreamReader sr = new StreamReader(reader, Encoding.GetEncoding(CharacterSet)))
                    {
                        _Html_Text.Append(sr.ReadToEnd());
                    }
                    try
                    {
                        // Best effort: the payload may not be XML at all.
                        _doc.RemoveAll();
                        _doc.LoadXml(_Html_Text.ToString().Trim());
                    }
                    catch { /* non-XML responses remain available via HtmlDocument */ }
                    // Serialize the container's cookies for this URI into a header string.
                    cookiesstr = craboCookie.GetCookieHeader(Request.RequestUri);
                }
                else
                {
                    bResult = false;
                }
            }
        }
        catch (Exception pEx)
        {
            IsException = true;
            _Html_Text.Append(pEx.Message);
            return false;
        }
        return bResult;
    }

    private XmlDocument _doc = new XmlDocument();

    /// <summary>
    /// XML document parsed from the last response (empty if the body was not XML).
    /// </summary>
    public XmlDocument Doc
    {
        get { return _doc; }
    }

    /// <summary>
    /// Gets or sets the character set used for request/response payloads.
    /// </summary>
    public string CharacterSet
    {
        get { return _CharacterSet; }
        set { _CharacterSet = value; }
    }

    /// <summary>
    /// Gets or sets the URI of the Internet resource.
    /// </summary>
    public string RequestUriString
    {
        get { return _Uri; }
        set { _Uri = value; }
    }

    /// <summary>
    /// Gets or sets the Accept header sent with each request.
    /// </summary>
    public string Accept
    {
        get { return _Accept; }
        set { _Accept = value; }
    }

    /// <summary>
    /// Body text of the last response (or the exception message on failure).
    /// </summary>
    public string HtmlDocument
    {
        get { return _Html_Text.ToString(); }
    }
}
HttpWebRequest设置Cookie可以用以下两种方式:
- Request.CookieContainer = craboCookie;
- Request.Headers.Add("Cookie", cookie);
调用方法:
// Log in once and capture the session cookie so later requests stay authenticated.
HttpRequest hr = new HttpRequest("UTF-8");
string strUrl = APIUrl + "/IAccount/Login";
string strUrlPan = string.Format("customerID={0}&md5Pwd={1}&loginType={2}&source={3}",
    txtUid.Text.Trim(), Util.GetMD5(txtPwd.Text.Trim()), 1, 3);
hr.OpenRequest(strUrl + "?" + strUrlPan, "");

// Cache the login cookies for subsequent API calls.
Session["Cookieheader"] = hr.GetCookie();
var rb = Newtonsoft.Json.JsonConvert.DeserializeObject<LoginResponseData>(hr.HtmlDocument.Trim());

// Persist the selected web-site identifier.
CookieUtil.ClearCookie("ChangeWebSiteSysNo");
CookieUtil.SetCookie("ChangeWebSiteSysNo", rb.data.CustomerInfo.WebSiteSysNo.ToString());

// Persist the logged-in user's unique identifier (userGuid) from the Set-Cookie header.
string cookiestr = hr.GetCookieheader();
string str = getCookie(cookiestr, "path=/,userGuid").Replace("path=/,", "");
var cookieArr = str.Split('=');
CookieUtil.ClearCookie("userGuid");
CookieUtil.SetCookie("userGuid", cookieArr.Length > 0 ? cookieArr[1] : "");
这里有一个比较坑的就是,因为我那边API项目不仅要验证是否登录还要验证用户唯一标识,所以我这里将userGuid也缓存了起来在下一次请求时一并传过去。
然后再另起一个请求拿数据
// Attach the cached login cookies (plus the user's unique identifier) to the request.
Icson.Utils.HttpRequest hr1 = new Icson.Utils.HttpRequest("UTF-8");
System.Net.CookieCollection Cookies = new System.Net.CookieCollection();
if (Session["Cookieheader"] != null)
    Cookies.Add((System.Net.CookieCollection)Session["Cookieheader"]);
Cookies.Add(new System.Net.Cookie("userGuid", CookieUtil.GetCookie("userGuid"), "/", APIUrl.Replace("http://", "")));
hr1.SetCookie(Cookies);

// Request the product list with the authenticated session.
string strUrl = APIUrl + "/IProductList/ListNew";
string strUrlPan = string.Format(
    "c1SysNo={0}&c2SysNo={1}&c3SysNo={2}&extCode={3}&filter={4}&limit=30&madeIn=-1&offset={5}&query={6}&sType=1&sort=8&source=3&systemversion=9&version=3.2.2",
    ddlC1.SelectedItem?.Value.TryInt() < 0 ? "" : ddlC1.SelectedItem?.Value,
    ddlC2.SelectedItem?.Value.TryInt() < 0 ? "" : ddlC2.SelectedItem?.Value,
    "", 0, "", 0, txtProductName.Text.Trim());
hr1.OpenRequest(strUrl + "?" + strUrlPan, "", "");
这样就可以拿到完整的列表数据了
以上就是一次HttpWebRequest踩坑经历,希望各位遇到能够避免在这上面浪费过多时间。