Adding anti-crawler rules to Nginx

Reference the deny list from the server block of nginx.conf (a placement sketch follows after the rules):

include agent_deny.conf;

Then create agent_deny.conf in the nginx conf directory and add the rules below.

# Block scraping by tools such as Scrapy, curl, and HttpClient
if ($http_user_agent ~* (Scrapy|Curl|HttpClient)) {
    return 403;
}

# Block the listed user agents, as well as requests with an empty User-Agent
if ($http_user_agent ~ "FeedDemon|JikeSpider|Indy Library|Alexa Toolbar|AskTbFXTV|AhrefsBot|CrawlDaddy|CoolpadWebkit|Java|Feedly|UniversalFeedParser|ApacheBench|Microsoft URL Control|Swiftbot|ZmEu|oBot|jaunty|Python-urllib|lightDeckReports Bot|YYSpider|DigExt|YisouSpider|HttpClient|MJ12bot|heritrix|EasouSpider|Ezooms|^$") {
    return 403;
}

# Block request methods other than GET, HEAD, and POST
if ($request_method !~ ^(GET|HEAD|POST)$) {
    return 403;
}
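For reference, here is a minimal sketch of where the include directive sits, assuming a typical single-server nginx.conf; the server_name and root values are placeholders, not part of the original configuration:

server {
    listen      80;
    server_name example.com;              # placeholder domain
    root        /usr/share/nginx/html;    # placeholder docroot

    # pull in the UA and request-method deny rules defined above
    include agent_deny.conf;

    location / {
        index index.html;
    }
}

After saving both files, reload nginx for the rules to take effect; requests whose User-Agent matches one of the patterns (for example, a plain curl request) should then receive a 403 response.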
