Boosting CSDN blog view counts with Python 3

1. Use Python 3 requests to fetch an article and extract its read count:
request = requests.get(referer, headers=headers, proxies={"http": "http://" + proxy, "https": "http://" + proxy})
html = request.content
etree = lxml.html.etree
content = etree.HTML(html)
# Use XPath to extract the read count
read_num = content.xpath('//span[@class="read-count"]/text()')
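
Put together, step 1 looks like the short sketch below. This is a minimal standalone version, assuming no proxy is needed: the article URL is simply the first entry from the list further down, and the read-count XPath is the one from the snippet above (CSDN may change its markup over time).

# Minimal sketch of step 1: fetch a single article and print its read count.
import requests
import lxml.html

url = "https://blog.csdn.net/qq_24347541/article/details/88737370"
headers = {"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11"}

response = requests.get(url, headers=headers, timeout=10)
tree = lxml.html.etree.HTML(response.content)
read_num = tree.xpath('//span[@class="read-count"]/text()')
print(''.join(read_num))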

2. Set time.sleep(). Extended testing showed that an interval of about 6 s works best.
time.sleep(6)
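
For illustration, the delay can be dropped into a simple visiting loop like the sketch below; the small random jitter on top of the 6 s baseline is my own addition and is not part of the original script.

# Sketch: pause roughly 6 s between consecutive visits.
import random
import time
import requests

url_list = [
    "https://blog.csdn.net/qq_24347541/article/details/88737370",
    "https://blog.csdn.net/qq_24347541/article/details/88734308",
]

for url in url_list:
    requests.get(url, headers={"User-Agent": "Mozilla/5.0"}, timeout=10)
    # 6 s baseline plus a little jitter so requests do not arrive at a fixed rhythm
    time.sleep(6 + random.uniform(0, 2))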

3. To keep the IP from being blocked, randomize the User-Agent and route requests through proxy IPs.
        headers = {"User-Agent" : "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11"}
        request = requests.get("https://www.kuaidaili.com/free/inha/"+str(self.page), headers=headers)
        html = request.content
        etree = lxml.html.etree
        content = etree.HTML(html)
        #print html
        ip = content.xpath('//td[@data-title="IP"]/text()')
        port = content.xpath('//td[@data-title="PORT"]/text()')
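
As a quick restatement before the full listing, the scraped IP and port lists can be paired up and turned into the proxies mapping that requests expects, as in the short sketch below; zip() here is equivalent to the nested index loop used in the full program.

        # Continuing from the snippet above (ip and port come from the XPath queries there):
        # pair each IP with its port and build the proxies mapping for requests.
        import random

        proxy_list = [addr + ':' + p for addr, p in zip(ip, port)]
        proxy = random.choice(proxy_list)
        # requests applies a proxy only to the schemes listed here, so both http
        # and https are mapped to the same proxy address.
        proxies = {"http": "http://" + proxy, "https": "http://" + proxy}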

Full program source:
# coding:utf-8
import lxml.html
import random
import time
import requests

 
class CsdnSpider():
    USER_AGENTS = [
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36 OPR/26.0.1656.60',
        'Opera/8.0 (Windows NT 5.1; U; en)',
        'Mozilla/5.0 (Windows NT 5.1; U; en; rv:1.8.1) Gecko/20061208 Firefox/2.0.0 Opera 9.50',
        'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; en) Opera 9.50',
        'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0',
        'Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.57.2 (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36',
        'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
        'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/2.0 Safari/536.11',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER',
        'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; LBBROWSER)',
        'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 SE 2.X MetaSr 1.0',
        'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SV1; QQDownload 732; .NET4.0C; .NET4.0E; SE 2.X MetaSr 1.0)',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/38.0.2125.122 UBrowser/4.0.3214.0 Safari/537.36'
    ]
    url_list = [
        "https://blog.csdn.net/qq_24347541/article/details/88737370",
        "https://blog.csdn.net/qq_24347541/article/details/88734308",
        "https://blog.csdn.net/qq_24347541/article/details/88709917",
        "https://blog.csdn.net/qq_24347541/article/details/88668274",
        "https://blog.csdn.net/qq_24347541/article/details/88655659",
        "https://blog.csdn.net/qq_24347541/article/details/88532762",
		"https://blog.csdn.net/qq_24347541/article/details/88531973",
		"https://blog.csdn.net/qq_24347541/article/details/88793014",
		"https://blog.csdn.net/qq_24347541/article/details/88795543",
		"https://blog.csdn.net/qq_24347541/article/details/88813899",
		"https://blog.csdn.net/qq_24347541/article/details/88821545",
		"https://blog.csdn.net/qq_24347541/article/details/88849313",
		"https://blog.csdn.net/qq_24347541/article/details/88865019",
		"https://blog.csdn.net/qq_24347541/article/details/88888628",
		"https://blog.csdn.net/qq_24347541/article/details/88966367",
		"https://blog.csdn.net/qq_24347541/article/details/88992309",
		"https://blog.csdn.net/qq_24347541/article/details/89018114",
		"https://blog.csdn.net/qq_24347541/article/details/89139095",
		"https://blog.csdn.net/qq_24347541/article/details/89242350",
		"https://blog.csdn.net/qq_24347541/article/details/89354230",
		"https://blog.csdn.net/qq_24347541/article/details/89400343",
		"https://blog.csdn.net/qq_24347541/article/details/89475722",
		"https://blog.csdn.net/qq_24347541/article/details/89631592",
		"https://blog.csdn.net/qq_24347541/article/details/89710562",
		"https://blog.csdn.net/qq_24347541/article/details/89874646",
		"https://blog.csdn.net/qq_24347541/article/details/89949423",
		"https://blog.csdn.net/qq_24347541/article/details/90201741",
        "https://blog.csdn.net/qq_24347541/article/details/90265346",
        "https://blog.csdn.net/qq_24347541/article/details/90408075",
        "https://blog.csdn.net/qq_24347541/article/details/90600742",
        "https://blog.csdn.net/qq_24347541/article/details/90638278"
    ]
    def __init__(self):
        self.page = 0
        self.proxy = []
    def get_proxy(self):
        self.page+=1
        headers = {"User-Agent" : "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_0) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11"}
        request = requests.get("https://www.kuaidaili.com/free/inha/"+str(self.page), headers=headers)
        html = request.content
        etree = lxml.html.etree
        content = etree.HTML(html)
        #print html
        ip = content.xpath('//td[@data-title="IP"]/text()')
        port = content.xpath('//td[@data-title="PORT"]/text()')
        # Pair up the scraped IPs and ports into 'ip:port' proxy strings
        for i in range(len(ip)):
            for p in range(len(port)):
                if i == p:
                    if ip[i] + ':' + port[p] not in self.proxy:
                        self.proxy.append(ip[i] + ':' + port[p])
        # print self.proxy
        if self.proxy:
            print ("this use" + str(self.page) + "page IP")
            self.spider()
 
    def spider(self):
        num = 0      # number of successful visits
        err_num = 0  # number of failed visits
        while True:
            # Pick a random User-Agent and proxy for each request
            user_agent = random.choice(self.USER_AGENTS)
            proxy = random.choice(self.proxy)

            referer = random.choice(self.url_list)  # pick a random article URL
            headers = {
                "Host": "blog.csdn.net",
                "Connection": "keep-alive",
                "Cache-Control": "max-age=0",
                "Upgrade-Insecure-Requests": "1",
                "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
                "Referer": "https://blog.csdn.net/qq_24347541",
                "User-Agent": user_agent,
                "Accept-Language": "zh-CN,zh;q=0.9",
				"Cookie": "smidV2=2018061109563875e1d84afd822e152ccb71c406cf4e44009bb058182434650; uuid_tt_dd=10_37292961410-1553592218451-585832; UN=RUSH00000; Hm_ct_6bcd52f51e9b3dce32bec4a3997715ac=6525*1*10_37292961410-1553592218451-585832!5744*1*RUSH00000; acw_tc=2760824015580561068492252e3b68add4b06393ed45927064f907a6c5015c; __yadk_uid=szPOcQVFaUokWOOM2tzYYKteKJO5RAK6; acw_sc__v3=5ce219647dc08d09158df14c726ba51f46249bfd; acw_sc__v2=5ce21963f50279c4296b98184746d6a85a5f877d; dc_session_id=10_1558321509369.910879; Hm_lvt_6bcd52f51e9b3dce32bec4a3997715ac=1558054651,1558321511; SESSION=25351c44-6b87-48dc-b04b-a336aab6db77; UserName=RUSH00000; UserInfo=338ce29d15c840c98dbe43f5da028079; UserToken=338ce29d15c840c98dbe43f5da028079; UserNick=RUSH00000; AU=AAC; BT=1558321532133; dc_tos=prs8ld; Hm_lpvt_6bcd52f51e9b3dce32bec4a3997715ac=1558321538"
            }
            try:
                # Send the request through the chosen proxy (ip:port)
                request = requests.get(referer, headers=headers, proxies={"http": "http://" + proxy, "https": "http://" + proxy})
                html = request.content
                etree = lxml.html.etree
                content = etree.HTML(html)
                # Use XPath to extract the read count
                read_num = content.xpath('//span[@class="read-count"]/text()')
                # Join the list result into a single string
                new_read_num = ''.join(read_num)
                # Notes on this XPath: blog.csdn.net/qq_41782425/article/details/84934224; an empty result means the read count was not found on the page
                if len(new_read_num) != 0:
                    print (new_read_num)
 
                num += 1
                print('Visit number ' + str(num))
                print(request.url + " via proxy IP: " + str(proxy))
                # print request.headers
                time.sleep(6)
                # After more than 100 visits, break out and fetch a fresh page of proxy IPs via get_proxy
                if num > 100:
                    break
            except Exception as result:
                err_num+=1
                print ("error message(%d):%s"%(err_num,result))
                # After 30 errors, reset page and the proxy list so get_proxy starts over from the first proxy page
                if err_num >=30:
                    self.__init__()
                    break
        # When the loop ends, fetch a new batch of proxy IPs
        print("Re-acquiring proxy IPs")
        self.get_proxy()
 
 
if __name__ == "__main__":
    CsdnSpider().get_proxy()
 
 
 
 



How to run on Linux: nohup python -u CsdnSpider.py > my.log 2>&1 & (the -u flag disables output buffering; stdout and stderr are redirected to my.log and the script keeps running in the background).
