When we need to scrape some data, we often run into sites that block repeated visits from the same IP. That is when we should use proxy IPs: disguise ourselves before every request so the "enemy" can't tell it's us.
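The idea in one small example: route a request through a proxy so the target site sees the proxy's address instead of yours. Here is a minimal sketch with urllib2 (the proxy address 1.2.3.4:8080 is only a placeholder, not a real proxy):

import urllib2

# Placeholder proxy address -- substitute one of the scraped proxies below.
proxy_support = urllib2.ProxyHandler({"http": "1.2.3.4:8080"})
opener = urllib2.build_opener(proxy_support)
html = opener.open("http://www.example.com").read()

Swap in a different proxy before each request and the target site sees a different client every time.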
OK, let's get started!
This is the file that fetches the proxy IPs. I've modularized it into three functions: getIP() scrapes the candidate list, check_one() tests a single proxy, and mul_thread_check() runs those tests in parallel.
Note: some of the comments in the code are in English; that's just for convenience while coding, since one or two English words are enough.
#!/usr/bin/python
# -*- coding:utf-8 -*-
"""
author: dasuda
"""
import urllib2
import re
import socket
import threading

findIP = []              # raw IP addresses scraped from the page
IP_data = []             # IPs with their ports appended ("ip:port")
IP_data_checked = []     # proxies that passed the availability check
findPORT = []            # ports matching each scraped IP
available_table = []     # indices of the usable entries in IP_data
lock = threading.Lock()  # one shared lock for all checker threads

def getIP(url_target):
    # On xicidaili.com the IP and port each sit in their own <td> cell,
    # so the patterns are anchored on the surrounding HTML tags.
    patternIP = re.compile(r'(?<=<td>)[\d]{1,3}\.[\d]{1,3}\.[\d]{1,3}\.[\d]{1,3}')
    patternPORT = re.compile(r'(?<=<td>)[\d]{2,5}(?=</td>)')
    print "now, start to refresh proxy IP..."
    for page in range(1, 4):
        url = 'http://www.xicidaili.com/nn/' + str(page)
        headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64)"}
        request = urllib2.Request(url=url, headers=headers)
        response = urllib2.urlopen(request)
        content = response.read()
        findIP = re.findall(patternIP, str(content))
        findPORT = re.findall(patternPORT, str(content))
        # assemble each IP with its port as "ip:port"
        for i in range(len(findIP)):
            findIP[i] = findIP[i] + ":" + findPORT[i]
        IP_data.extend(findIP)
        print "get page", page
    print "refresh done!!!"
    # check all candidates with multithreading
    mul_thread_check(url_target)
    return IP_data_checked

def check_one(url_check, i):
    # setdefaulttimeout is process-wide: any proxy that hangs for more
    # than 8 seconds is treated as dead
    socket.setdefaulttimeout(8)
    try:
        proxy = {"http": IP_data[i]}
        proxy_support = urllib2.ProxyHandler(proxy)
        openercheck = urllib2.build_opener(proxy_support)
        request = urllib2.Request(url_check)
        request.add_header('User-Agent', "Mozilla/5.0 (Windows NT 10.0; WOW64)")
        # use the opener directly instead of install_opener(), which is
        # process-global and would race between threads
        html = openercheck.open(request).read()
        # the shared lock keeps prints and appends from interleaving
        lock.acquire()
        print IP_data[i], "is OK"
        # record the index of this available IP
        available_table.append(i)
        lock.release()
    except Exception:
        lock.acquire()
        print "error"
        lock.release()

def mul_thread_check(url_mul_check):
    threads = []
    for i in range(len(IP_data)):
        # create one checker thread per candidate proxy
        thread = threading.Thread(target=check_one, args=(url_mul_check, i))
        threads.append(thread)
        thread.start()
        print "new thread start", i
    for thread in threads:
        thread.join()
    # build IP_data_checked[] from the indices collected in available_table
    for cnt in range(len(available_table)):
        assemble_ip = {'http': IP_data[available_table[cnt]]}
        IP_data_checked.append(assemble_ip)
    print "available proxy ip:", len(available_table)
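For reference, here is how the module above might be called from another script. This is just a sketch: I'm assuming the file is saved as get_proxy_ip.py (the name is not given above), and the target URL is only an example.

# Assumed file name for the module above: get_proxy_ip.py
import get_proxy_ip

# Validate the scraped proxies against the site we actually want to crawl.
usable = get_proxy_ip.getIP("http://www.baidu.com")
print "usable proxies:", len(usable)
if usable:
    print "example entry:", usable[0]  # e.g. {'http': '123.56.74.13:8080'}

Because getIP() returns ready-made {'http': 'ip:port'} dicts, each entry can be passed straight to urllib2.ProxyHandler for the real crawl.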