- 哈嘍大家好,咱們又見面了
- 這次我這個菜雞爬的是拉勾網(wǎng),它的反爬機(jī)制讓我這個菜雞踩了好多坑,不過我還是把它爬下來了,真是辛苦拉勾網(wǎng)的程序員了=-=
- 好了,話不多說,上思路+代碼,這次我換個說法,我把思路也說上
- 剛開始我上了拉勾網(wǎng)後,頁面里也有信息,但是不太好弄,還要匹配正則或者啥的,然後我想到之前在一個群里看到的抓包,然後我就打開了F12看了看,還真有,就是這個
- 這里是我們想要的,然後看頁面的結(jié)構(gòu)
-
這時把這個url地址單獨打開會發(fā)現(xiàn)
- 嗨呀,驚不驚喜,意不意外,這時,就要把上邊說到的請求頭放到代碼中了。
# NOTE(review): illustrative excerpt from the blog post. It duplicates the body
# of Reptilian.headers() further down and is NOT runnable as-is: the 'cookie'
# line below holds placeholder prose instead of a value, and self.kd/self.page
# only exist inside the class.
user_agent = [
'Mozilla/5.0 (Windows NT 6.1; rv:50.0) Gecko/20100101 Firefox/50.0',
'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:50.0) Gecko/20100101 Firefox/50.0',
'Mozilla/5.0 (X11; Linux x86_64; rv:50.0) Gecko/20100101 Firefox/50.0',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.98 Safari/537.36',
'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; Trident/5.0)',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/602.2.14 (KHTML, like Gecko) Version/10.0.1 Safari/602.2.14',
'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36',
'Mozilla/5.0 (iPad; CPU OS 10_1_1 like Mac OS X) AppleWebKit/602.2.14 (KHTML, like Gecko) Version/10.0 Mobile/14B100 Safari/602.1',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:49.0) Gecko/20100101 Firefox/49.0',
'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:50.0) Gecko/20100101 Firefox/50.0',
'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:45.0) Gecko/20100101 Firefox/45.0'
]
num = random.randint(0, 9) ##pick a random index (note: 0-9 only, yet the list has 11 entries)
user_agent = user_agent[num] ##select one User-Agent at random
hearder = { ##request headers, copied from the browser's developer tools
'Accept': 'application/json, text/javascript, */*; q=0.01',
'Accept-Encoding': 'gzip, deflate, br',
'Accept-Language': 'zh-CN,zh;q=0.9',
'Connection': 'keep-alive',
'Content-Length': '25',
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
'Host': 'www.lagou.com',
'Origin': 'https://www.lagou.com',
'Referer': 'https://www.lagou.com/jobs/list_python?labelWords=sug&fromSearch=true&suginput=p',
'User-Agent': user_agent,
'X-Anit-Forge-Code': '0',
'X-Anit-Forge-Token': 'None',
'X-Requested-With': 'XMLHttpRequest'
}
##'cooking' holds the login cookie used to simulate a signed-in session
cooking = {
'cookie':這個好像要保密,所以這個是你的cookie取试。
}
##'dates' is the POST payload: the page number and the search keyword
dates = {
'first': 'false',
'kd': self.kd,
'pn': self.page
}
html = requests.post(self.url, headers=hearder,cookies=cooking, data=dates)
- 抓取html吧。下邊我上完整的代碼:
'''
Lagou (lagou.com) job-listing crawler.

POSTs search requests to Lagou's Ajax endpoint and saves position name,
education requirement and city for each result to an Excel workbook.

Original author: 高佳樂 (Gao Jiale)
'''
import requests ##HTTP client used for the POST requests
import json ##parse the JSON payload returned by the API
import random ##random User-Agent selection and sleep jitter
from openpyxl import Workbook ##Excel (.xlsx) output
import time ##throttle between pages -- a crawler that never sleeps is rude
class Reptilian():
    """Crawler for Lagou job listings.

    Repeatedly POSTs to Lagou's search Ajax endpoint, extracts position
    name, education requirement and city from each page of results, and
    saves everything to an .xlsx workbook named after the search keyword.
    """
    def __init__(self):
        # Ajax endpoint that backs the search page (returns JSON).
        self.url = 'https://www.lagou.com/jobs/positionAjax.json?needAddtionalResult=false'
        self.pages = int(input('請輸入你要獲取的前幾頁'))  # how many result pages to fetch
        self.page = 1                                      # current page, starts at 1
        self.kd = input('你要獲取的職位信息')              # search keyword POSTed to the API
        self.number = 1                                    # next free row index in the worksheet
        self.shuju = Workbook()                            # workbook accumulating all rows
        self.shuju_one = self.shuju.active                 # first (active) worksheet

    def headers(self):
        """POST one search request and return the raw response.

        A User-Agent is picked at random per call so the traffic looks less
        uniform; the session cookie is required or Lagou rejects the request.
        """
        user_agent = [
            'Mozilla/5.0 (Windows NT 6.1; rv:50.0) Gecko/20100101 Firefox/50.0',
            'Mozilla/5.0 (Windows NT 6.3; WOW64; rv:50.0) Gecko/20100101 Firefox/50.0',
            'Mozilla/5.0 (X11; Linux x86_64; rv:50.0) Gecko/20100101 Firefox/50.0',
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.98 Safari/537.36',
            'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; Trident/5.0)',
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/602.2.14 (KHTML, like Gecko) Version/10.0.1 Safari/602.2.14',
            'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36',
            'Mozilla/5.0 (iPad; CPU OS 10_1_1 like Mac OS X) AppleWebKit/602.2.14 (KHTML, like Gecko) Version/10.0 Mobile/14B100 Safari/602.1',
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:49.0) Gecko/20100101 Firefox/49.0',
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:50.0) Gecko/20100101 Firefox/50.0',
            'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:45.0) Gecko/20100101 Firefox/45.0'
        ]
        # FIX: the original used random.randint(0, 9), which could never pick
        # the 11th entry; random.choice covers the whole list.
        hearder = {
            'Accept': 'application/json, text/javascript, */*; q=0.01',
            'Accept-Encoding': 'gzip, deflate, br',
            'Accept-Language': 'zh-CN,zh;q=0.9',
            'Connection': 'keep-alive',
            # FIX: no hard-coded 'Content-Length' header -- requests computes
            # it from the payload; a fixed '25' is wrong for other keywords.
            'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
            'Host': 'www.lagou.com',
            'Origin': 'https://www.lagou.com',
            'Referer': 'https://www.lagou.com/jobs/list_python?labelWords=sug&fromSearch=true&suginput=p',
            'User-Agent': random.choice(user_agent),
            'X-Anit-Forge-Code': '0',
            'X-Anit-Forge-Token': 'None',
            'X-Requested-With': 'XMLHttpRequest'
        }
        # Login cookie simulating a signed-in session (replace with your own).
        cooking = {
            'cookie': 'user_trace_token=20180710103626-b4c2ffdc-1f66-4faf-9d75-4722b6cfd916; LGUID=20180710103627-0baf62d4-83ea-11e8-8271-525400f775ce; WEBTJ-ID=20180710140917-16482cef3b816-07cbfad53c941d-5b4b2b1d-1327104-16482cef3b91cb; _gat=1; PRE_UTM=m_cf_cpt_baidu_pc; PRE_HOST=www.baidu.com; PRE_SITE=https%3A%2F%2Fwww.baidu.com%2Fs%3Fie%3Dutf-8%26f%3D8%26rsv_bp%3D1%26rsv_idx%3D1%26tn%3Dbaidu%26wd%3D%25E6%258B%2589%25E5%258B%25BE%25E7%25BD%2591%26oq%3D%2525E7%252588%2525AC%2525E8%252599%2525AB%2525E6%25258B%252589%2525E5%25258B%2525BE%2525E7%2525BD%252591%26rsv_pq%3De97818190000b3ba%26rsv_t%3Deb2ei8ThN4xypS3meOdbjcF6svWBOdHFVTnNnKnHn64IbwKkxuhAYbl4Oxw%26rqlang%3Dcn%26rsv_enter%3D1%26inputT%3D437%26rsv_sug3%3D22%26rsv_sug1%3D13%26rsv_sug7%3D100%26rsv_sug2%3D0%26rsv_sug4%3D1747; PRE_LAND=https%3A%2F%2Fwww.lagou.com%2Flp%2Fhtml%2Fcommon.html%3Futm_source%3Dm_cf_cpt_baidu_pc; TG-TRACK-CODE=index_search; JSESSIONID=ABAAABAABEEAAJAEF19E7498CA3C7D6C23289D4F4DAFC62; X_HTTP_TOKEN=a320c7314b39615089e7c8d4e844cdcd; _putrc=51B6FCDBAC8CA5C6123F89F2B170EADC; login=true; unick=%E6%8B%89%E5%8B%BE%E7%94%A8%E6%88%B72721; showExpriedIndex=1; showExpriedCompanyHome=1; showExpriedMyPublish=1; hasDeliver=0; gate_login_token=25dac49e83fe12a32f74b689b48b5d7ce91e70cb80e78cb5ed521cb200b461e9; _ga=GA1.2.994010031.1531190203; _gid=GA1.2.814166295.1531190203; LGSID=20180710140902-be2e38ab-8407-11e8-8281-525400f775ce; LGRID=20180710141058-0329240f-8408-11e8-8281-525400f775ce; Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1531190203,1531202958; Hm_lpvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1531203074; SEARCH_ID=1c3bb17eded44f4ba841ca9a0b08909d; index_location_city=%E5%85%A8%E5%9B%BD'
        }
        # POST payload: search keyword and page number.
        dates = {
            'first': 'false',
            'kd': self.kd,
            'pn': self.page
        }
        return requests.post(self.url, headers=hearder, cookies=cooking, data=dates)

    def josn(self):
        """Fetch the current page and return the response parsed as a dict.

        (Name kept misspelled on purpose -- it is part of the public
        interface callers already use.)
        """
        response = self.headers()
        return json.loads(response.text)

    def save(self):
        """Write the workbook to '<keyword>.xlsx' in the working directory."""
        self.shuju.save(self.kd + '.xlsx')

    def content(self):
        """Main loop: fetch every requested page, extract fields, then save."""
        self.shuju_one.title = '數(shù)據(jù)'  # worksheet title
        while self.page <= self.pages:
            html = self.josn()
            try:
                # Drill into content -> positionResult -> result (a list).
                html_result = html['content']['positionResult']['result']
            except (KeyError, TypeError):
                # Narrowed from a bare except: a missing key / non-dict payload
                # means Lagou's anti-crawler refused the request.
                print('不讓訪問')
            else:
                for result in html_result:
                    # Column A: position name, B: education, C: city.
                    self.shuju_one['A%d' % self.number].value = result['positionName']
                    self.shuju_one['B%d' % self.number].value = result['education']
                    self.shuju_one['C%d' % self.number].value = result['city']
                    self.number += 1
                print('第%d頁保存完畢' % self.page)
            self.page += 1
            # Sleep 28-32 s between pages; without it Lagou blocks after ~3 pages.
            time.sleep(random.randint(28, 32))
        self.save()
######################################################################################################################################################
# Entry point: guarded so that importing this module for the Reptilian class
# does not immediately prompt for input and start a crawl.
if __name__ == '__main__':
    shuju = Reptilian()
    shuju.content()
好了代碼我也放上了,說說注意事項:
- 請求頭是要有的,cookie是要有的,post和get要區(qū)分
- 爬蟲要學(xué)會睡眠,前幾天我在一個群里還看到個:不會睡眠的爬蟲就是耍流氓=-=。所以咱們要有禮貌
- 就是看好第二個
- 我說第三個說得對
好了放幾張數(shù)據(jù)圖(數(shù)據(jù)分析我是用的在線工具)
-
因為我是學(xué)ui設(shè)計的所以我查的是ui,以及它需要的學(xué)歷,和所在地區(qū)
稍微有點慢,如果用了多線程,多進(jìn)程會快吧,我還不會呢。哈哈
好了,又到了說拜拜的時候了,咱們下次見,過幾天見