作業(yè)思路
這次作業(yè)爬取的是新浪微博的用戶信息,日爬取量約 25000 個用戶,腳本部署在云服務(wù)器上運行(不過服務(wù)器每次都因為內(nèi)存不足把進程殺掉了,汗...)
作業(yè)選取的網(wǎng)址是新浪微博手機版網(wǎng)頁擦盾,主要思路就是解析json數(shù)據(jù),不過函數(shù)之間的邏輯有點繞淌哟,也就是爬取用戶的粉絲的粉絲迹卢,重復(fù)進行,不過沒有加入去重函數(shù)和結(jié)束函數(shù)徒仓,思路看圖:
還有一個點需要注意的是如何使cookies保持長時間的有效腐碱,這是這個腳本的一個關(guān)鍵點,所以在這里選擇了requests里的session這個方法掉弛,可以更新請求的頭症见,而cookies就是在請求的頭里,經(jīng)過驗證殃饿,在爬取的過程中沒有遇到失效的情況谋作,倒是請求過快以及多次啟動腳本容易產(chǎn)生請求403的結(jié)果,也就是IP被封乎芳。更具體的解釋在代碼的注釋里有遵蚜。
作業(yè)結(jié)果
作業(yè)代碼
# -*- coding: utf-8 -*-
import requests
import json
import re
import time
import MySQLdb
# 這個是爬取一個用戶的粉絲的100個ID
def start_url(url):
s = requests.Session()
r = s.get(url, headers=headers)
totaldata = json.loads(r.text)
totalurl = []
# 這個是單頁里的粉絲的一層信息
for i in totaldata["cards"][0]["card_group"]:
item = {}
item["nickname"] = i["user"]["screen_name"]
item["id"] = i["user"]["id"]
item["fans"] = i["user"]["followers_count"]
item["follows"] = i["user"]["follow_count"]
item["url"] = i["user"]["profile_url"]
print "正在處理: " + str(i["user"]["id"])
time.sleep(2)
user_data(item)
totalurl.append(item["id"])
s.headers.update(headers)
for one in totalurl:
change_url(one)
# Generate the first 10 fans-list page URLs for one user ID and feed each
# page back to start_url(), so the set of crawled users grows exponentially.
def change_url(page):
    """Crawl up to 10 fans-list pages for the user whose ID is *page*.

    :param page: a user ID taken from a previously parsed fans list.
    A failure on any single page is skipped so the overall crawl keeps going.
    """
    baseurl = "https://m.weibo.cn/api/container/getIndex?containerid=231051_-_fans_-_%s&luicode=10000011&lfid=1005053846717719&featurecode=20000180" % page
    for page_no in range(1, 11):
        try:
            url = baseurl + "&page={}".format(page_no)
            time.sleep(2)  # throttle between page fetches
            start_url(url)
        # Fix: the original bare `except:` also swallowed KeyboardInterrupt
        # and SystemExit, making the script impossible to stop cleanly.
        except Exception:
            continue
#這個函數(shù)用來獲取用戶的更具體的信息寂殉,需要接受上面的生成器,一方面生成ID原在,一方面是接收一些信息來傳遞到存庫函數(shù)中保存起來友扰。
def user_data(user_id):
one_url = "http://m.weibo.cn/api/container/getIndex?containerid=230283%s_-_INFO" %user_id["id"]
two_url = "&title=%s" %user_id["nickname"]
three_url = "&luicode=10000011&lfid=230283%s&featurecode=20000180" %user_id["id"]
url = one_url + two_url + three_url
s = requests.Session()
r = s.get(url, headers=headers)
totaldata = json.loads(r.text)
i = totaldata["cards"][0]["card_group"]
data = {}
data["nickname"] = user_id["nickname"]
try:
data["biaoqian"] = i[3]["item_content"]
except:
data["biaoqian"] = "空"
try:
data["sex"] = i[1]["item_content"]
except:
data["sex"] = "未知"
try:
data["place"] = i[2]["item_content"]
except:
data["place"] = "未知"
data["lv"] = totaldata["cards"][1]["card_group"][0]["item_content"] #等級
try:
data["res_time"] = totaldata["cards"][1]["card_group"][2]["item_content"] #注冊時間
except:
data["res_time"] = "空"
data["id"] = user_id["id"]
data["fans"] = user_id["fans"]
data["follows"] = user_id["follows"]
data["url"] = user_id["url"]
print time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
insert(data)
def mysql_conn():
    """Open and return a new MySQLdb connection to the local `Lagou` database.

    NOTE(review): credentials are hard-coded; consider moving them to a
    config file or environment variables.
    """
    return MySQLdb.connect(
        host="127.0.0.1",
        port=3306,
        user="root",
        passwd="882645",
        db="Lagou",
        charset="utf8",
    )
# Create the destination table for insert().
def create_table():
    """Create the `weibos` table if it does not exist yet.

    Fix: added IF NOT EXISTS so restarting the script (which the write-up
    says happens often, e.g. after the OOM killer) no longer crashes on
    "table already exists".
    """
    create_sql = '''
    CREATE TABLE IF NOT EXISTS `weibos`(
    LIST INT(11) NOT NULL AUTO_INCREMENT,
    nickname VARCHAR(255),
    biaoqian VARCHAR(255),
    sex VARCHAR(255),
    place VARCHAR(255),
    id VARCHAR(255),
    fans VARCHAR(255),
    follows VARCHAR(255),
    lv VARCHAR(255),
    res_time VARCHAR(255),
    url VARCHAR(255),
    PRIMARY KEY (`LIST`)
    )ENGINE=INNODB DEFAULT CHARSET =utf8
    '''
    conn = mysql_conn()
    with conn:
        cursor = conn.cursor()
        cursor.execute(create_sql)
        conn.commit()
#插入記錄
def insert(it):
sql='insert into weibos (nickname,biaoqian,sex,place,id,fans,follows,lv,res_time,url) values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)'
conn = mysql_conn()
with conn:
cursor = conn.cursor()
try:
cursor.execute(sql,(it['nickname'],it['biaoqian'],it['sex'],it['place'],it['id'],it['fans'], it['follows'],it["lv"],it["res_time"],it['url']))
cursor.connection.commit()
except BaseException as e:
print u"錯誤在這里>>>>", e, u"<<<<錯誤在這里"
conn.commit()
if __name__ == '__main__':
    # Request headers shared (as a module-level global) by every fetch in this
    # script; the Cookie is what keeps the crawl authenticated for so long.
    headers = {
        "Connection": "keep-alive",
        "Accept": "application/json, text/plain, */*",
        "X-Requested-With": "XMLHttpRequest",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.96 Safari/537.36",
        "DNT": "1",
        "Accept-Encoding": "gzip, deflate, sdch",
        "Accept-Language": "zh-CN,zh;q=0.8",
        "Cookie": "_T_WM=10f51ed94c4f4324c8adfaeeaf3b6c7a; ALF=1496672460; SCF=Asenm4RgczizSoBCdtR_GtVS4yjQ0rI-fnnSnq1Z5h60mMnfCGjbxHYIsFwmxprk8h9DOikPiXPXiPKHXPyqVPQ.; SUB=_2A250CsRrDeRhGeNJ7VsZ8i_PyTiIHXVX9OwjrDV6PUNbktBeLWPgkW0aj2ty_KahcQEYR7pWKwQDBQsJCg..; SUBP=0033WrSXqPxfM725Ws9jqgMF55529P9D9W5VPg2H0mI-Z6NLOPLMSPkK5JpX5KMhUgL.Fo-NSo.Reo20eoB2dJLoI7DhqPi79gyyMGW4; SUHB=0k1CYdpYNb60Hz; SSOLoginState=1494135867; H5_INDEX=0_all; H5_INDEX_TITLE=Carpehappy; M_WEIBOCN_PARAMS=featurecode%3D20000180%26oid%3D4103787444327240%26luicode%3D10000011%26lfid%3D231051_-_fans_-_2322168320",
    }
    # Seed URL: page 1 of the start user's fans list.
    start = "http://m.weibo.cn/api/container/getIndex?containerid=231051_-_fans_-_2322168320&luicode=10000011&lfid=1005052322168320&featurecode=20000180&page=1"
    create_table()    # make sure the destination table exists
    start_url(start)  # kick off the crawl
# 個人信息頁:https://m.weibo.cn/api/container/getIndex?containerid=2302833936433808_-_INFO&title=%25E5%259F%25BA%25E6%259C%25AC%25E4%25BF%25A1%25E6%2581%25AF&luicode=10000011&lfid=2302833936433808&featurecode=20000180
# 粉絲頁:https://m.weibo.cn/api/container/getIndex?containerid=231051_-_fans_-_3936433808&luicode=10000011&lfid=1005053936433808&featurecode=20000180&page=2
# 注意:粉絲接口有分頁上限,最多只能取到 100 多條粉絲信息
吐槽一下,為什么我的這個IP打開簡書特別慢晤斩,幾乎是打不開的狀態(tài)...焕檬,無奈換了個IP才登上,為了登個簡書也是夠拼的澳泵。