Code:
import requests
import time
import random
# Candidate proxy addresses; get_json picks one at random per request.
# NOTE(review): bare IPs with no port — confirm these are usable proxy endpoints.
ip_list = ['117.135.132.107', '121.8.98.196', '194.116.198.212']
# HTTP request headers captured from a logged-in browser session
# (including the session Cookie) so the Ajax endpoint accepts the POST.
headers={
'Accept':'application/json, text/javascript, */*; q=0.01',
'Accept-Encoding':'gzip, deflate, br',
'Accept-Language':'zh-CN,zh;q=0.8',
'Connection':'keep-alive',
'Content-Length':'25',
'Content-Type':'application/x-www-form-urlencoded; charset=UTF-8',
'Cookie':'user_trace_token=20170214020222-9151732d-f216-11e6-acb5-525400f775ce; LGUID=20170214020222-91517b06-f216-11e6-acb5-525400f775ce; JSESSIONID=ABAAABAAAGFABEF53B117A40684BFB6190FCDFF136B2AE8; _putrc=ECA3D429446342E9; login=true; unick=yz; showExpriedIndex=1; showExpriedCompanyHome=1; showExpriedMyPublish=1; hasDeliver=0; PRE_UTM=; PRE_HOST=; PRE_SITE=; PRE_LAND=https%3A%2F%2Fwww.lagou.com%2F; TG-TRACK-CODE=index_navigation; Hm_lvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1494688520,1494690499,1496044502,1496048593; Hm_lpvt_4233e74dff0ae5bd0a3d81c6ccf756e6=1496061497; _gid=GA1.2.2090691601.1496061497; _gat=1; _ga=GA1.2.1759377285.1487008943; LGSID=20170529203716-8c254049-446b-11e7-947e-5254005c3644; LGRID=20170529203828-b6fc4c8e-446b-11e7-ba7f-525400f775ce; SEARCH_ID=13c3482b5ddc4bb7bfda721bbe6d71c7; index_location_city=%E6%9D%AD%E5%B7%9E',
'Host':'www.lagou.com',
'Origin':'https://www.lagou.com',
'Referer':'https://www.lagou.com/jobs/list_Python?',
'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36',
'X-Anit-Forge-Code':'0',
'X-Anit-Forge-Token':'None',
'X-Requested-With':'XMLHttpRequest'
}
def get_json(url, page, lange_name):
    """POST one search-results page to Lagou's positionAjax endpoint and return the decoded JSON.

    Args:
        url: positionAjax.json URL with the city query already embedded.
        page: 1-based results page number (sent as the 'pn' form field).
        lange_name: job keyword to search for (sent as the 'kd' form field).

    Returns:
        dict decoded from the JSON response body.
    """
    # 'first' (fixed from the typo 'firts') is the flag Lagou's Ajax API
    # expects alongside 'pn' (page number) and 'kd' (keyword).
    form_data = {'first': 'true', 'pn': page, 'kd': lange_name}
    proxy_ip = random.choice(ip_list)
    # The target URL is https, so the proxy must be registered for the
    # 'https' scheme too — the original 'http'-only mapping was never
    # consulted for https requests.
    proxies = {'http': 'http://' + proxy_ip, 'https': 'http://' + proxy_ip}
    response = requests.post(url, data=form_data, headers=headers, proxies=proxies)
    return response.json()
def parser_json(page, JsonDatas):
    """Parse one page of Lagou job-listing JSON into rows of strings.

    Args:
        page: page number, used only in progress messages.
        JsonDatas: decoded response dict; job records are read from
            JsonDatas['content']['positionResult']['result'].

    Returns:
        list of per-company rows, each a list of 9 strings:
        [city, position name, company name, education, job nature,
         advantages, salary, work-year requirement, create time].
    """
    companyInfos = []
    companies = JsonDatas['content']['positionResult']['result']
    print("正在解析{0}页招聘信息".format(page))
    for company in companies:
        # City, qualified with the district when one is present.
        if company['district'] is not None:
            city = company['city'] + '-' + company['district']
        else:
            city = company['city']
        # Full company name with the short name appended in parentheses.
        companyFullName = (company['companyFullName'] + '(' +
                           company['companyShortName'] + ')')
        # Replace half- and full-width commas with semicolons so the value
        # cannot break the comma-separated row it is later written into.
        positionAdvantage = (company['positionAdvantage']
                             .replace(',', ';').replace(',', ';'))
        companyInfos.append([
            city,
            company['positionName'],
            companyFullName,
            company['education'],
            company['jobNature'],
            positionAdvantage,
            company['salary'],
            company['workYear'],
            company['createTime'],  # read directly: the old local `time` shadowed the time module
        ])
    print("第{0}页解析完成".format(page))
    return companyInfos
def writeCSV(page, fw, companyInfos):
    """Append one page's parsed company rows to the open CSV file handle.

    Each row is joined with commas and terminated with a newline;
    `fw` is an already-open writable text file object.
    """
    lines = [",".join(row) + '\n' for row in companyInfos]
    fw.writelines(lines)
    print("第{0}页数据写入完毕".format(page))
def main():
    """Prompt for a job keyword and city, then scrape Lagou listing pages into a CSV file."""
    path = 'F:'  # drive/folder the CSV file is written to
    start_page = 1
    end_page = 20  # exclusive bound: pages start_page .. end_page-1 are fetched
    lange_name = input("请输入要所有的职位:")
    city = input("请输入工作地点:")
    # Build the Ajax URL with the city embedded in the query string.
    start_url = 'https://www.lagou.com/jobs/positionAjax.json?px=default&city='
    end_url = '&needAddtionalResult=false&isSchoolJob=0'
    url = start_url + city + end_url
    # '\\lagou_' fixes the invalid '\l' escape sequence; the runtime path is
    # unchanged ('\l' already evaluated to a literal backslash plus 'l').
    # 'with' guarantees the file is flushed and closed even if a request fails.
    with open(path + '\\lagou_' + lange_name + '.csv', 'a+') as fw:
        # CSV header row.
        row = ['工作地点','职位名称', '公司名称', '要求学历', '工作性质', '工作福利', '薪水', '工作经验要求','发布时间']
        fw.write(",".join(row) + '\n')
        for page in range(start_page, end_page):
            time.sleep(12)  # throttle requests to avoid Lagou's anti-scraping block
            print("正在抓取第{0}页招聘数据信息".format(page))
            # Fetch, parse, and persist one page of listings.
            JsonDatas = get_json(url, page, lange_name)
            companyInfos = parser_json(page, JsonDatas)
            writeCSV(page, fw, companyInfos)
    print("所有数据写入完毕")


if __name__ == '__main__':
    main()