Below is the version that saves directly to the database:
import pandas as pd
from bs4 import BeautifulSoup
import re
import time
import requests
import pymysql
import datetime

# Fetch a page and return its HTML text
def getPage(url):
    # Set the request headers
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/105.0.0.0 Safari/537.36'
    }
    # Request the page
    html = requests.get(url, headers=headers)
    # Check the status code; 200 means the request succeeded
    if html.status_code == 200:
        # Return the page's text content
        return html.text
    else:
        return None

# Map a date to its Chinese weekday name
def getWeekend(year, month, day):
    week_list = ["星期一", "星期二", "星期三", "星期四", "星期五", "星期六", "星期日"]
    return week_list[datetime.date(year, month, day).weekday()]

# Extract a yyyymmdd date from an <a> tag, reformat it as yyyy-mm-dd,
# and return it together with the weekday name
def concatDate(dateString):
    date = re.findall('>(.*?)</a>', dateString)[0]
    dateYear = date[:4]
    dateMonth = date[4:6]
    dateDay = date[6:]
    dateString = dateYear + '-' + dateMonth + '-' + dateDay
    weekendString = getWeekend(int(dateYear), int(dateMonth), int(dateDay))
    return dateString, weekendString
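# Example (illustrative input; the href below is hypothetical, assuming the site
# wraps dates like 20230101 in <a> tags, as str(find_all('a')) would produce):
#   concatDate('[<a href="/lishi/20230101.html">20230101</a>]')
#   -> ('2023-01-01', '星期日')   # 2023-01-01 was a Sunday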
# Parse one city page into parallel lists of weather fields
def analysisPage(url, city):
    html = getPage(url)
    page = BeautifulSoup(html, 'html.parser')
    # Temperatures
    lowerTemperature, higherTemperature = [], []
    # Dates and weekdays
    dateList, weekList = [], []
    # Weather descriptions
    weatherList = []
    # Wind
    windList = []
    dataList = page.find_all('tr')
    for item in dataList:
        date = item.find_all('a')
        weatherInformation = item.find_all('td')
        # Rows without an <a> date link (e.g. the header row) carry no data
        if date != []:
            date, weekend = concatDate(str(date))
            dateList.append(date)
            weekList.append(weekend)
            temperatureInfo = weatherInformation[1]
            temperatureInfo = re.findall('>(.*?)</td>', str(temperatureInfo))[0]
            higherTemperatureInfo = re.findall('~(.*?)℃', temperatureInfo)
            lowerTemperatureInfo = re.findall('(.*?)~', temperatureInfo)
            higherTemperature.append(higherTemperatureInfo[0])
            lowerTemperature.append(lowerTemperatureInfo[0])
            weatherInfo = weatherInformation[2]
            weatherInfo = re.findall('>(.*?)</td>', str(weatherInfo))[0]
            weatherList.append(weatherInfo)
            windDirection = weatherInformation[3]
            windDirection = re.findall('>(.*?)</td>', str(windDirection))[0]
            windLevel = weatherInformation[4]
            windLevel = re.findall('>(.*?)</td>', str(windLevel))[0]
            windList.append(windDirection + ' ' + windLevel)
    # Record the crawl time and city for every row
    crawlingTime, cityList = [], []
    for i in range(len(dateList)):
        crawlingTime.append(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
        cityList.append(city)
    res = {
        '日期': dateList,
        '星期': weekList,
        '天气': weatherList,
        '最高气温': higherTemperature,
        '最低气温': lowerTemperature,
        '风速信息': windList,
        '城市': cityList,
        '抓取时间': crawlingTime}
    return res
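# The returned dict maps column names to parallel lists, ready for pd.DataFrame,
# e.g. (illustrative values): {'日期': ['2023-01-01', ...], '星期': ['星期日', ...], ...}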
# Reshape the DataFrame into a list of row lists for the database inserts later
def concatData(Dataframe):
    res = []
    for i in range(len(Dataframe)):
        # print(list(Dataframe.iloc[i][:]))
        res.append(list(Dataframe.iloc[i][:]))
    return res
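# Note: pandas can produce the same list of rows in one call, e.g.
# Dataframe.values.tolist(); the explicit loop above mirrors the original code.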
# Open a database connection and return a cursor plus the connection
def operateSQL():
    # Create the database connection
    db = pymysql.connect(
        host="localhost",
        port=3306,
        user="root",
        passwd='1234567',
        db='studenttest',
        charset='utf8'
    )
    # Create a cursor object
    cursor = db.cursor()
    return cursor, db
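# Note: the credentials above are hardcoded for simplicity; in practice consider
# loading them from environment variables, and close the cursor and connection
# (cursor.close(), db.close()) once you are done with them.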
# Scrape one city, save a CSV snapshot, and insert the rows into MySQL
def saveData(url, cityName, tableName):
    # url = 'https://tianqi.2345.com/today-59493.htm'
    res = analysisPage(url, cityName)
    data = pd.DataFrame(res)
    # Save the data (note: encoding is an argument of to_csv, not of format)
    data.to_csv('Resfile{}_{}.csv'.format(cityName, time.strftime('%Y-%m-%d', time.localtime(time.time()))), encoding='utf-8')
    cursor, db = operateSQL()
    # Table names cannot be parameterized, so the table name is interpolated here
    cursor.execute("CREATE TABLE IF NOT EXISTS %s ( ID INT AUTO_INCREMENT PRIMARY KEY, DATETIMES VARCHAR(255), WEATHER VARCHAR(255), WEEKEND VARCHAR(255), LOWER_TEMPERATURE VARCHAR(255), HIGH_TEMPERATURE VARCHAR(255), WIND VARCHAR(255), CITY VARCHAR(255), CRAWLING_TIME DATE)" % tableName)
    keysBox = ['DATETIMES', 'WEEKEND', 'WEATHER', 'HIGH_TEMPERATURE', 'LOWER_TEMPERATURE', 'WIND', 'CITY', 'CRAWLING_TIME']
    b = concatData(data)
    for item in b:
        # Build the INSERT statement
        sql = "INSERT INTO {0} ({1},{2},{3},{4},{5},{6},{7},{8}) VALUES({9},{10},{11},{12},{13},{14},{15},{16})".format(tableName, keysBox[0], keysBox[1], keysBox[2], keysBox[3], keysBox[4], keysBox[5], keysBox[6], keysBox[7], repr(item[0]), repr(item[1]), repr(item[2]), repr(item[3]), repr(item[4]), repr(item[5]), repr(item[6]), repr(item[7]))
        print('Executing SQL: %s' % sql)
        print('--- --- ---')
        # Execute the INSERT and commit it
        cursor.execute(sql)
        db.commit()
    return None
# Crawl every city in cityDict and store the results
def getHistoryWeather():
    # cityDict maps each city's Chinese name to the pinyin slug used in the URL
    cityDict = {'长沙': 'changsha', '杭州': 'hangzhou', '南宁': 'nanning', '深圳': 'shenzhen', '苏州': 'suzhou', '西安': 'xian', '厦门': 'xiamen'}
    tableName = 'future_data'
    cursor, db = operateSQL()
    # Drop any previous table so each run starts fresh
    cursor.execute('DROP TABLE IF EXISTS {}'.format(tableName))
    for key, value in cityDict.items():
        url = 'https://www.tianqishi.com/lishi/%s.html' % value
        saveData(url, cityName=key, tableName=tableName)
        time.sleep(5)
        print('%s done' % key)

if __name__ == '__main__':
    # url = 'https://www.tianqishi.com/lishi/changsha.html'
    # res = analysisPage(url, '长沙')
    # print(res)
    getHistoryWeather()
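A quick note on the INSERT above: building SQL with repr() works for this data, but pymysql also supports parameterized queries, which handle escaping for you. A minimal sketch, assuming the cursor, connection, and row list b from saveData above:

# Sketch: parameterized insert with pymysql (columns match keysBox above)
insert_sql = ("INSERT INTO future_data "
              "(DATETIMES, WEEKEND, WEATHER, HIGH_TEMPERATURE, LOWER_TEMPERATURE, WIND, CITY, CRAWLING_TIME) "
              "VALUES (%s, %s, %s, %s, %s, %s, %s, %s)")
for item in b:
    cursor.execute(insert_sql, item)  # pymysql escapes each value
db.commit()

Table names cannot be parameterized, which is why the CREATE TABLE statement still interpolates tableName with string formatting.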
Below is the code that saves directly to a local CSV file:
import pandas as pd
from bs4 import BeautifulSoup
import re
import time
import requests
import datetime

# Fetch a page and return its HTML text
def getPage(url):
    # Set the request headers
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/105.0.0.0 Safari/537.36'
    }
    # Request the page
    html = requests.get(url, headers=headers)
    # Check the status code; 200 means the request succeeded
    if html.status_code == 200:
        # Return the page's text content
        return html.text
    else:
        return None

# Map a date to its Chinese weekday name
def getWeekend(year, month, day):
    week_list = ["星期一", "星期二", "星期三", "星期四", "星期五", "星期六", "星期日"]
    return week_list[datetime.date(year, month, day).weekday()]

# Extract a yyyymmdd date from an <a> tag, reformat it as yyyy-mm-dd,
# and return it together with the weekday name
def concatDate(dateString):
    date = re.findall('>(.*?)</a>', dateString)[0]
    dateYear = date[:4]
    dateMonth = date[4:6]
    dateDay = date[6:]
    dateString = dateYear + '-' + dateMonth + '-' + dateDay
    weekendString = getWeekend(int(dateYear), int(dateMonth), int(dateDay))
    return dateString, weekendString
# Parse one city page into parallel lists of weather fields
def analysisPage(url, city):
    html = getPage(url)
    page = BeautifulSoup(html, 'html.parser')
    # Temperatures
    lowerTemperature, higherTemperature = [], []
    # Dates and weekdays
    dateList, weekList = [], []
    # Weather descriptions
    weatherList = []
    # Wind
    windList = []
    dataList = page.find_all('tr')
    for item in dataList:
        date = item.find_all('a')
        weatherInformation = item.find_all('td')
        # Rows without an <a> date link (e.g. the header row) carry no data
        if date != []:
            date, weekend = concatDate(str(date))
            dateList.append(date)
            weekList.append(weekend)
            temperatureInfo = weatherInformation[1]
            temperatureInfo = re.findall('>(.*?)</td>', str(temperatureInfo))[0]
            higherTemperatureInfo = re.findall('~(.*?)℃', temperatureInfo)
            lowerTemperatureInfo = re.findall('(.*?)~', temperatureInfo)
            higherTemperature.append(higherTemperatureInfo[0])
            lowerTemperature.append(lowerTemperatureInfo[0])
            weatherInfo = weatherInformation[2]
            weatherInfo = re.findall('>(.*?)</td>', str(weatherInfo))[0]
            weatherList.append(weatherInfo)
            windDirection = weatherInformation[3]
            windDirection = re.findall('>(.*?)</td>', str(windDirection))[0]
            windLevel = weatherInformation[4]
            windLevel = re.findall('>(.*?)</td>', str(windLevel))[0]
            windList.append(windDirection + ' ' + windLevel)
    # Record the crawl time and city for every row
    crawlingTime, cityList = [], []
    for i in range(len(dateList)):
        crawlingTime.append(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())))
        cityList.append(city)
    res = {
        '日期': dateList,
        '星期': weekList,
        '天气': weatherList,
        '最高气温': higherTemperature,
        '最低气温': lowerTemperature,
        '风速信息': windList,
        '城市': cityList,
        '抓取时间': crawlingTime}
    return res
# Reshape the DataFrame into a list of row lists (kept for parity with the
# database version; unused in this CSV-only script)
def concatData(Dataframe):
    res = []
    for i in range(len(Dataframe)):
        # print(list(Dataframe.iloc[i][:]))
        res.append(list(Dataframe.iloc[i][:]))
    return res
# Scrape one city and save it to a local CSV file
def saveData(url, cityName, tableName):
    # url = 'https://tianqi.2345.com/today-59493.htm'
    res = analysisPage(url, cityName)
    data = pd.DataFrame(res)
    # Save the data (note: encoding is an argument of to_csv, not of format)
    data.to_csv('Resfile{}_{}.csv'.format(cityName, time.strftime('%Y-%m-%d', time.localtime(time.time()))), encoding='utf-8')
    # The database insert code is omitted here; see the database version above.
    # tableName is kept only so both versions share the same signature.
    return None
# Crawl every city in cityDict and save the results to CSV files
def getHistoryWeather():
    # cityDict maps each city's Chinese name to the pinyin slug used in the URL
    cityDict = {'长沙': 'changsha', '杭州': 'hangzhou', '南宁': 'nanning', '深圳': 'shenzhen', '苏州': 'suzhou', '西安': 'xian', '厦门': 'xiamen'}
    tableName = 'future_data'
    for key, value in cityDict.items():
        url = 'https://www.tianqishi.com/lishi/%s.html' % value
        saveData(url, cityName=key, tableName=tableName)
        time.sleep(5)
        print('%s done' % key)

if __name__ == '__main__':
    # url = 'https://www.tianqishi.com/lishi/changsha.html'
    # res = analysisPage(url, '长沙')
    # print(res)
    getHistoryWeather()
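To check the output, the CSV can be read back with pandas. A minimal sketch (the file name below is illustrative; saveData builds it from the city name and the current date):

import pandas as pd
# Hypothetical file name: adjust the city and date to match your run
df = pd.read_csv('Resfile长沙_2023-09-01.csv', encoding='utf-8', index_col=0)
print(df.head())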
Feel free to message me if you have any questions. I've been busy lately, so replies may be slow; thanks for your patience!