parent 7703bbf66f
commit 0487bb1e82
@@ -1,74 +1,8 @@
import requests
from bs4 import BeautifulSoup


def get_weather():
    url = "https://www.weather.com.cn/textFC/db.shtml"
    response = requests.get(url)
    html_content = response.content

    soup = BeautifulSoup(html_content, 'html.parser')

    # Get the dates
    day_tabs = soup.find('ul', class_='day_tabs')
    days = [day.text.strip() for day in day_tabs.find_all('li')][:3]  # only keep the first three days

    # Get the weather information
    weather_data = []
    tables = soup.find_all('table', width="100%")  # find all weather tables
    for table in tables:
        rows = table.find_all('tr')
        for row in rows:
            cells = row.find_all('td')
            if len(cells) >= 8:  # make sure this is a weather data row
                city = cells[1].text.strip()
                weather_info = {
                    'city': city,
                    'today': {
                        'day': {
                            'weather': cells[2].text.strip(),
                            'wind': cells[3].text.strip(),
                            'temp': cells[4].text.strip()
                        },
                        'night': {
                            'weather': cells[5].text.strip(),
                            'wind': cells[6].text.strip(),
                            'temp': cells[7].text.strip()
                        }
                    }
                }
                # Check whether data for tomorrow and the day after is present
                if len(cells) >= 15:
                    weather_info['tomorrow'] = {
                        'day': {
                            'weather': cells[9].text.strip(),
                            'wind': cells[10].text.strip(),
                            'temp': cells[11].text.strip()
                        },
                        'night': {
                            'weather': cells[12].text.strip(),
                            'wind': cells[13].text.strip(),
                            'temp': cells[14].text.strip()
                        }
                    }
                if len(cells) >= 22:
                    weather_info['day_after_tomorrow'] = {
                        'day': {
                            'weather': cells[16].text.strip(),
                            'wind': cells[17].text.strip(),
                            'temp': cells[18].text.strip()
                        },
                        'night': {
                            'weather': cells[19].text.strip(),
                            'wind': cells[20].text.strip(),
                            'temp': cells[21].text.strip()
                        }
                    }
                weather_data.append(weather_info)

    return days, weather_data
from WxMini.Utils.TianQiUtil import *


if __name__ == "__main__":
    days, weather_data = get_weather()

    print(weather_data)
    get_weather('长春')
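Because the get_weather imported from WxMini.Utils.TianQiUtil is declared async in the new file below, calling it bare as in the hunk above only creates a coroutine object without running it. A minimal sketch of a driver that actually executes it; the explicit import and __main__ guard here are illustrative, not part of the commit:

import asyncio

from WxMini.Utils.TianQiUtil import get_weather

if __name__ == "__main__":
    # asyncio.run drives the coroutine to completion and returns its result
    weather_data = asyncio.run(get_weather('长春'))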
@@ -0,0 +1,57 @@
import requests
from bs4 import BeautifulSoup


async def get_weather(city_name):
    url = "https://www.weather.com.cn/textFC/db.shtml"
    response = requests.get(url)
    html_content = response.content

    soup = BeautifulSoup(html_content, 'html.parser')

    # Get the dates
    day_tabs = soup.find('ul', class_='day_tabs')
    days = [day.text.strip() for day in day_tabs.find_all('li')]

    # Get the weather information
    weather_data = []
    tables = soup.find_all('table', width="100%")  # find all weather tables

    day = 0
    for table in tables:
        rows = table.find_all('tr')[2:]  # skip the two header rows
        for row in rows:
            cols = row.find_all('td')
            if len(cols) >= 8:  # make sure this is a data row (cols[7] is read below)
                city = cols[1].text.strip()
                if city != city_name:
                    continue
                day_weather = cols[2].text.strip()
                day_wind = cols[3].text.strip()
                day_temp = cols[4].text.strip()
                night_weather = cols[5].text.strip()
                night_wind = cols[6].text.strip()
                night_temp = cols[7].text.strip()

                d = days[day]
                day = day + 1
                weather_data.append({
                    'day': d,
                    'city': city,
                    'day_weather': day_weather,
                    'day_wind': day_wind,
                    'day_temp': day_temp,
                    'night_weather': night_weather,
                    'night_wind': night_wind,
                    'night_temp': night_temp
                })

    # Print the results
    for data in weather_data:
        print(f"Date: {data['day']}")
        print(f"City: {data['city']}")
        print(f"Daytime weather: {data['day_weather']}, wind direction/force: {data['day_wind']}, high temperature: {data['day_temp']}℃")
        print(f"Night weather: {data['night_weather']}, wind direction/force: {data['night_wind']}, low temperature: {data['night_temp']}℃")
        print('-' * 40)
    return weather_data
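For reference, a hedged sketch of consuming the list this coroutine returns: each entry is a flat dict keyed by 'day', 'city', and the day/night weather, wind, and temperature fields. The helper name tonight_summary and its message format are illustrative only, not part of the commit:

import asyncio

from WxMini.Utils.TianQiUtil import get_weather

async def tonight_summary(city: str) -> str:
    records = await get_weather(city)
    if not records:
        return f"No forecast found for {city}"
    today = records[0]  # first entry corresponds to the first tab in day_tabs
    return (f"{today['city']} {today['day']}: night {today['night_weather']}, "
            f"{today['night_wind']}, low {today['night_temp']}℃")

print(asyncio.run(tonight_summary('长春')))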
Binary file not shown.