import requests
import city
import time
# Query the weather for every city in city.citycode and keep track of the
# coldest city seen so far.
# bj = input('你想查詢哪個城市?')  # commented-out interactive variant
most_cold_city = "巴拉巴拉"   # placeholder name until the first real reading
most_cold_city_wendu = 100    # sentinel temperature, above any realistic value
a = 1                         # 1-based counter of queried cities
for i in city.citycode.keys():
    print("這是查詢的第%d個城市" % a)
    a += 1
    print(i)
    # NOTE(review): the URL format literal was lost when this file was
    # mangled (it was `"" % city.citycode[i]`, which raises TypeError).
    # TODO: prepend the real weather-API base URL; it must contain exactly
    # one %s placeholder for the city code.
    url = "%s" % city.citycode[i]
    try:
        xing_ying = requests.get(url, timeout=10)
        # .json() parses the HTTP response body into a dict
        yuan_dai_ma = xing_ying.json()
        city_name = yuan_dai_ma['cityinfo']['city']
        wendu = yuan_dai_ma["data"]["wendu"]   # temperature (string)
        shidu = yuan_dai_ma["data"]["shidu"]   # humidity (string)
        zong = "城市:" + city_name + '\n' + "溫度:" + wendu + "度\n" + "濕度:" + shidu
        print(zong)
        if int(wendu) < most_cold_city_wendu:
            # BUG FIX: the original assigned the imported `city` MODULE here
            # instead of the city's name.
            most_cold_city = city_name
            most_cold_city_wendu = int(wendu)
            print("最新的最冷城市是%s,最冷溫度是%s" % (city_name, wendu))
        print()
    except Exception:  # narrowed from a bare `except:` so Ctrl-C still works
        print('這個城市報錯了')
    time.sleep(3)  # throttle so we do not hammer the API
# Query the weather for a single city and print a short report.
import requests

# NOTE(review): the request URL literal was lost when this file was mangled —
# TODO: restore the real weather-API endpoint before running (requests.get("")
# raises MissingSchema as-is).
url = ""
# timeout added so the script cannot hang forever on a dead server
xing_ying = requests.get(url, timeout=10)
yuan_dai_ma = xing_ying.json()  # parse the JSON response body into a dict
print(yuan_dai_ma)
city = yuan_dai_ma['cityinfo']['city']
wendu = yuan_dai_ma["data"]["wendu"]  # temperature (string)
zong = "城市:" + city + '\n' + "溫度:" + wendu + "度"
print(zong)
# -*- coding: cp936 -*-
citycode =
# \ 反義字元 反應字元的其他意思
# 敵人的敵人就是朋友
# reverse 反轉的意思
print
(r'123\\n45\1\4\456\65\\675\75\6\456\45\455\34\4545\45\\45\45\45\\45\6'
)print
('123\b456'
)print
("123\t456"
)
API爬取天氣預報資料
和風天氣api 該網站為個人開發者提供免費的天氣預報資料,自行訪問官網註冊,在控制台看到個人的key。然後看api文件,基本可以開發了,有訪問次數限制。import requests import time import pymongo 和風天氣api提供了3000多個城市的天氣預報,我們先獲取這些城...
python爬蟲爬取天氣預報資訊
目標天氣預報 需要用到的庫有requests 用來傳送請求和接收url beautifulsoup 用來解析html文字 爬蟲的基本思路 1.首先傳送請求並返回requests 最好模擬谷歌瀏覽器的頭部訪問 即下面的headers 並且設定乙個每次訪問的間隔時間,這樣就不容易觸發 的反爬機制 說白了...
python用json解析爬取天氣預報資料
天氣預報 url 統一資源定位符 windows r cmd 開啟命令 輸入pip install requests 回車 import requests 引入python中內建的包 import json while 1 print 歡迎進入天氣查詢系統 city input 請輸入您要查詢的城市...