12. POST请求爬取数据实战
import json
json.loads(json_str)
json.dumps(dict)
- 使用urllib发送POST数据，并抓取百度翻译信息
from urllib import request,parse
import json
url = 'http://fanyi.baidu.com/sug'
data = {
'kw' : 'python'
}
data = parse.urlencode(data)
headers = {
'Content-Length' : len(data)
}
req = request.Request(url=url,data=bytes(data,encoding='utf-8'),headers=headers)
res = request.urlopen(req)
str_json = res.read().decode('utf-8')
myjson = json.loads(str_json)
info = myjson['data'][0]['v']
print(info)
- 使用requests发送POST数据，并抓取百度翻译信息
import requests
import json
url = 'http://fanyi.baidu.com/sug'
data = {
'kw' : 'python'
}
res = requests.post(url,data=data)
str_json = res.content.decode('utf-8')
myjson = json.loads(str_json)
info = myjson['data'][0]['v']
print(info)
import requests
import json
def fanyi(keyword):
url = 'http://fanyi.baidu.com/sug'
data = {
'kw' : keyword
}
res = requests.post(url,data=data)
str_json = res.content.decode('utf-8')
myjson = json.loads(str_json)
info = myjson['data'][0]['v']
print(info)
if __name__ == '__main__':
while True:
keyword = input('输入翻译的单词:')
if keyword == 'q':
break
fanyi(keyword)
from urllib import request,parse
import json
def fanyi(keyword):
base_url = 'http://fanyi.baidu.com/sug'
data = {
'kw' : keyword
}
data = parse.urlencode(data)
headers = {
'Content-Length':len(data)
}
req = request.Request(url=base_url,data=bytes(data,encoding='utf-8'),headers=headers)
res = request.urlopen(req)
str_json = res.read().decode('utf-8')
myjson = json.loads(str_json)
info = myjson['data'][0]['v']
print(info)
if __name__ == '__main__':
while True:
keyword = input('输入翻译的单词:')
if keyword == 'q':
break
fanyi(keyword)