(2) Request with request headers (downloading a web page)

```python
from urllib.request import Request, urlopen

def search_baidu():
    # URL of the resource to fetch
    url = 'http://www.baidu.com'
    # Build the request object, wrapping the URL and the request headers
    request = Request(url, headers={
        'Cookie': 'BDUSS=FEczRjUH5HQVRXbThURTF5fnVadHVJY3o4a3c1ZjZSWW5iRlJCblJGb0tiZEZmSVFBQUFBJCQAAAAAAAAAAAEAAADz27f1AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAArgqV8K4KlfU1; BAIDUID=84064DDD18CD95AA7C759B7E9797C1AC:FG=1; BIDUPSID=84064DDD18CD95AA7C759B7E9797C1AC; PSTM=1604970252; H_PS_PSSID=32815_1425_33050_32951_33058_31254_32723_32961_32958_31709_26350_32910_22157; BD_UPN=12314753; BDORZ=B490B5EBF6F3CD402E515D22BCDA1598; BDSFRCVID=T7IOJeC62Rlme4crmSg5tO4q9If60P6TH6aobAgk_6MxiWy-lq7oEG0PHM8g0Ku-hD88ogKKWmOTH7IF_2uxOjjg8UtVJeC6EG0Ptf8g0M5; H_BDCLCKID_SF=JbAtoKD-JKvJfJjkM4rHqR_Lqxby26nnJ259aJ5nJDoCDb6Nj-bKhqtsjqO9tn_8WCFqoUtKQpP-HJ7PLtJOBTKI2MrUhCrQ-RvyKl0MLpnYbb0xyn_VMM3beMnMBMnUamOnaI3Y3fAKftnOM46JehL3346-35543bRTLnLy5KJYMDcnK4-XD6QyjGQP; yjs_js_security_passport=194e977878dd8fbf62e9a0a02dd0de23d2c499e0_1604971951_js; BDUSS_BFESS=FEczRjUH5HQVRXbThURTF5fnVadHVJY3o4a3c1ZjZSWW5iRlJCblJGb0tiZEZmSVFBQUFBJCQAAAAAAAAAAAEAAADz27f1AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAArgqV8K4KlfU1; delPer=0; BD_CK_SAM=1; PSINO=7; COOKIE_SESSION=9_2_4_3_8_1_0_0_3_1_0_0_4812_0_23_0_1604975188_1604970374_1604975165%7C6%230_1_1604970372%7C1; BD_HOME=1; BDRCVFR[feWj1Vr5u3D]=I67x6TjHwwYf0; H_PS_645EC=5ec9hGuQpNycuUsZe6Xii1YwYSP3SSJBZP3HGmGNDfHyQHAIiQgx%2FSGLiBT%2BOIH8mWMg; BA_HECTOR=0la4ahag21ag802uen1fqk0nk0p',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.193 Safari/537.36'
    })
    # Send the request
    response = urlopen(request)
    assert response.code == 200
    print('Request succeeded')
    # Read the response data
    bytes_ = response.read()
    # Write the response data to a file
    with open('baidu.html', 'wb') as file:
        file.write(bytes_)

if __name__ == '__main__':
    search_baidu()
```
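The long Cookie value above is tied to one particular browser session; for simply downloading the page, a User-Agent header is usually the essential part. As a minimal sketch (the function name `search_baidu_minimal` and output file `baidu_minimal.html` are made up for illustration), headers can also be attached after constructing the `Request` via `add_header()`:

```python
from urllib.request import Request, urlopen

def search_baidu_minimal():
    # Same URL as above; only a User-Agent header is attached here
    url = 'http://www.baidu.com'
    request = Request(url)
    # add_header() attaches a single header to an already-built Request
    request.add_header('User-Agent',
                       'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
                       'AppleWebKit/537.36 (KHTML, like Gecko) '
                       'Chrome/86.0.4240.193 Safari/537.36')
    response = urlopen(request)
    assert response.code == 200
    # Write the raw response bytes to a (hypothetical) output file
    with open('baidu_minimal.html', 'wb') as file:
        file.write(response.read())

if __name__ == '__main__':
    search_baidu_minimal()
```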