python爬取素材公社图片

python爬取素材公社图片,第1张


import requests
import re
# import urllib
# from urllib import request,error

url = "https://www.tooopen.com/aicdr/1096_1100_1_1.aspx"
def askURL(url):
    header = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.54 Safari/537.36",
        "Referer": "https://www.tooopen.com/aicdr/1096_1100_1_2.aspx",
        "Cookie": "ASP.NET_SessionId=4dff2ec6-6f07-47cc-8de1-8f3478edf6e8; Hm_lvt_d3ac2f8840ead98242d6205eeff29cb4=1652173245; Hm_lpvt_d3ac2f8840ead98242d6205eeff29cb4=1652173337; RefreshFilter=http://www.tooopen.com/ajax/gethistory?callback=jQuery183003647995761540557_1652173336490&_=1652173336598"
    }
    res = requests.get(url,verify=False,headers=header)
    page_content = res.text
    obj = re.compile(r'.*?)"',re.S)
    result = obj.finditer(page_content)
    m = 0
    imgs = []
    for it in result:
        imgs.append(it.group('img'))

    ###使用urllib下载图片方式
    # m=0
    # for img in imgs:
    #     urllib.request.urlretrieve(img, "d:/download2/" + str(m) + '.jpg')  # 下载图片到本地
    
    ##使用requests下载图片方式

    for imgurl in imgs:
        imgres = requests.get(imgurl, verify=False, headers=header)
        with open("d:/download2/" + str(m) + '.jpg','wb') as f:
            f.write(imgres.content)
        m += 1

askURL(url)

代码提供了两种下载图片的方式,下载图片中,遇到403错误,加上下面这块代码就可以解决了,注意Referer和Cookie一起加上。

 header = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/101.0.4951.54 Safari/537.36",
        "Referer": "https://www.tooopen.com/aicdr/1096_1100_1_2.aspx",
        "Cookie": "ASP.NET_SessionId=4dff2ec6-6f07-47cc-8de1-8f3478edf6e8; Hm_lvt_d3ac2f8840ead98242d6205eeff29cb4=1652173245; Hm_lpvt_d3ac2f8840ead98242d6205eeff29cb4=1652173337; RefreshFilter=http://www.tooopen.com/ajax/gethistory?callback=jQuery183003647995761540557_1652173336490&_=1652173336598"
    }

欢迎分享,转载请注明来源:内存溢出

原文地址:https://54852.com/langs/904710.html

(0)
打赏 微信扫一扫微信扫一扫 支付宝扫一扫支付宝扫一扫
上一篇 2022-05-15
下一篇 2022-05-15

发表评论

登录后才能评论

评论列表(0条)

    保存