
Data Scraping with BS4

Author: 月下独酌123 | Published 2018-08-09 14:41

    BeautifulSoup4

    BeautifulSoup is an HTML/XML parser used mainly to parse and extract data from HTML/XML documents. It makes parsing HTML straightforward, its API is very intuitive, it supports CSS selectors, and it can use Python's standard-library HTML parser as well as lxml's HTML and XML parsers.


    [Figure: comparison of the supported parsers (规则对比.jpg)]
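    Since the comparison image is not reproduced here, a minimal sketch of how the parser is chosen when constructing a BeautifulSoup object (the markup string is just a placeholder):

    from bs4 import BeautifulSoup

    html = '<p>hello</p>'               # any markup string
    BeautifulSoup(html, 'html.parser')  # Python's standard-library HTML parser, no extra dependency
    BeautifulSoup(html, 'lxml')         # lxml's HTML parser (fast; requires the lxml package)
    BeautifulSoup(html, 'xml')          # lxml's XML parser, for XML documents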

    Installation

    pip install beautifulsoup4
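
    The examples below pass 'lxml' to BeautifulSoup, which is a separate package; if it is not already installed, either install it as well or fall back to the built-in 'html.parser':

    pip install lxml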

    Usage

    from bs4 import BeautifulSoup
    # a minimal sample document for illustration -- any HTML string works here
    html = '''
    <html><head><title>Example page</title></head>
    <body><p class="intro">Hello, BS4</p></body></html>
    '''
    soup = BeautifulSoup(html, 'lxml')
    print(soup.title)         # <title>Example page</title>
    print(soup.title.string)  # Example page
    
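    Because CSS selectors are supported, the parsed document can also be queried with select() and find_all(); a minimal sketch using the sample soup object from above (the tag names and classes are purely illustrative):

    first_p = soup.find('p')            # first <p> tag, or None if there is none
    for tag in soup.select('p.intro'):  # CSS selector: every <p class="intro">
        print(tag.get_text())           # text content of the tag
    link = soup.find('a')
    if link is not None:
        print(link['href'])             # attributes are accessed like dictionary keys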

    Example

    from bs4 import BeautifulSoup
    import random
    import requests
    import os
    import time
    
    __author__ = 'wangff'
    
    class StewPic:
        """Walk the album list pages and download every photo linked from them."""

        def __init__(self, start_url, page):
            self.start_url = start_url  # URL template with a {} placeholder for the page number
            self.page = page            # how many list pages to walk through
            self.i = 1                  # running counter used to name the downloaded files
            self.user_agent_list = [
                'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
                'Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; en) Presto/2.8.131 Version/11.11',
                'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
                'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
                'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36',
                'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Maxthon/4.9.2.1000 Chrome/39.0.2146.0 Safari/537.36',
                'Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11',
                'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3',
                'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3',
                'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/532.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/532.3',
                'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5',
                'Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11',
            ]
            # pick one User-Agent at random so the crawler does not always send the same one
            self.headers = {
                'User-Agent': random.choice(self.user_agent_list),
            }
    
        def get_url_list(self):
            # build the list-page URLs; range(1, page) yields pages 1 .. page-1
            url_list = [self.start_url.format(x) for x in range(1, self.page)]
            return url_list
    
        def get_item(self):
            url_list = self.get_url_list()

            for url in url_list:
                print(url)
                response = requests.get(url=url, headers=self.headers)
                soup = BeautifulSoup(response.text, 'lxml')
                # each entry on a list page links to a member's personal album page
                people_list = soup.select('div.ptw li div.c')
                for people in people_list:
                    print(people.a['href'])
                    self.into_personal(people.a['href'])
                    # pause 0-3 seconds between requests to stay polite
                    time.sleep(random.randint(0, 3))
    
        def into_personal(self, url):
            # open a member's page and collect all photo URLs on it
            _response = requests.get(url=url, headers=self.headers)
            if _response.text:
                soup = BeautifulSoup(_response.text, 'lxml')
                pic_url_list = soup.select('div.bm_c ul.ptw li a img')
                os.chdir(r'F:\airline stewardess')  # target directory; must exist beforehand
                for pic_url in pic_url_list:
                    print(pic_url['src'])
                    self.download_pic(pic_url['src'])
                    time.sleep(random.randint(0, 2))
            else:
                print('Photo page error!')
    
        def download_pic(self, url):
            response = requests.get(url=url, headers=self.headers)
            p = response.content

            try:
                # save the raw bytes under a sequential name: 1.jpg, 2.jpg, ...
                with open(str(self.i) + '.jpg', 'wb') as f:
                    f.write(p)
            except Exception as e:
                print(e)

            self.i += 1
    
    
    if __name__ == '__main__':
        stewardss_pic = StewPic('http://www.kongjie.com/home.php?mod=space&do=album&view=all&page={}', 100)
        stewardss_pic.get_item()
    
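    A few notes on the design: the random User-Agent choice and the random time.sleep() pauses are simple politeness/anti-blocking measures, and the CSS selectors ('div.ptw li div.c' and 'div.bm_c ul.ptw li a img') are tied to the kongjie.com page layout at the time of writing, so they will need adjusting if the markup changes. The target directory F:\airline stewardess must also exist before the script is run.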
