Python3 crawler: fetching Baidu POI data with a moving-rectangle approach

Author: luogps9 | Published 2019-05-19 22:06

A recent project needed the POI data for a specific region. The approach: initialize a rectangle on the map, then move it step by step from the lower-left corner of the region to the upper-right corner; at each step, the POIs covered by the rectangle are the data we want. The point of this method is to divide the map into a grid so that information can be collected cell by cell. I'm writing it down here for my own future reference.
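The grid logic itself is simple. Below is a minimal sketch, independent of the full script further down, showing how the rectangle "moves" from the lower-left to the upper-right corner. The corner coordinates and the 0.5-degree step here are illustrative values only:

# Minimal sketch of the moving-rectangle (grid) idea; values are illustrative.
lat_min, lon_min = 24.39, 102.17   # lower-left corner of the target area
lat_max, lon_max = 26.55, 103.68   # upper-right corner of the target area
step = 0.5                         # side length of the moving rectangle, in degrees

bounds_list = []
lat = lat_min
while lat < lat_max:
    lon = lon_min
    while lon < lon_max:
        # Baidu's bounds parameter is "min_lat,min_lng,max_lat,max_lng"
        bounds_list.append(f"{lat},{lon},{lat + step},{lon + step}")
        lon += step
    lat += step

print(len(bounds_list), bounds_list[0])

Each entry of bounds_list is one position of the rectangle, and each one becomes a separate API query in the full script.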

Full code:
# coding=UTF-8
import requests
import time
import random
import csv

class Yn_School(object):
    def __init__(self):
        self.base_url = 'http://api.map.baidu.com/place/v2/search?query={}&scope=2&bounds={}&page_size=20&page_num={}&output=json&ak=YOUR_AK'  # replace YOUR_AK with your Baidu Maps API key
        self.user_agent_list = [
            {'User-Agent':"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.835.163 Safari/535.1"},
            {'User-Agent':"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0) Gecko/20100101 Firefox/6.0"},
            {'User-Agent':"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50"},
            {'User-Agent':"Opera/9.80 (Windows NT 6.1; U; zh-cn) Presto/2.9.168 Version/11.50"},
            {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36'},
            {'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.835.163 Safari/535.1'},
            {'User-Agent':'Mozilla/5.0 (Windows; U; Windows NT 6.1; ) AppleWebKit/534.12 (KHTML, like Gecko) Maxthon/3.0 Safari/534.12'},
            {'User-Agent':'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1)'},
            {'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/2.0 Safari/536.11'},
            {'User-Agent':'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16'},
            {'User-Agent':'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER '},
            {'User-Agent':'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; QQBrowser/7.0.3698.400) '},
            {'User-Agent':'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 SE 2.X MetaSr 1.0 '},
            {'User-Agent':'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3'},
            {'User-Agent':'Opera/9.80 (Windows NT 6.1; U; en) Presto/2.8.131 Version/11.11'},
            ]
        self.lat_1 = 24.390894   # latitude of the lower-left corner of the target area
        self.lon_1 = 102.174112  # longitude of the lower-left corner
        self.lat_2 = 26.548645   # latitude of the upper-right corner
        self.lon_2 = 103.678942  # longitude of the upper-right corner
        self.las_lat = 1         # latitude step of the moving rectangle, in degrees
        self.las_lon = 1         # longitude step of the moving rectangle, in degrees
        self.ak = 'YOUR_AK'      # your Baidu Maps API key (also fill it into base_url above)
        self.lat_count = int((self.lat_2 - self.lat_1) / self.las_lat + 1)  # number of grid rows
        self.lon_count = int((self.lon_2 - self.lon_1) / self.las_lon + 1)  # number of grid columns
        # self.user_ip_list =[{"http":"//116.7.176.75:8118"},{"http":"//119.179.60.117:8118"}]
        self.data_list=[]
        self.type = ['酒店']  # POI categories to query; swap in the full list below to crawl everything
        # self.type = ['美食','酒店','购物','生活服务','旅游景点','休闲娱乐','运动健身','教育培训','文化传媒','医疗','汽车服务','交通设施','金融','房地产','公司企业','政府机构']

    # 1. Build the list of request URLs (one per category, grid cell and result page)
    def get_url_list(self):
        url_list = []
        for i in self.type:
            for lat_c in range(0, self.lat_count):
                lat_b1 = self.lat_1 + self.las_lat * lat_c
                for lon_c in range(0, self.lon_count):
                    lon_b1 = self.lon_1 + self.las_lon * lon_c
                    # bounds format expected by the API: "min_lat,min_lng,max_lat,max_lng"
                    lan_all = str(lat_b1) + ',' + str(lon_b1) + ',' + str(lat_b1 + self.las_lat) + ',' + str(lon_b1 + self.las_lon)
                    for j in range(0, 1):   # number of result pages per rectangle; increase as needed
                        url_list.append(self.base_url.format(i, lan_all, j))
        return url_list
    # 2. Send a request with a random User-Agent and a random delay
    def send_request(self, url):
        headers = random.choice(self.user_agent_list)
        print(headers)
        # proxy = random.choice(self.user_ip_list)
        # print(proxy)
        time.sleep(random.random() * 3)  # random delay between requests
        html = requests.get(url, headers=headers)  # send the request
        data = html.json()  # parse the JSON response
        print("Crawling %s" % url)
        return data
    # 3. Parse the response into dictionaries (CSV column names are kept in Chinese, matching the original output)
    def parse_data(self, data):
        results = data.get('results', [])
        if len(results) > 0:
            for item in results:
                data_dict = {}
                # 'tag' may be missing or hold only one level, so guard the split
                tag_parts = item.get('detail_info', {}).get('tag', '').split(';')
                data_dict['一级分类'] = tag_parts[0]                                # primary category
                data_dict['二级分类'] = tag_parts[1] if len(tag_parts) > 1 else ''  # secondary category
                data_dict['名称'] = item['name']               # name
                data_dict['经度'] = item['location']['lng']    # longitude
                data_dict['纬度'] = item['location']['lat']    # latitude
                data_dict['城市'] = item['city']               # city
                data_dict['区域'] = item['area']               # district
                data_dict['地址'] = item['address']            # address
                if "telephone" in item:
                    data_dict['电话'] = item["telephone"].replace(',', ' ')  # phone number
                else:
                    data_dict['电话'] = ''
                self.data_list.append(data_dict)
                print(data_dict)
        else:
            print('No results for this rectangle')
    # 4. Write the collected data to a CSV file
    def save_data(self):
        if len(self.data_list) > 0:
            with open('BD_POI.csv', 'w', encoding='gbk', newline='') as csv_file:
                writer = csv.writer(csv_file)
                writer.writerow(self.data_list[0].keys())               # header row
                writer.writerows(data.values() for data in self.data_list)
            print("CSV written successfully")
        else:
            print('Nothing to write')
    # 5. Orchestrate: build the URLs, crawl each one, then save
    def run(self):
        url_list = self.get_url_list()
        for url in url_list:
            data = self.send_request(url)
            self.parse_data(data)
        self.save_data()

if __name__ == '__main__':
    Yn_School().run()
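Before launching the whole grid crawl, it can be worth firing a single request by hand to confirm the key is valid and the bounds string is accepted. A rough sketch follows; the query and bounds values are arbitrary examples and YOUR_AK must be replaced with your own key. A status of 0 means the request succeeded, anything else comes back with an explanatory message:

import requests

# One sanity-check request against the Place API (values are illustrative).
url = ('http://api.map.baidu.com/place/v2/search?query=酒店&scope=2'
       '&bounds=24.390894,102.174112,25.390894,103.174112'
       '&page_size=20&page_num=0&output=json&ak=YOUR_AK')
resp = requests.get(url).json()
print(resp.get('status'), resp.get('message'))   # 0 means success
print(len(resp.get('results', [])))              # POIs returned for this rectangle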
