Python 爬虫数据(中国人口信息)存入 MySQL 数据库
爬取网站:http://www.china-10.com/news/488659.html
代码以及注释如下:
# Scrape per-province population figures for China and store them in MySQL.
# Source page: http://www.china-10.com/news/488659.html
import re

import MySQLdb
import requests
from bs4 import BeautifulSoup

URL = "http://www.china-10.com/news/488659.html"

# Connect to the local MySQL schema "lianxi".
db = MySQLdb.connect("localhost", "root", "123456", "lianxi", charset="utf8")
cursor = db.cursor()
try:
    # Recreate the target table from scratch on every run.
    cursor.execute("DROP TABLE IF EXISTS china_population")
    # NOTE: `rank` is a reserved word in MySQL 8.0.2+, so it must be
    # backquoted; the unquoted form breaks on modern servers.
    cursor.execute(
        """CREATE TABLE china_population (
               `rank` INT NOT NULL,
               province CHAR(100) NOT NULL,
               population INT NOT NULL,
               rise INT NOT NULL)"""
    )

    # Fetch the page; fail fast on network/HTTP errors instead of
    # silently parsing an error page.
    response = requests.get(URL, timeout=10)
    response.raise_for_status()
    soup = BeautifulSoup(response.content, "html.parser")

    # Every data cell in the page's table carries the class "md_td".
    cells = soup.find_all(name="td", attrs={"class": re.compile(r"md_td")})
    # Cells 4..127 hold the table body: repeating groups of
    # (rank, province, population, rise), flattened into one list.
    # Indices were determined by inspecting the page's cell order.
    values = [cell.text for cell in cells[4:128]]

    insert_sql = (
        "INSERT INTO china_population(`rank`,province,population,rise) "
        "VALUES(%s,%s,%s,%s)"
    )
    # Walk the flat cell list four fields at a time — one table row per chunk.
    for start in range(0, len(values), 4):
        rank, province, population, rise = values[start:start + 4]
        # Population and rise are published in units of 10,000 people
        # (万); convert to absolute integers before storing.
        row = (
            int(rank),
            province,
            int(float(population) * 10000),
            int(float(rise) * 10000),
        )
        print(*row)
        # Parameterized insert — never interpolate scraped text into SQL.
        cursor.execute(insert_sql, row)
    # Commit once after all rows: one transaction instead of one per row.
    db.commit()
finally:
    # Always release the cursor and connection, even if scraping or an
    # SQL statement raised — the original leaked both on error.
    cursor.close()
    db.close()
主要步骤:
1.连接数据库并且创建表;
2.爬取并整理信息;
3.将信息存入数据库;
4.关闭数据库。
数据表展示如下: