Quickly insert 10 million rows of test data into a MySQL database
1. Create a table to store the data
CREATE TABLE `user` (
  `user_id` int auto_increment,
  `user_name` varchar(255) DEFAULT NULL,
  `user_password` varchar(255) DEFAULT NULL,
  `sex` char(2) DEFAULT NULL,
  `email` varchar(255) DEFAULT NULL,
  `phone` varchar(255) DEFAULT NULL,
  PRIMARY KEY (`user_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
2. Increase the packet size MySQL will accept
To avoid hammering the database with many small requests, I commit 100,000 rows at a time. With batches that large, MySQL's limit on the size of a single packet sent by the client has to be raised, otherwise one request could exceed it.
The variable to change is max_allowed_packet.
-- check the current maximum packet size
SHOW VARIABLES LIKE '%max_allowed_packet%';
-- raise it to 100 MB; the new value only shows up in sessions opened after the change
SET GLOBAL max_allowed_packet = 100*1024*1024;
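If you want to double-check the limit from the application side before kicking off the insert, a small JDBC query does the job. This is only a minimal sketch: the connection constants mirror the ones used in the classes below, and the class name CheckMaxAllowedPacket is just illustrative.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class CheckMaxAllowedPacket {
    // Connection settings assumed to match the insert code further down.
    private static final String URL = "jdbc:mysql://127.0.0.1:3306/test?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf-8";
    private static final String USERNAME = "root";
    private static final String PASSWORD = "rootroot";

    public static void main(String[] args) throws SQLException {
        try (Connection conn = DriverManager.getConnection(URL, USERNAME, PASSWORD);
             Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery("SHOW VARIABLES LIKE 'max_allowed_packet'")) {
            if (rs.next()) {
                // SHOW VARIABLES returns the value as a string, in bytes
                long bytes = Long.parseLong(rs.getString("Value"));
                System.out.println("max_allowed_packet = " + bytes / 1024 / 1024 + " MB");
            }
        }
    }
}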
3. Write Java code to insert the 10 million rows
We commit 100,000 rows at a time, 100 times in total. Because each commit carries 100,000 rows, auto-commit is turned off and the transaction is committed by hand, and since the data volume is so large the statements are sent through a PreparedStatement batch to speed up the inserts.
import lombok.extern.slf4j.Slf4j;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;

@Slf4j
public class InsertUserData {
    private static final String URL = "jdbc:mysql://127.0.0.1:3306/test?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf-8";
    private static final String DRIVER = "com.mysql.cj.jdbc.Driver";
    private static final String USERNAME = "root";
    private static final String PASSWORD = "rootroot";

    public static void main(String[] args) throws ClassNotFoundException, SQLException {
        Class.forName(DRIVER);
        Connection conn = DriverManager.getConnection(URL, USERNAME, PASSWORD);
        batchInsert(conn);
    }

    private static void batchInsert(Connection connection) {
        long start = System.currentTimeMillis();
        String prefix = "INSERT INTO test.user (user_name, user_password, sex, email, phone) VALUES ";
        StringBuilder suffix = new StringBuilder();
        try {
            // disable auto-commit so each 100,000-row batch is committed manually
            connection.setAutoCommit(false);
            // PreparedStatement batches are much faster than a plain Statement here
            PreparedStatement pst = connection.prepareStatement(" ");
            for (int i = 1; i <= 100; i++) {
                // build one multi-row INSERT containing 100,000 value tuples
                for (int j = 1; j <= 100000; j++) {
                    suffix.append("('xw" + i * j + "','123456','男','1612902xxx@qq.com','188xxxx5787'),");
                }
                // drop the trailing comma, send the statement, and commit the batch
                String sql = prefix + suffix.substring(0, suffix.length() - 1);
                pst.addBatch(sql);
                pst.executeBatch();
                connection.commit();
                suffix = new StringBuilder();
            }
            pst.close();
            connection.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        long end = System.currentTimeMillis();
        log.info("Time to insert 10,000,000 rows -> {}s", (end - start) / 1000);
    }
}
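For comparison, the same batch can also be written with bound parameters instead of concatenated SQL strings. The sketch below assumes the Connector/J option rewriteBatchedStatements=true is appended to the URL, which lets the driver rewrite the batched single-row inserts into multi-row statements on the wire; the class name InsertWithBoundParams is only illustrative. The commit granularity stays at 100,000 rows per transaction.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;

public class InsertWithBoundParams {
    // rewriteBatchedStatements=true tells Connector/J to rewrite the batch
    // into multi-row INSERTs instead of sending 100,000 single-row statements.
    private static final String URL = "jdbc:mysql://127.0.0.1:3306/test"
            + "?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf-8"
            + "&rewriteBatchedStatements=true";

    public static void main(String[] args) throws SQLException {
        String sql = "INSERT INTO test.user (user_name, user_password, sex, email, phone) VALUES (?, ?, ?, ?, ?)";
        try (Connection conn = DriverManager.getConnection(URL, "root", "rootroot");
             PreparedStatement pst = conn.prepareStatement(sql)) {
            conn.setAutoCommit(false);
            for (int i = 1; i <= 100; i++) {              // 100 batches
                for (int j = 1; j <= 100_000; j++) {      // 100,000 rows per batch
                    pst.setString(1, "xw" + ((i - 1) * 100_000 + j));
                    pst.setString(2, "123456");
                    pst.setString(3, "男");
                    pst.setString(4, "1612902xxx@qq.com");
                    pst.setString(5, "188xxxx5787");
                    pst.addBatch();
                }
                pst.executeBatch();
                conn.commit();
            }
        }
    }
}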
To squeeze out more speed, we can run the insert with 10 threads in parallel, each thread writing 1,000,000 rows. With this approach the whole job can finish within about 40 seconds.
import lombok.extern.slf4j.Slf4j;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;

@Slf4j
public class InsertUserData extends Thread {
    private static final String URL = "jdbc:mysql://127.0.0.1:3306/test?serverTimezone=GMT%2B8&useUnicode=true&characterEncoding=utf-8";
    private static final String DRIVER = "com.mysql.cj.jdbc.Driver";
    private static final String USERNAME = "root";
    private static final String PASSWORD = "rootroot";

    public static void main(String[] args) {
        // 10 threads, each inserting 1,000,000 rows
        for (int i = 1; i <= 10; i++) {
            new InsertUserData().start();
        }
    }

    @Override
    public void run() {
        Connection connection = null;
        try {
            Class.forName(DRIVER);
            connection = DriverManager.getConnection(URL, USERNAME, PASSWORD); // one connection per thread
        } catch (ClassNotFoundException | SQLException e) {
            e.printStackTrace();
        }
        long start = System.currentTimeMillis();
        String prefix = "INSERT INTO test.user (user_name, user_password, sex, email, phone) VALUES ";
        StringBuilder suffix = new StringBuilder();
        try {
            // disable auto-commit so each 100,000-row batch is committed manually
            connection.setAutoCommit(false);
            // PreparedStatement batches are much faster than a plain Statement here
            PreparedStatement pst = connection.prepareStatement(" ");
            for (int i = 1; i <= 10; i++) {
                // build one multi-row INSERT containing 100,000 value tuples
                for (int j = 1; j <= 100000; j++) {
                    suffix.append("('xw" + i * j + "','123456','男','1612902xxx@qq.com','188xxxx5787'),");
                }
                // drop the trailing comma, send the statement, and commit the batch
                String sql = prefix + suffix.substring(0, suffix.length() - 1);
                pst.addBatch(sql);
                pst.executeBatch();
                connection.commit();
                suffix = new StringBuilder();
            }
            pst.close();
            connection.close();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        long end = System.currentTimeMillis();
        log.info("Time for this thread to insert 1,000,000 rows -> {}s", (end - start) / 1000);
    }
}
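Note that the log line above measures each thread on its own. To get the total wall-clock time for all 10,000,000 rows, the workers can be driven from a small runner that waits for every thread to finish. This is only a sketch built on the InsertUserData class above; the runner class name and the one-hour timeout are just illustrative.
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class InsertUserDataRunner {
    public static void main(String[] args) throws InterruptedException {
        long start = System.currentTimeMillis();
        ExecutorService pool = Executors.newFixedThreadPool(10);
        for (int i = 1; i <= 10; i++) {
            // InsertUserData extends Thread and therefore implements Runnable,
            // so the pool's worker threads call its run() directly.
            pool.execute(new InsertUserData());
        }
        pool.shutdown();                             // no new tasks
        pool.awaitTermination(1, TimeUnit.HOURS);    // wait for all 10 workers to finish
        long end = System.currentTimeMillis();
        System.out.println("Total time for 10,000,000 rows: " + (end - start) / 1000 + "s");
    }
}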