This article compares the performance of three ways to import 1,000,000 rows into a MySQL database.
The three approaches are:
(1) inserting row by row with individual INSERT statements
(2) inserting with INSERT statements committed in batches
(3) using MySQL's built-in LOAD DATA command
Scenario: suppose we need to send an SMS to 1,000,000 phone numbers and have received those numbers as a txt file; the numbers must first be imported into the database so the SMS system can send the messages. Many other scenarios involve bulk imports as well, for example migrating data from an old system into a new one during a system cutover.
First, let's prepare the database table, the 1,000,000 phone numbers, the Java code that generates the INSERT statements, and the Java code that submits those INSERT statements to the database in batches.
(1) The database table is as follows:
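The table definition itself is not reproduced in this text, so here is a minimal sketch of recv_list for reference. The column names are taken from the INSERT statement generated later in the article; the data types, lengths and primary key are assumptions.

-- Minimal sketch of recv_list; column names come from the INSERT statement below,
-- while the data types, lengths and primary key are assumptions.
CREATE TABLE recv_list (
    ID               BIGINT        NOT NULL PRIMARY KEY,   -- snowflake id
    APP_ID           INT           NOT NULL,
    APP_NAME         VARCHAR(64),
    TEMPLATE_ID      INT,
    TEMPLATE_CONTENT VARCHAR(512),
    TELEPHONE_LIST   VARCHAR(1024),
    CREATE_DATE      CHAR(8),                              -- e.g. 20231230
    CREATE_TIME      CHAR(6),                              -- e.g. 170000
    STATUS           CHAR(1)                               -- 9 initial, 0 sent, 1 failed, 2 sending
);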
(2) Generate 1,000,000 phone numbers
package com.fd.demo.common;

import org.springframework.stereotype.Component;

import java.io.*;

/**
 * @Author: thinkpad
 * @Date: 2023-12-30 9:23
 */
@Component
public class TelephoneGenerate {

    /**
     * Generate the given number of phone numbers.
     * @param count number of phone numbers to generate
     */
    public void generate(int count) throws Exception {
        String telephoneBase = "13900000000";
        if (count > 100000000) {
            throw new Exception("At most 100 million phone numbers can be generated");
        }
        String fileName = "telephoneGenerate.txt";
        File file = new File(fileName);
        if (!file.exists()) {
            file.createNewFile();
        }
        // Demo code; exception handling is omitted
        FileOutputStream fileOutputStream = new FileOutputStream(file);
        OutputStreamWriter outputStreamWriter = new OutputStreamWriter(fileOutputStream);
        BufferedWriter bufferedWriter = new BufferedWriter(outputStreamWriter);
        for (int i = 0; i < count; i++) {
            // Pad i into the 11-digit base, producing 13900000000 ... 13900999999 for count = 1,000,000
            String str = String.valueOf(i);
            String telephone = telephoneBase.substring(0, 11 - str.length()) + str;
            bufferedWriter.write(telephone);
            bufferedWriter.write("\n");
        }
        bufferedWriter.flush();
        bufferedWriter.close();
        outputStreamWriter.close();
        fileOutputStream.close();
    }

    public static void main(String[] args) throws Exception {
        TelephoneGenerate telephoneGenerate = new TelephoneGenerate();
        long beginTime = System.currentTimeMillis();
        telephoneGenerate.generate(1000000);
        long endTime = System.currentTimeMillis();
        System.out.println("cost time: " + (endTime - beginTime) + "ms");
    }
}
(3) Generate 1,000,000 INSERT statements
package com.fd.demo.common;

import org.springframework.stereotype.Component;

import java.io.*;
import java.util.Objects;

/**
 * @Author: thinkpad
 * @Date: 2023-12-30 9:22
 */
@Component
public class MySQLInsertGenerate {

    /**
     * Generate the INSERT statements.
     */
    public void generateInsertSQL() throws Exception {
        String fileName = "telephoneGenerate.txt";
        File file = new File(fileName);
        // Demo code; exception handling is omitted
        FileInputStream fileInputStream = new FileInputStream(file);
        InputStreamReader inputStreamReader = new InputStreamReader(fileInputStream);
        BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
        String sqlFileName = "telephoneInsertSQL.txt";
        File sqlFile = new File(sqlFileName);
        FileOutputStream fileOutputStream = new FileOutputStream(sqlFile);
        OutputStreamWriter outputStreamWriter = new OutputStreamWriter(fileOutputStream);
        BufferedWriter bufferedWriter = new BufferedWriter(outputStreamWriter);
        String telephone = bufferedReader.readLine();
        while (!Objects.isNull(telephone) && !"".equals(telephone)) {
            TelephoneRecvListVO telephoneRecvListVO = new TelephoneRecvListVO();
            telephoneRecvListVO.setId(IdWorker.nextId()); // id generated with the snowflake algorithm
            telephoneRecvListVO.setAppId(2);
            telephoneRecvListVO.setAppName("demo");
            telephoneRecvListVO.setTemplateId(2);
            telephoneRecvListVO.setTemplateContent("Test SMS content");
            telephoneRecvListVO.setTelephoneList(telephone); // multiple numbers can be stored separated by a delimiter
            telephoneRecvListVO.setCreateDate("20231230");
            telephoneRecvListVO.setCreateTime("170000");
            telephoneRecvListVO.setStatus("9"); // 9 = initial, 0 = sent, 1 = send failed, 2 = sending
            String insertSql = generate(telephoneRecvListVO);
            bufferedWriter.write(insertSql);
            bufferedWriter.write("\n");
            telephone = bufferedReader.readLine();
        }
        bufferedWriter.flush();
        bufferedWriter.close();
        outputStreamWriter.close();
        fileOutputStream.close();
        inputStreamReader.close();
        fileInputStream.close();
    }

    // The statement could also be assembled via reflection
    private String generate(TelephoneRecvListVO telephoneRecvListVO) {
        StringBuilder sb = new StringBuilder();
        sb.append("insert into recv_list(ID, APP_ID, APP_NAME, TEMPLATE_ID, TEMPLATE_CONTENT, TELEPHONE_LIST, CREATE_DATE, CREATE_TIME, STATUS) values(");
        sb.append(telephoneRecvListVO.getId());
        sb.append(",");
        sb.append(telephoneRecvListVO.getAppId());
        sb.append(",");
        sb.append("'" + telephoneRecvListVO.getAppName() + "'");
        sb.append(",");
        sb.append(telephoneRecvListVO.getTemplateId());
        sb.append(",");
        sb.append("'" + telephoneRecvListVO.getTemplateContent() + "'");
        sb.append(",");
        sb.append("'" + telephoneRecvListVO.getTelephoneList() + "'");
        sb.append(",");
        sb.append("'" + telephoneRecvListVO.getCreateDate() + "'");
        sb.append(",");
        sb.append("'" + telephoneRecvListVO.getCreateTime() + "'");
        sb.append(",");
        sb.append("'" + telephoneRecvListVO.getStatus() + "'");
        sb.append(");");
        return sb.toString();
    }

    public static void main(String[] args) throws Exception {
        MySQLInsertGenerate generate = new MySQLInsertGenerate();
        long beginTime = System.currentTimeMillis();
        generate.generateInsertSQL();
        long endTime = System.currentTimeMillis();
        System.out.println("cost time: " + (endTime - beginTime) + " ms");
    }
}
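TelephoneRecvListVO and IdWorker are classes from the author's project and are not shown in the article. For reference, a minimal sketch of the VO is given below; the field names are inferred from the setters used above and the types are assumptions (IdWorker is assumed to be a snowflake-ID generator returning a long).

public class TelephoneRecvListVO {
    // Field names mirror the setters called in MySQLInsertGenerate; types are assumptions.
    private long id;
    private int appId;
    private String appName;
    private int templateId;
    private String templateContent;
    private String telephoneList;
    private String createDate;
    private String createTime;
    private String status;

    public long getId() { return id; }
    public void setId(long id) { this.id = id; }
    public int getAppId() { return appId; }
    public void setAppId(int appId) { this.appId = appId; }
    public String getAppName() { return appName; }
    public void setAppName(String appName) { this.appName = appName; }
    public int getTemplateId() { return templateId; }
    public void setTemplateId(int templateId) { this.templateId = templateId; }
    public String getTemplateContent() { return templateContent; }
    public void setTemplateContent(String templateContent) { this.templateContent = templateContent; }
    public String getTelephoneList() { return telephoneList; }
    public void setTelephoneList(String telephoneList) { this.telephoneList = telephoneList; }
    public String getCreateDate() { return createDate; }
    public void setCreateDate(String createDate) { this.createDate = createDate; }
    public String getCreateTime() { return createTime; }
    public void setCreateTime(String createTime) { this.createTime = createTime; }
    public String getStatus() { return status; }
    public void setStatus(String status) { this.status = status; }
}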
(4) Insert rows into recv_list one at a time
package com.fd.demo.mysqlinsert;

import java.io.*;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.util.Objects;

/**
 * @Author: thinkpad
 * @Date: 2023-12-30 19:07
 */
public class SingleInsert {

    public void singleInsert() throws Exception {
        File file = new File("telephoneInsertSQL.txt");
        FileInputStream fileInputStream = new FileInputStream(file);
        InputStreamReader inputStreamReader = new InputStreamReader(fileInputStream);
        BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
        try {
            Connection conn = MySqlJDBC.getConnection();
            String sql = bufferedReader.readLine();
            long currentIndex = 1;
            while (!Objects.isNull(sql) && !"".equals(sql)) {
                if (currentIndex % 10000 == 0) {
                    System.out.println("Current Index = " + currentIndex);
                }
                PreparedStatement ps = conn.prepareStatement(sql);
                ps.execute();
                ps.close(); // close each statement to avoid leaking resources
                sql = bufferedReader.readLine();
                currentIndex++;
            }
            conn.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
        bufferedReader.close();
        inputStreamReader.close();
        fileInputStream.close();
    }

    public static void main(String[] args) throws Exception {
        SingleInsert singleInsert = new SingleInsert();
        long beginTime = System.currentTimeMillis();
        singleInsert.singleInsert();
        long endTime = System.currentTimeMillis();
        System.out.println("cost time: " + (endTime - beginTime) + " ms");
    }
}
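MySqlJDBC is a small helper class from the project and is not shown in the article. A minimal sketch is given below; the JDBC URL, user name and password are placeholders that you would replace with your own settings, and the MySQL Connector/J driver is assumed to be on the classpath.

import java.sql.Connection;
import java.sql.DriverManager;

public class MySqlJDBC {
    // Placeholder connection settings; replace with your own database, user and password.
    private static final String URL = "jdbc:mysql://localhost:3306/demo?useUnicode=true&characterEncoding=utf8";
    private static final String USER = "root";
    private static final String PASSWORD = "password";

    public static Connection getConnection() throws Exception {
        // Obtain a plain JDBC connection; a connection pool is not needed for this demo.
        return DriverManager.getConnection(URL, USER, PASSWORD);
    }
}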
Single-row inserts took nearly 31 hours in total.
(5) Batch-insert into recv_list (commit a transaction every 10,000 rows)
package com.fd.demo.mysqlinsert;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.sql.Statement;
import java.util.Objects;

/**
 * @Author: thinkpad
 * @Date: 2023-12-30 19:23
 */
public class BatchInsert {

    public void batchInsert() throws Exception {
        File file = new File("telephoneInsertSQL.txt");
        FileInputStream fileInputStream = new FileInputStream(file);
        InputStreamReader inputStreamReader = new InputStreamReader(fileInputStream);
        BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
        try {
            Connection conn = MySqlJDBC.getConnection();
            Statement statement = conn.createStatement();
            conn.setAutoCommit(false); // the key point: disable auto-commit so many inserts share one transaction
            String sql = bufferedReader.readLine();
            long currentIndex = 1;
            while (!Objects.isNull(sql) && !"".equals(sql)) {
                statement.executeUpdate(sql);
                if (currentIndex % 10000 == 0) {
                    System.out.println("currentIndex = " + currentIndex);
                    conn.commit(); // commit every 10,000 rows
                }
                sql = bufferedReader.readLine();
                currentIndex++;
            }
            conn.commit(); // commit any remaining rows of the last, partial batch
            statement.close();
            conn.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
        bufferedReader.close();
        inputStreamReader.close();
        fileInputStream.close();
    }

    public static void main(String[] args) throws Exception {
        BatchInsert batchInsert = new BatchInsert();
        long beginTime = System.currentTimeMillis();
        batchInsert.batchInsert();
        long endTime = System.currentTimeMillis();
        System.out.println("cost time: " + (endTime - beginTime) + " ms");
    }
}
Batch inserting took only about 4 minutes, vastly outperforming single-row inserts.
(6) Use MySQL's LOAD DATA command
load data infile 'G:/idea_workspace/MySQLPerformance/telephoneImportSQL.txt'
into table recv_list fields terminated by ',' enclosed by '"'
lines terminated by '\n';
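Note that LOAD DATA reads a delimited data file (here telephoneImportSQL.txt), not the INSERT statements generated earlier, and the article does not show how that file is produced. A minimal sketch of building one comma-separated, double-quoted line per record is given below; the class name is hypothetical and the field order is assumed to match the column order of recv_list.

public class LoadDataLineGenerate {
    /**
     * Build one line of the LOAD DATA input file (telephoneImportSQL.txt is assumed).
     * Each value is wrapped in double quotes to match ENCLOSED BY '"' and separated by
     * commas to match FIELDS TERMINATED BY ','.
     */
    public static String toLine(TelephoneRecvListVO vo) {
        StringBuilder sb = new StringBuilder();
        sb.append('"').append(vo.getId()).append("\",");
        sb.append('"').append(vo.getAppId()).append("\",");
        sb.append('"').append(vo.getAppName()).append("\",");
        sb.append('"').append(vo.getTemplateId()).append("\",");
        sb.append('"').append(vo.getTemplateContent()).append("\",");
        sb.append('"').append(vo.getTelephoneList()).append("\",");
        sb.append('"').append(vo.getCreateDate()).append("\",");
        sb.append('"').append(vo.getCreateTime()).append("\",");
        sb.append('"').append(vo.getStatus()).append('"');
        return sb.toString();
    }
}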
If the following error occurs while running LOAD DATA:
ERROR 1261 (01000): Row 1 doesn't contain data for all columns
it is because sql_mode is set to strict mode; to continue the import, STRICT_TRANS_TABLES must be removed from sql_mode.
Check the sql_mode of the current MySQL connection:
mysql> show variables like 'sql_mode';
Temporarily change sql_mode (this clears all modes for the session, including STRICT_TRANS_TABLES):
mysql> set sql_mode='';
The LOAD DATA command took 2 minutes 26 seconds, nearly twice as fast as the batch insert.
From the comparison above, the LOAD DATA command performs best, batch inserts come second, and single-row inserts are by far the slowest.
The complete project containing all of the code above is available at https://download.csdn.net/download/flyingcloude/88681843?spm=1001.2014.3001.5503