"数据库优化就像优化图书馆,用对了方法,找书更快,存书更有序!" 📚🔍
🎯 什么是数据库优化?
想象一下,你是一个超级忙碌的图书管理员 📚。每天都有很多读者来借书还书,如果你不善于管理书架和索引,那效率太低了!
数据库优化就像是学会最聪明的图书馆管理方法,让数据存储更高效,查询更快速!
🏃♂️ 核心思想:用优化换查询效率,用技术换性能
未优化:全表扫描 → 慢查询 → 性能低下
已优化:索引查询 → 快速定位 → 性能提升
效率提升:10-100倍! 🎉
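To make that difference concrete, here is a minimal sketch, assuming a MySQL users table reachable through a JDBC DataSource and the hypothetical idx_users_age index used in the examples below. It runs EXPLAIN and prints whether the optimizer chose an index ("range"/"ref") or a full table scan ("ALL"):

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import javax.sql.DataSource;

public class ExplainCheck {
    // Prints MySQL's chosen access type and index for the example query
    public static void printAccessType(DataSource dataSource) throws SQLException {
        try (Connection conn = dataSource.getConnection();
             Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery("EXPLAIN SELECT * FROM users WHERE age > 25")) {
            if (rs.next()) {
                System.out.println("access type = " + rs.getString("type")
                        + ", key = " + rs.getString("key"));
            }
        }
    }
}

Run it once before and once after creating the index and you can see the access type flip from "ALL" to a ranged index read.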
🎨 Four strategies for database optimization
1. Query optimization - make queries "run" faster 🏃
Life analogy: just like refining how you search - with the right index, you find things much faster!
@Service
public class QueryOptimizationService {
// Index optimization
public static class IndexOptimization {
// Single-column index
public void optimizeSingleColumnIndex() {
// Without an index on age this is a full table scan
String slowQuery = "SELECT * FROM users WHERE age > 25";
// The same SQL becomes fast once the age column is indexed
String fastQuery = "SELECT * FROM users WHERE age > 25";
// Create the index
String createIndex = "CREATE INDEX idx_users_age ON users(age)";
}
// Composite index
public void optimizeCompositeIndex() {
// With separate single-column indexes the optimizer can usually use only one of them
String slowQuery = "SELECT * FROM users WHERE age > 25 AND city = 'Beijing'";
// The same SQL is served efficiently by one composite index
String fastQuery = "SELECT * FROM users WHERE age > 25 AND city = 'Beijing'";
// Create the composite index
String createCompositeIndex = "CREATE INDEX idx_users_age_city ON users(age, city)";
}
// Covering index
public void optimizeCoveringIndex() {
// Without a covering index: index lookup plus a back-to-table read for name
String slowQuery = "SELECT name, age FROM users WHERE age > 25";
// The same SQL is answered entirely from the index
String fastQuery = "SELECT name, age FROM users WHERE age > 25";
// Create the covering index
String createCoveringIndex = "CREATE INDEX idx_users_age_name ON users(age, name)";
}
// Partial index (PostgreSQL syntax; MySQL does not support partial indexes)
public void optimizePartialIndex() {
// A full-table index also indexes rows you never query
String slowQuery = "SELECT * FROM users WHERE status = 'active'";
// A partial index only covers the rows that match its predicate
String fastQuery = "SELECT * FROM users WHERE status = 'active'";
// Create the partial index
String createPartialIndex = "CREATE INDEX idx_users_active ON users(name) WHERE status = 'active'";
}
// Functional index (PostgreSQL syntax; MySQL 8.0.13+ uses an extra pair of parentheses around the expression)
public void optimizeFunctionIndex() {
// Wrapping the column in a function defeats an ordinary index on that column
String slowQuery = "SELECT * FROM users WHERE UPPER(name) = 'JOHN'";
// The same SQL can use an index built on the expression
String fastQuery = "SELECT * FROM users WHERE UPPER(name) = 'JOHN'";
// Create the functional index
String createFunctionIndex = "CREATE INDEX idx_users_name_upper ON users(UPPER(name))";
}
}
// SQL optimization
public static class SQLOptimization {
// Query rewriting
public void optimizeQueryRewrite() {
// Subquery version: often harder for the optimizer to plan well
String slowQuery = "SELECT * FROM users WHERE id IN (SELECT user_id FROM orders WHERE amount > 1000)";
// Rewritten as a JOIN (note: may return a user several times if they have several matching orders)
String fastQuery = "SELECT u.* FROM users u JOIN orders o ON u.id = o.user_id WHERE o.amount > 1000";
// OR across two ranges can prevent efficient index use
String slowOrQuery = "SELECT * FROM users WHERE age < 18 OR age > 65";
// Rewritten as a UNION so each branch can use the index on age
String fastUnionQuery = "SELECT * FROM users WHERE age < 18 UNION SELECT * FROM users WHERE age > 65";
}
// Execution-plan tuning
public void optimizeExecutionPlan() {
// Use EXPLAIN to inspect the plan
String explainQuery = "EXPLAIN SELECT * FROM users WHERE age > 25";
// Hint the optimizer toward a specific index
String forceIndexQuery = "SELECT * FROM users USE INDEX (idx_users_age) WHERE age > 25";
// Or tell it to ignore one
String ignoreIndexQuery = "SELECT * FROM users IGNORE INDEX (idx_users_age) WHERE age > 25";
}
// Pagination
public void optimizePagination() {
// OFFSET pagination: the server still reads and discards the first 1000 rows
String slowPagination = "SELECT * FROM users ORDER BY id LIMIT 1000, 20";
// Keyset (cursor) pagination: seek directly past the last id seen on the previous page
String fastPagination = "SELECT * FROM users WHERE id > 1000 ORDER BY id LIMIT 20";
// Deferred join: paginate on the narrow index first, then join back for the full rows
String delayedJoin = "SELECT u.* FROM users u JOIN (SELECT id FROM users ORDER BY id LIMIT 1000, 20) t ON u.id = t.id";
}
// Batch operations
public void optimizeBatchOperations() {
// One row per statement: one network round trip each
String slowInsert = "INSERT INTO users (name, age) VALUES ('John', 25)";
// Multi-row insert: one round trip for many rows
String fastInsert = "INSERT INTO users (name, age) VALUES ('John', 25), ('Jane', 30), ('Bob', 35)";
// Batch update
String batchUpdate = "UPDATE users SET status = 'active' WHERE id IN (1, 2, 3, 4, 5)";
}
}
// Sharding (splitting databases and tables)
public static class DatabaseSharding {
// Horizontal sharding
public static class HorizontalSharding {
public void shardByUserId(long userId) {
// Route by user id
int shardIndex = (int) (userId % 4); // 4 shards
String shardTable = "users_" + shardIndex;
String query = "SELECT * FROM " + shardTable + " WHERE id = " + userId;
}
public void shardByHash(String key) {
// Route by hash of the key
int hash = key.hashCode();
int shardIndex = Math.abs(hash) % 4;
String shardTable = "users_" + shardIndex;
// In real code bind the value with a PreparedStatement instead of concatenating it
String query = "SELECT * FROM " + shardTable + " WHERE key = '" + key + "'";
}
public void shardByRange(long value) {
// Route by value range
String shardTable;
if (value < 1000000) {
shardTable = "users_small";
} else if (value < 10000000) {
shardTable = "users_medium";
} else {
shardTable = "users_large";
}
String query = "SELECT * FROM " + shardTable + " WHERE value = " + value;
}
}
// Vertical sharding
public static class VerticalSharding {
public void shardByTable() {
// Split by table: each business area gets its own table (or database)
String userTable = "users";
String orderTable = "orders";
String productTable = "products";
// User queries
String userQuery = "SELECT * FROM " + userTable + " WHERE id = 1";
// Order queries
String orderQuery = "SELECT * FROM " + orderTable + " WHERE user_id = 1";
// Product queries
String productQuery = "SELECT * FROM " + productTable + " WHERE id = 1";
}
public void shardByColumn() {
// Split by column: hot, narrow columns separated from wide, rarely used ones
String userBasicTable = "users_basic"; // basic profile
String userDetailTable = "users_detail"; // detailed profile
// Query the basic profile
String basicQuery = "SELECT id, name, age FROM " + userBasicTable + " WHERE id = 1";
// Query the detailed profile
String detailQuery = "SELECT id, address, phone FROM " + userDetailTable + " WHERE id = 1";
}
}
// Sharding strategies
public static class ShardingStrategy {
public void consistentHashing(String key) {
// Simplified here as plain hash-modulo; true consistent hashing places shards on a ring
// so that adding or removing a shard only remaps a fraction of the keys
int hash = key.hashCode();
int shardIndex = Math.abs(hash) % 4;
String shardTable = "users_" + shardIndex;
String query = "SELECT * FROM " + shardTable + " WHERE key = '" + key + "'";
}
public void rangeBasedSharding(long value) {
// Range-based sharding
String shardTable;
if (value >= 0 && value < 1000000) {
shardTable = "users_0_1m";
} else if (value >= 1000000 && value < 10000000) {
shardTable = "users_1m_10m";
} else {
shardTable = "users_10m_plus";
}
String query = "SELECT * FROM " + shardTable + " WHERE value = " + value;
}
public void directoryBasedSharding(String key) {
// Directory-based sharding: a lookup rule (or lookup table) maps each key to its shard
String shardTable = getShardTable(key);
String query = "SELECT * FROM " + shardTable + " WHERE key = '" + key + "'";
}
private String getShardTable(String key) {
// Resolve the shard table from the directory rule
return "users_" + key.charAt(0);
}
}
// Cross-shard queries
public static class CrossShardQuery {
public void queryAcrossShards(String condition) {
// Fan the query out to every shard and merge the results
List<String> results = new ArrayList<>();
for (int i = 0; i < 4; i++) {
String shardTable = "users_" + i;
String query = "SELECT * FROM " + shardTable + " WHERE " + condition;
// Execute against this shard and collect the rows
List<String> shardResults = executeQuery(query);
results.addAll(shardResults);
}
// Merge the combined result set
mergeResults(results);
}
private List<String> executeQuery(String query) {
// Execute the query against one shard
return new ArrayList<>();
}
private void mergeResults(List<String> results) {
// Merge the per-shard results (re-sort / re-apply LIMIT if the query needs it)
}
}
}
}
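Since the consistentHashing method above is really just hash-modulo, here is a minimal sketch of an actual consistent-hash ring built on a TreeMap, with virtual nodes so keys spread evenly. The four shard names and the virtual-node count are illustrative assumptions:

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.SortedMap;
import java.util.TreeMap;

public class ConsistentHashRing {
    private final TreeMap<Long, String> ring = new TreeMap<>();

    // Place each shard on the ring several times ("virtual nodes") for a smoother distribution
    public ConsistentHashRing(String[] shards, int virtualNodes) {
        for (String shard : shards) {
            for (int i = 0; i < virtualNodes; i++) {
                ring.put(hash(shard + "#" + i), shard);
            }
        }
    }

    // The shard for a key is the first ring position at or after the key's hash (wrapping around)
    public String shardFor(String key) {
        SortedMap<Long, String> tail = ring.tailMap(hash(key));
        return tail.isEmpty() ? ring.firstEntry().getValue() : tail.get(tail.firstKey());
    }

    private long hash(String s) {
        try {
            byte[] d = MessageDigest.getInstance("MD5").digest(s.getBytes(StandardCharsets.UTF_8));
            // Fold the first 8 bytes of the MD5 digest into a long ring position
            long h = 0;
            for (int i = 0; i < 8; i++) {
                h = (h << 8) | (d[i] & 0xFF);
            }
            return h;
        } catch (NoSuchAlgorithmException e) {
            throw new IllegalStateException(e);
        }
    }
}

// Usage: new ConsistentHashRing(new String[]{"users_0", "users_1", "users_2", "users_3"}, 100).shardFor("user:42")

With a ring like this, removing one shard only remaps the keys that used to land on that shard, instead of reshuffling everything the way plain modulo does.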
2. Connection management - make connections "reusable" and efficient 🔗
Life analogy: like managing phone lines - managed well, calls are more stable and cost less!
@Service
public class ConnectionManagementService {
// Connection pool optimization
public static class ConnectionPoolOptimization {
// HikariCP configuration
@Configuration
public static class HikariCPConfig {
@Bean
public DataSource dataSource() {
HikariConfig config = new HikariConfig();
// Basic settings
config.setJdbcUrl("jdbc:mysql://localhost:3306/testdb");
config.setUsername("username");
config.setPassword("password");
config.setDriverClassName("com.mysql.cj.jdbc.Driver");
// Pool sizing
config.setMaximumPoolSize(20); // maximum number of connections
config.setMinimumIdle(5); // minimum number of idle connections
config.setConnectionTimeout(30000); // connection timeout (ms)
config.setIdleTimeout(600000); // idle timeout (ms)
config.setMaxLifetime(1800000); // maximum connection lifetime (ms)
// Performance-related settings
config.setLeakDetectionThreshold(60000); // leak detection threshold (ms)
config.setConnectionTestQuery("SELECT 1"); // test query (only needed for non-JDBC4 drivers)
config.setValidationTimeout(5000); // validation timeout (ms)
// Pool name
config.setPoolName("HikariCP-Pool");
return new HikariDataSource(config);
}
}
// Druid connection pool configuration
@Configuration
public static class DruidConfig {
@Bean
public DataSource druidDataSource() {
DruidDataSource dataSource = new DruidDataSource();
// Basic settings
dataSource.setUrl("jdbc:mysql://localhost:3306/testdb");
dataSource.setUsername("username");
dataSource.setPassword("password");
dataSource.setDriverClassName("com.mysql.cj.jdbc.Driver");
// Pool sizing
dataSource.setInitialSize(5); // initial number of connections
dataSource.setMaxActive(20); // maximum number of connections
dataSource.setMinIdle(5); // minimum number of idle connections
dataSource.setMaxWait(60000); // maximum wait time (ms)
// Performance-related settings
dataSource.setValidationQuery("SELECT 1"); // validation query
dataSource.setTestWhileIdle(true); // test connections while idle
dataSource.setTestOnBorrow(false); // do not test on borrow
dataSource.setTestOnReturn(false); // do not test on return
// Leak detection
dataSource.setRemoveAbandoned(true); // remove abandoned connections
dataSource.setRemoveAbandonedTimeout(300); // abandoned-connection timeout (seconds)
dataSource.setLogAbandoned(true); // log abandoned connections
return dataSource;
}
}
// Connection pool monitoring
public static class ConnectionPoolMonitor {
public void monitorHikariCP(HikariDataSource dataSource) {
HikariPoolMXBean poolBean = dataSource.getHikariPoolMXBean();
log.info("HikariCP pool status:");
log.info("Active connections: {}", poolBean.getActiveConnections());
log.info("Idle connections: {}", poolBean.getIdleConnections());
log.info("Total connections: {}", poolBean.getTotalConnections());
log.info("Threads awaiting a connection: {}", poolBean.getThreadsAwaitingConnection());
}
public void monitorDruid(DruidDataSource dataSource) {
log.info("Druid pool status:");
log.info("Active connections: {}", dataSource.getActiveCount());
log.info("Pooled (idle) connections: {}", dataSource.getPoolingCount());
log.info("Cumulative connect count: {}", dataSource.getConnectCount());
log.info("Threads waiting for a connection: {}", dataSource.getWaitThreadCount());
}
}
}
// Connection reuse
public static class ConnectionReuse {
// Reuse strategy
public static class ConnectionReuseStrategy {
public void reuseConnection(Connection connection) {
try {
// Check whether the connection is still valid (5 second validation timeout)
if (connection.isValid(5)) {
// Valid: reuse it
useConnection(connection);
} else {
// Invalid: create a replacement
Connection newConnection = createNewConnection();
useConnection(newConnection);
}
} catch (SQLException e) {
log.error("Connection reuse failed", e);
}
}
private void useConnection(Connection connection) {
// Use the connection
}
private Connection createNewConnection() {
// Create a new connection (placeholder)
return null;
}
}
// Pool warm-up
public static class ConnectionWarmup {
public void warmupConnections(DataSource dataSource) {
// Warm up the pool by opening a few connections and running a trivial query
try {
for (int i = 0; i < 5; i++) {
try (Connection connection = dataSource.getConnection();
PreparedStatement ps = connection.prepareStatement("SELECT 1")) {
ps.executeQuery();
}
}
} catch (SQLException e) {
log.error("Connection warm-up failed", e);
}
}
}
// Connection monitoring
public static class ConnectionMonitoring {
public void monitorConnections(DataSource dataSource) {
// Monitor pool state
if (dataSource instanceof HikariDataSource) {
HikariDataSource hikariDataSource = (HikariDataSource) dataSource;
HikariPoolMXBean poolBean = hikariDataSource.getHikariPoolMXBean();
// Record pool metrics
recordConnectionMetrics(poolBean);
}
}
private void recordConnectionMetrics(HikariPoolMXBean poolBean) {
// Log the pool metrics
log.info("Pool metrics: active={}, idle={}, total={}",
poolBean.getActiveConnections(),
poolBean.getIdleConnections(),
poolBean.getTotalConnections());
}
}
}
// Failover
public static class FailoverManagement {
// Master/slave switchover
public static class MasterSlaveSwitch {
private DataSource masterDataSource;
private DataSource slaveDataSource;
private volatile boolean masterAvailable = true;
public Connection getConnection() throws SQLException {
if (masterAvailable) {
try {
return masterDataSource.getConnection();
} catch (SQLException e) {
log.error("Master connection failed, falling back to the slave", e);
masterAvailable = false;
return slaveDataSource.getConnection();
}
} else {
return slaveDataSource.getConnection();
}
}
public void checkMasterHealth() {
try (Connection connection = masterDataSource.getConnection();
PreparedStatement ps = connection.prepareStatement("SELECT 1")) {
ps.executeQuery();
masterAvailable = true;
log.info("Master health check passed");
} catch (SQLException e) {
log.error("Master health check failed", e);
masterAvailable = false;
}
}
}
// Load balancing across replicas
public static class LoadBalancing {
private final List<DataSource> dataSources = new CopyOnWriteArrayList<>();
private final AtomicInteger currentIndex = new AtomicInteger(0);
public Connection getConnection() throws SQLException {
// floorMod keeps the index non-negative even after the counter overflows
int index = Math.floorMod(currentIndex.getAndIncrement(), dataSources.size());
DataSource dataSource = dataSources.get(index);
return dataSource.getConnection();
}
public void addDataSource(DataSource dataSource) {
dataSources.add(dataSource);
}
public void removeDataSource(DataSource dataSource) {
dataSources.remove(dataSource);
}
}
}
}
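The checkMasterHealth method above is only useful if something calls it periodically. A minimal sketch using Spring's scheduler, assuming the MasterSlaveSwitch above is registered as a Spring bean and @EnableScheduling is turned on in the configuration:

import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

@Component
public class MasterHealthCheckJob {

    private final ConnectionManagementService.FailoverManagement.MasterSlaveSwitch masterSlaveSwitch;

    public MasterHealthCheckJob(ConnectionManagementService.FailoverManagement.MasterSlaveSwitch masterSlaveSwitch) {
        this.masterSlaveSwitch = masterSlaveSwitch;
    }

    // Probe the master every 10 seconds so failback happens automatically once it recovers
    @Scheduled(fixedDelay = 10_000)
    public void probeMaster() {
        masterSlaveSwitch.checkMasterHealth();
    }
}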
3. Transaction optimization - make transactions "process" more efficiently ⚡
Life analogy: like a bank transfer - handled right, it is both faster and safer!
@Service
public class TransactionOptimizationService {
// Controlling transaction granularity
public static class TransactionGranularityControl {
// Note: in real code the TransactionTemplate needs a PlatformTransactionManager,
// e.g. new TransactionTemplate(transactionManager)
// Fine-grained: one transaction per operation
public void fineGrainedTransaction() {
for (int i = 0; i < 100; i++) {
final int index = i; // the lambda below needs an effectively final variable
TransactionTemplate transactionTemplate = new TransactionTemplate();
transactionTemplate.execute(status -> {
// Execute a single operation in its own transaction
performSingleOperation(index);
return null;
});
}
}
// Coarse-grained: one transaction for the whole batch
public void coarseGrainedTransaction() {
TransactionTemplate transactionTemplate = new TransactionTemplate();
transactionTemplate.execute(status -> {
for (int i = 0; i < 100; i++) {
performSingleOperation(i);
}
return null;
});
}
private void performSingleOperation(int i) {
// Execute a single operation
}
// Keeping transaction boundaries tight
public void optimizeTransactionBoundary() {
// Do non-database work outside the transaction
String data = prepareData();
// Only the database work runs inside the transaction
TransactionTemplate transactionTemplate = new TransactionTemplate();
transactionTemplate.execute(status -> {
saveToDatabase(data);
return null;
});
}
private String prepareData() {
// Prepare the data
return "prepared data";
}
private void saveToDatabase(String data) {
// Save it to the database
}
}
// Distributed transactions
public static class DistributedTransaction {
// Two-phase commit (2PC)
public void twoPhaseCommit() {
// Phase 1: ask every participant to prepare
boolean allPrepared = prepareAllTransactions();
if (allPrepared) {
// Phase 2: everyone prepared, so commit
commitAllTransactions();
} else {
// At least one participant failed to prepare: roll everything back
rollbackAllTransactions();
}
}
private boolean prepareAllTransactions() {
// Prepare all participants
return true;
}
private void commitAllTransactions() {
// Commit all participants
}
private void rollbackAllTransactions() {
// Roll back all participants
}
// Compensating transaction (Saga-style)
public void compensationTransaction() {
try {
// Run the business operation
performBusinessOperation();
// Commit it
commitTransaction();
} catch (Exception e) {
// On failure, run the compensating action to undo the effects
performCompensationOperation();
throw e;
}
}
private void performBusinessOperation() {
// Business operation
}
private void commitTransaction() {
// Commit the transaction
}
private void performCompensationOperation() {
// Compensating action
}
}
// Transaction isolation levels
public static class TransactionIsolationLevel {
// READ UNCOMMITTED: fastest, but allows dirty reads
public void readUncommitted() {
TransactionTemplate transactionTemplate = new TransactionTemplate();
transactionTemplate.setIsolationLevel(TransactionDefinition.ISOLATION_READ_UNCOMMITTED);
transactionTemplate.execute(status -> {
// Work here may see uncommitted changes from other transactions
return null;
});
}
// READ COMMITTED: no dirty reads, non-repeatable reads still possible
public void readCommitted() {
TransactionTemplate transactionTemplate = new TransactionTemplate();
transactionTemplate.setIsolationLevel(TransactionDefinition.ISOLATION_READ_COMMITTED);
transactionTemplate.execute(status -> {
// Work here only sees committed data
return null;
});
}
// REPEATABLE READ: the same rows read the same within one transaction (InnoDB default)
public void repeatableRead() {
TransactionTemplate transactionTemplate = new TransactionTemplate();
transactionTemplate.setIsolationLevel(TransactionDefinition.ISOLATION_REPEATABLE_READ);
transactionTemplate.execute(status -> {
// Repeated reads of the same rows stay consistent
return null;
});
}
// SERIALIZABLE: strictest, transactions behave as if run one after another
public void serializable() {
TransactionTemplate transactionTemplate = new TransactionTemplate();
transactionTemplate.setIsolationLevel(TransactionDefinition.ISOLATION_SERIALIZABLE);
transactionTemplate.execute(status -> {
// Highest consistency, lowest concurrency
return null;
});
}
}
// Deadlock avoidance
public static class DeadlockAvoidance {
// Always take locks in the same order
public void consistentLockOrder() {
// Acquire locks in a fixed (here: lexicographic) order so two threads can never wait on each other
String lock1 = "user_1";
String lock2 = "user_2";
if (lock1.compareTo(lock2) < 0) {
acquireLock(lock1);
acquireLock(lock2);
} else {
acquireLock(lock2);
acquireLock(lock1);
}
}
private void acquireLock(String lock) {
// Acquire the lock
}
// Lock acquisition with a timeout
public void timeoutLock() {
boolean acquired = false;
try {
// Try to acquire the lock, giving up after 5 seconds
acquired = tryAcquireLockWithTimeout(5000);
if (acquired) {
// Run the business logic under the lock
performBusinessLogic();
} else {
log.warn("Timed out waiting for the lock");
}
} finally {
if (acquired) {
releaseLock();
}
}
}
private boolean tryAcquireLockWithTimeout(long timeout) {
// Try to acquire the lock within the timeout
return true;
}
private void performBusinessLogic() {
// Business logic
}
private void releaseLock() {
// Release the lock
}
// Deadlock detection
public void detectDeadlock() {
// Check for a deadlock and handle it if found
if (isDeadlockDetected()) {
log.error("Deadlock detected");
handleDeadlock();
}
}
private boolean isDeadlockDetected() {
// Detect a deadlock (placeholder)
return false;
}
private void handleDeadlock() {
// Handle the deadlock, e.g. abort one of the transactions and retry
}
}
}
}
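The TransactionTemplate calls above are the programmatic style; in a typical Spring service the same boundaries, isolation level, and timeout are usually expressed declaratively with @Transactional. A minimal sketch, where the service name, method names, and the commented-out repository calls are illustrative assumptions:

import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Isolation;
import org.springframework.transaction.annotation.Transactional;

@Service
public class TransferService {

    // One transaction around the whole transfer; rolls back automatically on a RuntimeException
    @Transactional(isolation = Isolation.READ_COMMITTED, timeout = 5)
    public void transfer(long fromId, long toId, long amount) {
        // debit(fromId, amount);   // hypothetical repository calls
        // credit(toId, amount);
    }

    // Read-only hint lets the driver/ORM skip dirty checking and may route to a replica
    @Transactional(readOnly = true)
    public void report() {
        // load data for a report
    }
}

Declarative boundaries keep the transaction exactly as wide as the annotated method, which is the same "keep the boundary tight" idea as optimizeTransactionBoundary above.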
4. Database performance optimization - make the database "run" more efficiently 🚀
Life analogy: like tuning a car engine - done right, it runs faster and burns less fuel!
@Service
public class DatabasePerformanceOptimizationService {
// Query result cache
public static class QueryCache {
private final Cache<String, Object> queryCache;
public QueryCache() {
this.queryCache = Caffeine.newBuilder()
.maximumSize(1000)
.expireAfterWrite(10, TimeUnit.MINUTES)
.build();
}
public Object getCachedResult(String query, Object... params) {
String cacheKey = generateCacheKey(query, params);
return queryCache.getIfPresent(cacheKey);
}
public void cacheResult(String query, Object result, Object... params) {
String cacheKey = generateCacheKey(query, params);
queryCache.put(cacheKey, result);
}
private String generateCacheKey(String query, Object... params) {
// Key = the SQL text plus the bound parameter values
StringBuilder keyBuilder = new StringBuilder(query);
for (Object param : params) {
keyBuilder.append("_").append(param);
}
return keyBuilder.toString();
}
}
// Batch operation optimization
public static class BatchOperationOptimization {
public void batchInsert(List<User> users) {
String sql = "INSERT INTO users (name, age, email) VALUES (?, ?, ?)";
try (Connection connection = dataSource.getConnection();
PreparedStatement ps = connection.prepareStatement(sql)) {
connection.setAutoCommit(false); // one commit for the whole batch
for (User user : users) {
ps.setString(1, user.getName());
ps.setInt(2, user.getAge());
ps.setString(3, user.getEmail());
ps.addBatch();
}
int[] results = ps.executeBatch();
connection.commit();
log.info("Batch insert finished, statements executed: {}", results.length);
} catch (SQLException e) {
log.error("Batch insert failed", e);
}
}
public void batchUpdate(List<User> users) {
String sql = "UPDATE users SET name = ?, age = ? WHERE id = ?";
try (Connection connection = dataSource.getConnection();
PreparedStatement ps = connection.prepareStatement(sql)) {
connection.setAutoCommit(false); // one commit for the whole batch
for (User user : users) {
ps.setString(1, user.getName());
ps.setInt(2, user.getAge());
ps.setLong(3, user.getId());
ps.addBatch();
}
int[] results = ps.executeBatch();
connection.commit();
log.info("Batch update finished, statements executed: {}", results.length);
} catch (SQLException e) {
log.error("Batch update failed", e);
}
}
private DataSource dataSource;
private static class User {
private Long id;
private String name;
private int age;
private String email;
// getters and setters
public Long getId() { return id; }
public void setId(Long id) { this.id = id; }
public String getName() { return name; }
public void setName(String name) { this.name = name; }
public int getAge() { return age; }
public void setAge(int age) { this.age = age; }
public String getEmail() { return email; }
public void setEmail(String email) { this.email = email; }
}
}
// Read/write splitting
public static class ReadWriteSeparation {
private DataSource masterDataSource;
private DataSource slaveDataSource;
public Connection getReadConnection() throws SQLException {
return slaveDataSource.getConnection();
}
public Connection getWriteConnection() throws SQLException {
return masterDataSource.getConnection();
}
public void readOperation() {
try (Connection connection = getReadConnection();
PreparedStatement ps = connection.prepareStatement("SELECT * FROM users WHERE id = ?")) {
ps.setLong(1, 1L);
ResultSet rs = ps.executeQuery();
while (rs.next()) {
// Handle each row of the result
processReadResult(rs);
}
} catch (SQLException e) {
log.error("Read operation failed", e);
}
}
public void writeOperation() {
try (Connection connection = getWriteConnection();
PreparedStatement ps = connection.prepareStatement("INSERT INTO users (name, age) VALUES (?, ?)")) {
ps.setString(1, "John");
ps.setInt(2, 25);
ps.executeUpdate();
} catch (SQLException e) {
log.error("Write operation failed", e);
}
}
private void processReadResult(ResultSet rs) throws SQLException {
// Process the row
}
}
// Connection pool tuning
public static class DatabaseConnectionPoolOptimization {
public void optimizeConnectionPool() {
HikariConfig config = new HikariConfig();
// Pool size: tune to the actual level of concurrency
config.setMaximumPoolSize(20);
config.setMinimumIdle(5); // keep a small number of idle connections ready
// Timeouts
config.setConnectionTimeout(30000); // 30 s to obtain a connection
config.setIdleTimeout(600000); // 10 min idle timeout
config.setMaxLifetime(1800000); // 30 min maximum connection lifetime
// Diagnostics
config.setLeakDetectionThreshold(60000); // warn if a connection is held for over 1 min
config.setConnectionTestQuery("SELECT 1"); // test query (JDBC4 drivers usually do not need this)
// Pool name
config.setPoolName("Optimized-Pool");
DataSource tunedDataSource = new HikariDataSource(config); // actually build the pool from these settings
}
}
}
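As a follow-up to the ReadWriteSeparation class above: in Spring, routing between a master and a replica is often handled by AbstractRoutingDataSource instead of handing out connections manually. A minimal sketch, assuming the two DataSource beans already exist (the lookup keys "master" and "slave" are illustrative):

import java.util.HashMap;
import java.util.Map;
import javax.sql.DataSource;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

public class ReadWriteRoutingDataSource extends AbstractRoutingDataSource {

    // Each thread marks whether it is currently doing a read or a write
    private static final ThreadLocal<String> CURRENT = ThreadLocal.withInitial(() -> "master");

    public static void useReplica() { CURRENT.set("slave"); }
    public static void useMaster()  { CURRENT.set("master"); }

    @Override
    protected Object determineCurrentLookupKey() {
        return CURRENT.get();
    }

    // Wire it up once, e.g. from a @Configuration class
    public static DataSource build(DataSource master, DataSource slave) {
        ReadWriteRoutingDataSource routing = new ReadWriteRoutingDataSource();
        Map<Object, Object> targets = new HashMap<>();
        targets.put("master", master);
        targets.put("slave", slave);
        routing.setTargetDataSources(targets);
        routing.setDefaultTargetDataSource(master);
        routing.afterPropertiesSet(); // resolves the target map
        return routing;
    }
}

Code that wants a replica calls useReplica() before the query (and useMaster() afterwards); everything else keeps hitting the master by default.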
🎯 Database optimization in practice
1. Optimizing high-concurrency queries 🚀
@Service
public class HighConcurrencyQueryOptimizationService {
// Asynchronous query (requires @EnableAsync on a configuration class)
@Async
public CompletableFuture<List<User>> queryUsersAsync(String condition) {
return CompletableFuture.supplyAsync(() -> {
try {
return queryUsers(condition);
} catch (SQLException e) {
log.error("Async query failed", e);
return new ArrayList<>();
}
});
}
private List<User> queryUsers(String condition) throws SQLException {
// Run the query
return new ArrayList<>();
}
// Cached query results (requires @EnableCaching; see the configuration sketch after this class)
@Cacheable(value = "users", key = "#condition")
public List<User> getCachedUsers(String condition) {
try {
return queryUsers(condition);
} catch (SQLException e) {
log.error("Cached query failed", e);
return new ArrayList<>();
}
}
// Optimized pagination
public Page<User> getUsersWithPagination(int page, int size) {
// Keyset-style pagination: seek past the last id of the previous page.
// page * size only works as the boundary when ids are dense and start at 0;
// in real code pass the last id actually seen.
long lastSeenId = (long) page * size;
String sql = "SELECT * FROM users WHERE id > ? ORDER BY id LIMIT ?";
try (Connection connection = dataSource.getConnection();
PreparedStatement ps = connection.prepareStatement(sql)) {
ps.setLong(1, lastSeenId);
ps.setInt(2, size);
ResultSet rs = ps.executeQuery();
List<User> users = new ArrayList<>();
while (rs.next()) {
User user = new User();
user.setId(rs.getLong("id"));
user.setName(rs.getString("name"));
user.setAge(rs.getInt("age"));
users.add(user);
}
return new Page<>(users, page, size);
} catch (SQLException e) {
log.error("Paginated query failed", e);
return new Page<>(new ArrayList<>(), page, size);
}
}
private DataSource dataSource;
private static class User {
private Long id;
private String name;
private int age;
// getters and setters
public Long getId() { return id; }
public void setId(Long id) { this.id = id; }
public String getName() { return name; }
public void setName(String name) { this.name = name; }
public int getAge() { return age; }
public void setAge(int age) { this.age = age; }
}
private static class Page<T> {
private final List<T> content;
private final int page;
private final int size;
Page(List<T> content, int page, int size) {
this.content = content;
this.page = page;
this.size = size;
}
public List<T> getContent() { return content; }
public int getPage() { return page; }
public int getSize() { return size; }
}
}
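The @Cacheable and @Async annotations above do nothing unless caching and async execution are switched on. A minimal configuration sketch using a Caffeine-backed cache manager; the cache name "users" matches the annotation above, the size and TTL values are illustrative assumptions:

import com.github.benmanes.caffeine.cache.Caffeine;
import java.util.concurrent.TimeUnit;
import org.springframework.cache.CacheManager;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.cache.caffeine.CaffeineCacheManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableAsync;

@Configuration
@EnableCaching // without this, @Cacheable("users") above is silently ignored
@EnableAsync   // likewise required for the @Async query method
public class CacheConfig {

    @Bean
    public CacheManager cacheManager() {
        CaffeineCacheManager manager = new CaffeineCacheManager("users");
        manager.setCaffeine(Caffeine.newBuilder()
                .maximumSize(10_000)
                .expireAfterWrite(5, TimeUnit.MINUTES));
        return manager;
    }
}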
2. Big-data processing optimization 📊
@Service
public class BigDataProcessingOptimizationService {
// Streaming through a large result set
public void processLargeDataset(String tableName) {
String sql = "SELECT * FROM " + tableName;
try (Connection connection = dataSource.getConnection();
PreparedStatement ps = connection.prepareStatement(sql,
ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)) {
// Set the fetch size before executing so rows arrive in chunks
// (the MySQL driver only truly streams with fetchSize = Integer.MIN_VALUE or useCursorFetch=true)
ps.setFetchSize(1000);
try (ResultSet rs = ps.executeQuery()) {
while (rs.next()) {
// Process one row at a time instead of loading everything into memory
processRow(rs);
}
}
} catch (SQLException e) {
log.error("Large dataset processing failed", e);
}
}
private void processRow(ResultSet rs) throws SQLException {
// Process a single row
}
// Processing a large dataset in batches
public void processLargeDatasetInBatches(String tableName, int batchSize) {
// Note: LIMIT/OFFSET gets slower as the offset grows; a keyset variant is sketched after this class
String sql = "SELECT * FROM " + tableName + " LIMIT ? OFFSET ?";
int offset = 0;
boolean hasMore = true;
while (hasMore) {
try (Connection connection = dataSource.getConnection();
PreparedStatement ps = connection.prepareStatement(sql)) {
ps.setInt(1, batchSize);
ps.setInt(2, offset);
ResultSet rs = ps.executeQuery();
List<DataRecord> batch = new ArrayList<>();
while (rs.next()) {
DataRecord record = new DataRecord();
record.setId(rs.getLong("id"));
record.setData(rs.getString("data"));
batch.add(record);
}
if (batch.isEmpty()) {
hasMore = false;
} else {
processBatch(batch);
offset += batchSize;
}
} catch (SQLException e) {
log.error("Batch processing failed", e);
hasMore = false;
}
}
}
private void processBatch(List<DataRecord> batch) {
// Process one batch of records
}
private static class DataRecord {
private Long id;
private String data;
// getters and setters
public Long getId() { return id; }
public void setId(Long id) { this.id = id; }
public String getData() { return data; }
public void setData(String data) { this.data = data; }
}
private DataSource dataSource;
}
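The batch loop above pages with LIMIT/OFFSET, which the database has to re-scan from the start for every batch. A keyset variant that remembers the last id avoids that. A minimal sketch, assuming the table has an auto-increment primary key id and a data column, as in the DataRecord example above:

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import javax.sql.DataSource;

public class KeysetBatchReader {

    // Reads the whole table in id order, batchSize rows at a time, without OFFSET
    public void readAll(DataSource dataSource, String tableName, int batchSize) throws SQLException {
        String sql = "SELECT id, data FROM " + tableName + " WHERE id > ? ORDER BY id LIMIT ?";
        long lastId = 0;
        while (true) {
            int rows = 0;
            try (Connection conn = dataSource.getConnection();
                 PreparedStatement ps = conn.prepareStatement(sql)) {
                ps.setLong(1, lastId);
                ps.setInt(2, batchSize);
                try (ResultSet rs = ps.executeQuery()) {
                    while (rs.next()) {
                        lastId = rs.getLong("id"); // remember the last key we saw
                        // process rs.getString("data") here
                        rows++;
                    }
                }
            }
            if (rows < batchSize) {
                break; // last page reached
            }
        }
    }
}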
🛡️ Pitfalls to watch for when optimizing a database
1. Index maintenance 📊
@Service
public class IndexMaintenanceService {
public void maintainIndexes() {
// Refresh table statistics
analyzeTables();
// Rebuild indexes
rebuildIndexes();
// Drop indexes that are never used
cleanupUnusedIndexes();
}
private void analyzeTables() {
String sql = "ANALYZE TABLE users, orders, products";
try (Connection connection = dataSource.getConnection();
PreparedStatement ps = connection.prepareStatement(sql)) {
ps.execute();
log.info("Table statistics refreshed");
} catch (SQLException e) {
log.error("Refreshing table statistics failed", e);
}
}
private void rebuildIndexes() {
String[] tables = {"users", "orders", "products"};
for (String table : tables) {
// Rebuilds the whole table and its indexes; this is expensive, schedule it off-peak
String sql = "ALTER TABLE " + table + " ENGINE=InnoDB";
try (Connection connection = dataSource.getConnection();
PreparedStatement ps = connection.prepareStatement(sql)) {
ps.execute();
log.info("Indexes of table {} rebuilt", table);
} catch (SQLException e) {
log.error("Rebuilding indexes of table {} failed", table, e);
}
}
}
private void cleanupUnusedIndexes() {
// CARDINALITY = 0 is only a rough hint (it also matches empty tables and stale statistics);
// verify against query logs or sys.schema_unused_indexes before dropping anything in production
String sql = "SELECT TABLE_NAME, INDEX_NAME FROM INFORMATION_SCHEMA.STATISTICS WHERE CARDINALITY = 0";
try (Connection connection = dataSource.getConnection();
PreparedStatement ps = connection.prepareStatement(sql);
ResultSet rs = ps.executeQuery()) {
while (rs.next()) {
String tableName = rs.getString("TABLE_NAME");
String indexName = rs.getString("INDEX_NAME");
if ("PRIMARY".equals(indexName)) {
continue; // never drop the primary key
}
// Drop the candidate index
String dropSql = "DROP INDEX " + indexName + " ON " + tableName;
try (PreparedStatement dropPs = connection.prepareStatement(dropSql)) {
dropPs.execute();
log.info("Dropped unused index {}.{}", tableName, indexName);
}
}
} catch (SQLException e) {
log.error("Cleaning up unused indexes failed", e);
}
}
private DataSource dataSource;
}
2. Performance monitoring 🚨
@Service
public class DatabasePerformanceMonitoringService {
public void monitorDatabasePerformance() {
// Watch for slow queries
monitorSlowQueries();
// Watch the connection pool
monitorConnectionPool();
// Watch overall database status
monitorDatabaseStatus();
}
private void monitorSlowQueries() {
// Requires slow_query_log=ON and log_output='TABLE' so MySQL records into mysql.slow_log;
// every row in that table already exceeded long_query_time
String sql = "SELECT sql_text, query_time FROM mysql.slow_log ORDER BY start_time DESC LIMIT 10";
try (Connection connection = dataSource.getConnection();
PreparedStatement ps = connection.prepareStatement(sql);
ResultSet rs = ps.executeQuery()) {
while (rs.next()) {
String query = rs.getString("sql_text");
String queryTime = rs.getString("query_time"); // TIME value, e.g. '00:00:02.123456'
log.warn("Slow query detected: time={}, sql={}", queryTime, query);
}
} catch (SQLException e) {
log.error("Slow query monitoring failed", e);
}
}
private void monitorConnectionPool() {
if (dataSource instanceof HikariDataSource) {
HikariDataSource hikariDataSource = (HikariDataSource) dataSource;
HikariPoolMXBean poolBean = hikariDataSource.getHikariPoolMXBean();
log.info("Connection pool status: active={}, idle={}, total={}",
poolBean.getActiveConnections(),
poolBean.getIdleConnections(),
poolBean.getTotalConnections());
}
}
private void monitorDatabaseStatus() {
String sql = "SHOW STATUS LIKE 'Threads_connected'";
try (Connection connection = dataSource.getConnection();
PreparedStatement ps = connection.prepareStatement(sql);
ResultSet rs = ps.executeQuery()) {
if (rs.next()) {
int connectedThreads = rs.getInt("Value");
log.info("Current database connections: {}", connectedThreads);
}
} catch (SQLException e) {
log.error("Database status monitoring failed", e);
}
}
private DataSource dataSource;
}
📊 Monitoring the optimizations: make performance visible
@Component
public class DatabaseOptimizationMonitor {
private final MeterRegistry meterRegistry;
private final Timer queryTimer;
// A Micrometer Gauge observes a value; an AtomicInteger holds the current pool size for it to read
private final AtomicInteger connectionPoolSize = new AtomicInteger();
public DatabaseOptimizationMonitor(MeterRegistry meterRegistry) {
this.meterRegistry = meterRegistry;
this.queryTimer = Timer.builder("database.query.time")
.register(meterRegistry);
Gauge.builder("database.connectionpool.size", connectionPoolSize, AtomicInteger::get)
.register(meterRegistry);
}
public void recordQueryExecution(Duration duration, String queryType) {
queryTimer.record(duration);
// Counters are looked up (or created) per tag combination, then incremented
meterRegistry.counter("database.query.count", "query", queryType).increment();
}
public void recordConnectionPoolSize(int size) {
connectionPoolSize.set(size);
}
}
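A brief usage sketch for the monitor above; the runQuery method is an illustrative stand-in for a real DAO or JdbcTemplate call:

import java.time.Duration;
import java.util.List;

public class TimedUserQuery {
    private final DatabaseOptimizationMonitor monitor;

    public TimedUserQuery(DatabaseOptimizationMonitor monitor) {
        this.monitor = monitor;
    }

    public List<String> findActiveUsers() {
        long start = System.nanoTime();
        try {
            return runQuery(); // stand-in for the real database call
        } finally {
            // Record how long the query took and count it under its own tag
            monitor.recordQueryExecution(Duration.ofNanos(System.nanoTime() - start), "find_active_users");
        }
    }

    private List<String> runQuery() {
        return List.of(); // placeholder result
    }
}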
🎉 Summary: database optimization makes data "storage" more efficient
Database optimization is like the "management" skills we use in everyday life:
- Query optimization = refining how you search 🏃
- Connection management = managing phone lines 🔗
- Transaction optimization = streamlining bank transfers ⚡
- Performance optimization = tuning a car engine 🚀
With sensible database optimization we can:
- 🚀 Greatly improve query performance
- ⚡ Improve system responsiveness
- 🎯 Raise resource utilization
- 💪 Strengthen system stability
Remember: database optimization is not a silver bullet, but it is at the heart of performance work! Use it well and your Java application will run as smoothly as a well-run library! ✨
"Database optimization is like magic: it makes data storage more efficient and performance more impressive!" 🪄🗄️