The previous article covered how to set up ClickHouse in Docker. This one records how to integrate ClickHouse into a Spring Boot project and read from and write to it.
First, add the Maven dependencies:
<dependency>
    <groupId>com.baomidou</groupId>
    <artifactId>mybatis-plus-boot-starter</artifactId>
    <version>3.4.3.4</version>
</dependency>
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
</dependency>
<!-- Upgrade Druid: version 1.1.10+ supports ClickHouse -->
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>druid-spring-boot-starter</artifactId>
    <version>1.1.13</version>
</dependency>
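One note on the driver: Druid only maps the jdbc:clickhouse:// URL to a ClickHouse driver class, it does not bundle the driver itself, so a ClickHouse JDBC driver still has to end up on the classpath somehow. If you hit a missing-driver error at runtime, the classic driver artifact below is one option; the artifact and version here are my assumption for illustration, not taken from the original project.
<!-- Assumed addition: classic ClickHouse JDBC driver (ru.yandex.clickhouse.ClickHouseDriver) -->
<dependency>
    <groupId>ru.yandex.clickhouse</groupId>
    <artifactId>clickhouse-jdbc</artifactId>
    <version>0.2.6</version>
</dependency>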
import com.alibaba.druid.pool.DruidDataSource;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import javax.sql.DataSource;

@Configuration
public class DruidConfig {

    /**
     * Druid connection pool pointing at ClickHouse's HTTP port (8123) and the test database.
     */
    @Bean
    public DataSource dataSource() {
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setUrl("jdbc:clickhouse://localhost:8123/test");
        dataSource.setInitialSize(10);
        dataSource.setMaxActive(100);
        dataSource.setMinIdle(10);
        dataSource.setMaxWait(-1); // wait indefinitely for a connection
        return dataSource;
    }
}
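Before wiring in MyBatis-Plus it can be handy to confirm the pool actually reaches ClickHouse. The startup check below is only a sketch I added for illustration, not part of the original setup; the class and bean names are hypothetical.

import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import javax.sql.DataSource;
import java.sql.Connection;

@Configuration
public class ClickHouseConnectionCheck { // hypothetical class name

    /**
     * Borrow one connection at startup so a misconfigured URL or missing driver fails fast.
     */
    @Bean
    public CommandLineRunner clickHouseSmokeTest(DataSource dataSource) {
        return args -> {
            try (Connection connection = dataSource.getConnection()) {
                System.out.println("ClickHouse connection OK: " + connection.getMetaData().getURL());
            }
        };
    }
}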
package cn.yufire.sync.sls.getway.logs.pojo;

import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.*;

import java.io.Serializable;

@Builder
@AllArgsConstructor
@NoArgsConstructor
@TableName(value = "getway_logs")
@Data
public class GetwayLogs implements Serializable {

    /**
     * id
     */
    @TableField(value = "id")
    private Long id;

    /**
     * API name: the name defined on the gateway
     */
    @TableField(value = "api_name")
    private String apiName;

    /**
     * Response body
     */
    @TableField(value = "response_body")
    private String responseBody;

    /**
     * Invocation environment: TEST, RELEASE, PRE
     */
    @TableField(value = "api_stage_name")
    private String apiStageName;

    /**
     * Error code
     */
    @TableField(value = "error_code")
    private String errorCode;

    /**
     * HTTP request method
     */
    @TableField(value = "http_method")
    private String httpMethod;

    /**
     * API request path
     */
    @TableField(value = "api_path")
    private String apiPath;

    /**
     * Full API request URL
     */
    @TableField(value = "api_full_path")
    private String apiFullPath;

    /**
     * Request time
     */
    @TableField(value = "request_time")
    private String requestTime;

    /**
     * Request body
     */
    @TableField(value = "request_body")
    private String requestBody;

    /**
     * Gateway request ID generated by Alibaba Cloud
     */
    @TableField(value = "getway_request_id")
    private String getwayRequestId;

    /**
     * Alibaba Cloud gateway application ID
     */
    @TableField(value = "getway_app_id")
    private String getwayAppId;

    /**
     * Request protocol: HTTP, HTTPS, ...
     */
    @TableField(value = "request_protocol")
    private String requestProtocol;

    /**
     * Random string (nonce) generated by the client call
     */
    @TableField(value = "client_nonce")
    private String clientNonce;

    /**
     * Gateway application name
     */
    @TableField(value = "getway_app_name")
    private String getwayAppName;

    /**
     * Gateway group ID
     */
    @TableField(value = "getway_group_id")
    private String getwayGroupId;

    /**
     * Client IP
     */
    @TableField(value = "client_ip")
    private String clientIp;

    /**
     * Domain bound to the gateway
     */
    @TableField(value = "getway_bind_domain")
    private String getwayBindDomain;

    /**
     * Request body size
     */
    @TableField(value = "request_size")
    private Integer requestSize;

    /**
     * Response body size
     */
    @TableField(value = "response_size")
    private Integer responseSize;

    /**
     * HTTP status code returned by the backend application
     */
    @TableField(value = "app_response_code")
    private Integer appResponseCode;

    /**
     * API group name
     */
    @TableField(value = "apiGroupName")
    private String apiGroupName;
}
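Since the entity carries Lombok's @Builder and @Data, constructing a log record looks like this. The snippet below is a throwaway demo I added, and every field value in it is a made-up placeholder.

import cn.yufire.sync.sls.getway.logs.pojo.GetwayLogs;

public class GetwayLogsBuilderDemo { // hypothetical demo class
    public static void main(String[] args) {
        GetwayLogs sample = GetwayLogs.builder()
                .apiName("queryOrder")     // placeholder values only
                .apiStageName("TEST")
                .httpMethod("POST")
                .apiPath("/order/query")
                .clientIp("127.0.0.1")
                .requestSize(256)
                .responseSize(1024)
                .appResponseCode(200)
                .build();
        System.out.println(sample); // toString() comes from @Data
    }
}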
Since my ClickHouse use case only needs batch inserts, I wrote a batch-insert SQL by hand. MyBatis-Plus does ship its own batch-insert method, but it works by executing many SQL statements in a batch rather than a single statement, so I hand-wrote one INSERT that covers the whole batch.

There is a pitfall here: batch inserts into ClickHouse kept throwing errors even though single-row inserts worked fine. After some searching I found that the various batch-insert issues come down to needing a FORMAT keyword before Values, i.e. insert into x(xx,xx) FORMAT Values (), and once that was added the errors went away.
package cn.yufire.sync.sls.getway.logs.mapper;

import cn.yufire.sync.sls.getway.logs.pojo.GetwayLogs;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Insert;

import java.util.List;

public interface GetwayLogsMapper extends BaseMapper<GetwayLogs> {

    /**
     * Batch insert gateway logs in a single statement.
     *
     * @param list records to insert
     * @return affected row count
     */
    @Insert("<script>insert into getway_logs(" +
            "api_name," +
            "response_body," +
            "api_stage_name," +
            "error_code," +
            "http_method," +
            "api_path," +
            "api_full_path," +
            "request_time," +
            "request_body," +
            "getway_request_id," +
            "getway_app_id," +
            "request_protocol," +
            "client_nonce," +
            "getway_app_name," +
            "getway_group_id," +
            "client_ip," +
            "getway_bind_domain," +
            "request_size," +
            "response_size," +
            "app_response_code" +
            ") FORMAT Values " +
            " <foreach collection=\"list\" item=\"item\" index=\"index\" separator=\",\">\n" +
            " (" +
            "#{item.apiName}," +
            "#{item.responseBody}," +
            "#{item.apiStageName}," +
            "#{item.errorCode}," +
            "#{item.httpMethod}," +
            "#{item.apiPath}," +
            "#{item.apiFullPath}," +
            "#{item.requestTime}," +
            "#{item.requestBody}," +
            "#{item.getwayRequestId}," +
            "#{item.getwayAppId}," +
            "#{item.requestProtocol}," +
            "#{item.clientNonce}," +
            "#{item.getwayAppName}," +
            "#{item.getwayGroupId}," +
            "#{item.clientIp}," +
            "#{item.getwayBindDomain}," +
            "#{item.requestSize}," +
            "#{item.responseSize}," +
            "#{item.appResponseCode}" +
            ")\n" +
            " </foreach>" +
            "</script>")
    Integer batchInsert(List<GetwayLogs> list);
}
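One thing to keep in mind: MyBatis-Plus only sees this interface if it is registered as a mapper, either by annotating it with @Mapper or by enabling mapper scanning on the boot class. Below is a minimal sketch assuming the latter; the application class name is hypothetical.

import org.mybatis.spring.annotation.MapperScan;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
@MapperScan("cn.yufire.sync.sls.getway.logs.mapper") // scan the mapper package shown above
public class SyncApplication { // hypothetical class name
    public static void main(String[] args) {
        SpringApplication.run(SyncApplication.class, args);
    }
}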
Calling it from business code:
@Autowired
private GetwayLogsMapper getwayLogsMapper;

log.info("Batch inserting into the database..., insert result: {}", getwayLogsMapper.batchInsert(logs));
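For context, here is a minimal sketch of what the calling service could look like; the package, class name, logger setup, and where the logs list comes from are assumptions on my part, not taken from the original project.

package cn.yufire.sync.sls.getway.logs.service; // hypothetical package

import cn.yufire.sync.sls.getway.logs.mapper.GetwayLogsMapper;
import cn.yufire.sync.sls.getway.logs.pojo.GetwayLogs;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.List;

@Slf4j
@Service
public class GetwayLogsSyncService { // hypothetical class name

    @Autowired
    private GetwayLogsMapper getwayLogsMapper;

    /**
     * Write one batch of gateway logs to ClickHouse via the hand-written single-statement insert.
     */
    public void saveBatch(List<GetwayLogs> logs) {
        if (logs == null || logs.isEmpty()) {
            return;
        }
        log.info("Batch inserting into the database..., insert result: {}", getwayLogsMapper.batchInsert(logs));
    }
}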
As you can see, the CRUD operations are exactly the same as against MySQL; in other words, you can simply treat ClickHouse as if it were MySQL.
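For example, reads go through the BaseMapper methods that GetwayLogsMapper inherits, just as they would for MySQL. The method below is only an illustration (it could live in the hypothetical service sketched above); the filter and limit are made up.

    // requires: import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
    public List<GetwayLogs> findRecentTestLogs() {
        QueryWrapper<GetwayLogs> wrapper = new QueryWrapper<GetwayLogs>()
                .eq("api_stage_name", "TEST")   // illustrative filter
                .orderByDesc("request_time")
                .last("LIMIT 100");             // illustrative limit
        return getwayLogsMapper.selectList(wrapper);
    }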
I also ran into a few errors while integrating ClickHouse into Spring Boot.

ClickHouse exception, code: 1002

With the ClickHouse dependency plus the JPA dependency on the classpath, the application simply would not start: it kept complaining that there was no driver, and supplying a driver explicitly did not help either.

Integrating ClickHouse with Spring Boot also produced: Unable to determine Dialect to use [name=ClickHouse, majorVersion=22]; user must register resolver or explicitly set 'hibernate.dialect'

After a lot of searching I finally found the answer: remove both the ClickHouse dependency and the JPA dependency and just treat ClickHouse as MySQL, after which everything ran normally.