The chat feature involves two endpoints: the first obtains an access token, the second is the chat endpoint itself; see the official documentation for details.
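The token call is not shown in the original snippet. Below is a minimal sketch of how it could look with the same OkHttpClient and the injected tokenUrl, apiKey and secretKey used further down (the method name getAccessToken is an assumption; the grant_type/client_id/client_secret query parameters follow the official OAuth flow, and in practice the returned token should be cached until it expires rather than fetched per request):

@SneakyThrows
public String getAccessToken() {
    // client_credentials flow: exchange the API key / secret key for an access token
    String url = tokenUrl
            + "?grant_type=client_credentials"
            + "&client_id=" + apiKey
            + "&client_secret=" + secretKey;
    Request request = new Request.Builder()
            .url(url)
            .post(RequestBody.create(null, new byte[0])) // empty POST body
            .build();
    try (Response response = client.newCall(request).execute()) {
        ResponseBody body = response.body();
        String json = body != null ? body.string() : "{}";
        return JSON.parseObject(json).getString("access_token");
    }
}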
Below is a streaming call to the chat endpoint, single-turn, without conversation context:
@Value("${qianfan.apiKey}")
private String apiKey;
@Value("${qianfan.secretKey}")
private String secretKey;
@Value("${qianfan.tokenUrl}")
private String tokenUrl;
@Value("${qianfan.chatUrl}")
private String chatUrl;
private final OkHttpClient client = new OkHttpClient();
private static final String ACCESS_TOKEN_PRE = "?access_token=";
@SneakyThrows
public Flux<String> getAnswer(Message message, String token) {
    RequestMessage requestBody = new RequestMessage();
    requestBody.addMessage(message);
    String jsonStr = JSON.toJSONString(requestBody);
    MediaType mediaType = MediaType.parse("application/json");
    RequestBody body = RequestBody.create(mediaType, jsonStr);
    Request request = new Request.Builder()
            .url(chatUrl + ACCESS_TOKEN_PRE + token)
            .addHeader("Content-Type", "application/json")
            .method("POST", body)
            .build();
    Response response = client.newCall(request).execute();
    ResponseBody responseBody = response.body();
    if (responseBody != null) {
        BufferedReader reader = new BufferedReader(responseBody.charStream());
        // reactive streaming: generate() reads one line of the response per downstream request
        return Flux.<String>generate(sink -> {
            try {
                String line = reader.readLine();
                if (line == null) {
                    sink.complete();                   // underlying stream is exhausted
                    return;
                }
                line = line.replace("data: ", "");     // strip the SSE "data: " prefix
                JSONObject json = JSON.parseObject(line);
                if (json == null) {
                    sink.next("\u200B");               // blank keep-alive line: emit a zero-width placeholder
                    return;
                }
                String result = json.getString("result");
                if (StringUtils.isNotBlank(result)) {
                    sink.next(result);                 // emit the answer fragment
                } else {
                    sink.next("\u200B");
                }
                if (Boolean.TRUE.equals(json.getBoolean("is_end"))) {
                    sink.complete();                   // last chunk: finish the Flux
                }
            } catch (IOException e) {
                sink.error(e);
            }
        }).doFinally(signalType -> {
            try {
                reader.close();
            } catch (IOException e) {
                // ignore: nothing useful to do if closing the reader fails
            } finally {
                responseBody.close();
            }
        });
    }
    return Flux.empty();
}
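For reference, each line read from the streaming response body is a server-sent-event line of roughly the following shape (values are illustrative and unrelated fields are omitted); the generator strips the data: prefix, emits result, and completes once is_end is true:

data: {"is_end":false,"result":"Hello"}
data: {"is_end":true,"result":"!"}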
The corresponding controller:
@GetMapping(value = "/chat", produces = MediaType.APPLICATION_STREAM_JSON_VALUE)
@CrossOrigin(origins = "*")
public Flux<String> chat(@Valid Message vo) {
    return xxxService.chat(vo);
}
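The service method shown earlier is getAnswer(message, token), so xxxService.chat(vo) is assumed to be a thin wrapper that first obtains the access token (the first endpoint) and then delegates to the streaming call; a minimal sketch:

public Flux<String> chat(Message vo) {
    // fetch (or reuse a cached) access token, then stream the answer
    String token = getAccessToken();
    return getAnswer(vo, token);
}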
The request parameter class:
@Data
@AllArgsConstructor
@NoArgsConstructor
public class Message {
    /**
     * Role of the message author.
     * Currently supported:
     *   user      - the end user
     *   assistant - the chat assistant
     */
    @ApiModelProperty(value = "message role", example = "user", hidden = true)
    private String role = "user";

    /**
     * Message content.
     */
    @ApiModelProperty(value = "message content")
    @NotBlank(message = "content must not be blank")
    private String content;
}
@Data
public class RequestMessage {
    /**
     * Chat context (conversation history).
     */
    List<Message> messages = new ArrayList<>();

    /**
     * Sampling temperature, range (0, 1.0].
     * Higher values make the output more random.
     */
    float temperature = 0.95f;

    /**
     * Nucleus sampling; larger values produce more diverse text.
     * It is recommended to set only one of top_p and temperature, and not to change both at once.
     */
    float top_p = 0.8f;

    /**
     * Penalizes tokens that have already been generated to reduce repetition.
     * Larger values mean a stronger penalty. Range [1.0, 2.0].
     */
    float penalty_score = 1.0f;

    /**
     * Whether the response is returned as a stream.
     */
    boolean stream = true;

    /**
     * System prompt (model persona).
     */
    String system = null;

    /**
     * Unique identifier of the end user, used to monitor and detect abuse and prevent malicious calls to the API.
     */
    String user_id = "";

    public void addMessage(Message message) {
        this.messages.add(message);
    }
}
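For reference, JSON.toJSONString(requestBody) with a single user message produces roughly the following payload (field order may differ, and the null system field is dropped by fastjson's default null handling):

{
  "messages": [
    {"role": "user", "content": "Hello"}
  ],
  "penalty_score": 1.0,
  "stream": true,
  "temperature": 0.95,
  "top_p": 0.8,
  "user_id": ""
}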
After deploying behind a server, if the response is no longer streamed (Nginx buffers the upstream response by default), add proxy_buffering off; to the Nginx configuration:
http {
    include       mime.types;
    default_type  application/octet-stream;
    proxy_buffering off;            # do not buffer upstream responses, so each chunk is flushed to the client immediately
    client_max_body_size 5000M;
    # ... rest of the existing http block ...
}
Then restart Nginx.
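If disabling buffering for the whole http block is too broad, the same directive can instead be scoped to just the location that proxies the streaming endpoint (the path and upstream address below are assumptions):

location /chat {
    proxy_pass http://127.0.0.1:8080;   # assumed Spring Boot upstream
    proxy_buffering off;                # disable buffering only for this route
}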