2025-08-27 19:57:44 +08:00
parent 18913dc81f
commit ec6601a12a

@@ -0,0 +1,139 @@
/*
* Copyright (c) 2023-2025, Agents-Flex (fuhai999@gmail.com).
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.agentsflex.llm.gitee;

import com.agentsflex.core.document.Document;
import com.agentsflex.core.llm.BaseLlm;
import com.agentsflex.core.llm.ChatOptions;
import com.agentsflex.core.llm.StreamResponseListener;
import com.agentsflex.core.llm.client.BaseLlmClientListener;
import com.agentsflex.core.llm.client.HttpClient;
import com.agentsflex.core.llm.client.LlmClient;
import com.agentsflex.core.llm.client.LlmClientListener;
import com.agentsflex.core.llm.client.impl.SseClient;
import com.agentsflex.core.llm.embedding.EmbeddingOptions;
import com.agentsflex.core.llm.response.AiMessageResponse;
import com.agentsflex.core.parser.AiMessageParser;
import com.agentsflex.core.prompt.Prompt;
import com.agentsflex.core.store.VectorData;
import com.agentsflex.core.util.LogUtil;
import com.agentsflex.core.util.Maps;
import com.agentsflex.core.util.StringUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.function.Consumer;
public class GiteeAiLlm extends BaseLlm<GiteeAiLlmConfig> {

    private final HttpClient httpClient = new HttpClient();

    // Parsers for the non-streaming (false) and streaming (true) response formats.
    public AiMessageParser aiMessageParser = GiteeAiLlmUtil.getAiMessageParser(false);
    public AiMessageParser streamMessageParser = GiteeAiLlmUtil.getAiMessageParser(true);

    public GiteeAiLlm(GiteeAiLlmConfig config) {
        super(config);
    }
    @Override
    public AiMessageResponse chat(Prompt prompt, ChatOptions options) {
        // Build the request headers, allowing callers to customize them via headersConfig.
        Map<String, String> headers = new HashMap<>();
        headers.put("Content-Type", "application/json");
        headers.put("Authorization", "Bearer " + getConfig().getApiKey());

        Consumer<Map<String, String>> headersConfig = config.getHeadersConfig();
        if (headersConfig != null) {
            headersConfig.accept(headers);
        }

        String payload = GiteeAiLlmUtil.promptToPayload(prompt, config, options, false);
        if (config.isDebug()) {
            LogUtil.println(">>>>send payload:" + payload);
        }

        // Send a synchronous chat-completions request; the model from options takes precedence over the config default.
        String endpoint = config.getEndpoint();
        String model = Optional.ofNullable(options.getModel()).orElse(config.getModel());
        String response = httpClient.post(endpoint + "/api/serverless/" + model + "/chat/completions", headers, payload);
        if (config.isDebug()) {
            LogUtil.println(">>>>receive payload:" + response);
        }

        if (StringUtil.noText(response)) {
            return AiMessageResponse.error(prompt, response, "no content for response.");
        }

        // Parse the response body and propagate any error details returned by the API.
        JSONObject jsonObject = JSON.parseObject(response);
        JSONObject error = jsonObject.getJSONObject("error");

        AiMessageResponse messageResponse = new AiMessageResponse(prompt, response, aiMessageParser.parse(jsonObject));

        if (error != null && !error.isEmpty()) {
            messageResponse.setError(true);
            messageResponse.setErrorMessage(error.getString("message"));
            messageResponse.setErrorType(error.getString("type"));
            messageResponse.setErrorCode(error.getString("code"));
        }

        return messageResponse;
    }
    @Override
    public void chatStream(Prompt prompt, StreamResponseListener listener, ChatOptions options) {
        // Stream responses over SSE; BaseLlmClientListener bridges SSE events to the StreamResponseListener.
        LlmClient llmClient = new SseClient();
        Map<String, String> headers = new HashMap<>();
        headers.put("Content-Type", "application/json");
        headers.put("Authorization", "Bearer " + getConfig().getApiKey());

        String payload = GiteeAiLlmUtil.promptToPayload(prompt, config, options, true);

        String endpoint = config.getEndpoint();
        String model = Optional.ofNullable(options.getModel()).orElse(config.getModel());

        LlmClientListener clientListener = new BaseLlmClientListener(this, llmClient, listener, prompt, streamMessageParser);
        llmClient.start(endpoint + "/api/serverless/" + model + "/chat/completions", headers, payload, clientListener, config);
    }
    @Override
    public VectorData embed(Document document, EmbeddingOptions options) {
        Map<String, String> headers = new HashMap<>();
        headers.put("Content-Type", "application/json");
        headers.put("Authorization", "Bearer " + getConfig().getApiKey());

        // The embeddings endpoint receives the document text under the "inputs" key.
        String payload = Maps.of("inputs", document.getContent()).toJSON();

        String endpoint = config.getEndpoint();
        String embeddingModel = options.getModelOrDefault(config.getDefaultEmbeddingModal());
        String response = httpClient.post(endpoint + "/api/serverless/" + embeddingModel + "/embeddings", headers, payload);
        if (config.isDebug()) {
            LogUtil.println(">>>>receive payload:" + response);
        }

        if (StringUtil.noText(response)) {
            return null;
        }

        // The response body is parsed as a JSON array of doubles representing the embedding vector.
        VectorData vectorData = new VectorData();
        double[] embedding = JSONObject.parseObject(response, double[].class);
        vectorData.setVector(embedding);
        return vectorData;
    }
}
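
For reference, a minimal usage sketch of the new class (not part of this commit). It assumes GiteeAiLlmConfig exposes setApiKey/setModel setters, that TextPrompt(String) is available in com.agentsflex.core.prompt, and that AiMessageResponse exposes isError/getErrorMessage/getMessage getters matching the setters used above; the API key and model name are placeholders.

import com.agentsflex.core.llm.ChatOptions;
import com.agentsflex.core.llm.response.AiMessageResponse;
import com.agentsflex.core.prompt.TextPrompt;
import com.agentsflex.llm.gitee.GiteeAiLlm;
import com.agentsflex.llm.gitee.GiteeAiLlmConfig;

public class GiteeAiLlmExample {

    public static void main(String[] args) {
        // Placeholder credentials and model name; replace with real values.
        GiteeAiLlmConfig config = new GiteeAiLlmConfig();
        config.setApiKey("your-gitee-ai-api-key");   // assumed setter
        config.setModel("Qwen2.5-72B-Instruct");     // assumed setter; any Gitee AI serverless model id

        GiteeAiLlm llm = new GiteeAiLlm(config);

        // Synchronous chat; a model set on ChatOptions would override the config default.
        AiMessageResponse response = llm.chat(new TextPrompt("Hello, Gitee AI"), new ChatOptions());

        // Accessor names assumed from the setters used in GiteeAiLlm above.
        if (response.isError()) {
            System.out.println("chat failed: " + response.getErrorMessage());
        } else {
            System.out.println(response.getMessage());
        }
    }
}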