Set the temperature of the AI model

chenxudong 2025-04-08 11:16:19 +08:00
parent 8dab45c624
commit c695c3b404
1 changed file with 20 additions and 3 deletions

@@ -30,6 +30,7 @@ import org.springframework.ai.chat.prompt.Prompt;
 import org.springframework.ai.document.Document;
 import org.springframework.ai.document.DocumentReader;
 import org.springframework.ai.ollama.OllamaChatModel;
+import org.springframework.ai.ollama.api.OllamaOptions;
 import org.springframework.ai.reader.TextReader;
 import org.springframework.ai.reader.pdf.PagePdfDocumentReader;
 import org.springframework.ai.transformer.splitter.TokenTextSplitter;
@@ -241,7 +242,16 @@ public class ChatService {
         UserThreadLocal.set(userLoginInfo);
         aiQuestionRecordMapper.insert(record);
         UserThreadLocal.remove();
-        return ChatClient.builder(model).defaultAdvisors(messageChatMemoryAdvisor, questionAnswerAdvisor).build().prompt(queryDTO.getMsg()).stream().content();
+        return ChatClient.builder(model)
+                .defaultAdvisors(messageChatMemoryAdvisor, questionAnswerAdvisor)
+                .defaultOptions(OllamaOptions
+                        .builder()
+                        .temperature(0.3)
+                        .build())
+                .build()
+                .prompt(queryDTO.getMsg())
+                .stream()
+                .content();
     }
 
     public Flux<ChatResponse> chatStreamResponse(QueryDTO queryDTO) {
@@ -251,8 +261,15 @@ public class ChatService {
         UserThreadLocal.set(userLoginInfo);
         UserThreadLocal.remove();
         aiQuestionRecordMapper.insert(record);
-        return ChatClient.builder(model).defaultAdvisors(messageChatMemoryAdvisor, questionAnswerAdvisor).build()
+        return ChatClient.builder(model)
+                .defaultAdvisors(messageChatMemoryAdvisor, questionAnswerAdvisor)
+                .defaultOptions(OllamaOptions
+                        .builder()
+                        .temperature(0.3)
+                        .build())
+                .build()
                 .prompt(new Prompt(new UserMessage(queryDTO.getMsg())))
-                .stream().chatResponse();
+                .stream()
+                .chatResponse();
     }
 }
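
For context, temperature controls how much randomness the model uses when sampling: lower values such as the 0.3 set here make responses more deterministic and focused, while values closer to 1.0 make them more varied. Below is a minimal sketch, not part of this repository, of how the value could be read from Spring configuration instead of being hard-coded in both methods; the property name app.ai.temperature and the ChatClientFactory helper are illustrative assumptions.

    import org.springframework.ai.chat.client.ChatClient;
    import org.springframework.ai.ollama.OllamaChatModel;
    import org.springframework.ai.ollama.api.OllamaOptions;
    import org.springframework.beans.factory.annotation.Value;
    import org.springframework.stereotype.Component;

    @Component
    public class ChatClientFactory {

        // Hypothetical property; defaults to 0.3, the value used in this commit.
        @Value("${app.ai.temperature:0.3}")
        private double temperature;

        // Builds a ChatClient with the configured temperature as its default option.
        public ChatClient buildClient(OllamaChatModel model) {
            return ChatClient.builder(model)
                    .defaultOptions(OllamaOptions.builder()
                            .temperature(temperature)
                            .build())
                    .build();
        }
    }

With a helper like this, both chat methods could share one builder chain and only differ in the advisors and prompt they pass; whether that fits depends on how the rest of ChatService wires its advisors.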