package fr.paris.lutece.plugins.knowledge.service;

import java.net.InetSocketAddress;
import java.net.Proxy;
import java.net.SocketAddress;
import java.time.Duration;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;

import javax.servlet.http.HttpServletRequest;

import org.apache.commons.lang3.tuple.Pair;
import org.glassfish.jersey.media.sse.EventOutput;

import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.MemoryId;
import dev.langchain4j.service.TokenStream;
import dev.langchain4j.service.UserMessage;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;

import fr.paris.lutece.plugins.knowledge.business.Bot;
import fr.paris.lutece.plugins.knowledge.business.BotHome;
import fr.paris.lutece.plugins.knowledge.business.Dataset;
import fr.paris.lutece.plugins.knowledge.business.DatasetHome;
import fr.paris.lutece.plugins.knowledge.rs.BotResponse;
import fr.paris.lutece.plugins.knowledge.rs.RequestData;
import fr.paris.lutece.plugins.knowledge.service.ChatMemoryService.PersistentChatMemoryStore;

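/**
 * Service in charge of running streaming chat exchanges between a user and a configured bot,
 * optionally enriching the prompt with embeddings retrieved from the bot's dataset.
 */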
public class ChatService
{

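    /**
     * Runs a chat exchange for the given request and streams the generated answer to the client
     * through the bot response. When the bot is linked to a dataset, the question is enriched
     * with the most relevant embeddings before being sent to the model.
     * 
     * @param request
     *            the HTTP request
     * @param data
     *            the request data containing the bot id and the user question
     * @param output
     *            the SSE event output used to stream the answer
     * @param sessionId
     *            the id of the chat session
     * @return a future completed when the streamed answer is finished, or completed exceptionally on error
     */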
    public static CompletableFuture<Void> run( HttpServletRequest request, RequestData data, EventOutput output, String sessionId )
    {
        BotResponse botResponse = new BotResponse( output, request.getSession( ) );
        botResponse.initStep( 0, Constant.STEP_CHAT );
        CompletableFuture<Void> stepFuture = new CompletableFuture<>( );
        int botId = Integer.parseInt( data.getBotId( ) );
        Bot bot = BotHome.findByPrimaryKey( botId ).get( );

        // Persistent chat memory store and memory id for the current session
        Pair<PersistentChatMemoryStore, String> chatMemoryStore = ChatMemoryService.getChatMemory( request, data, bot, sessionId );

        ChatMemoryProvider chatMemoryProvider = memoryId -> MessageWindowChatMemory.builder( ).id( memoryId ).maxMessages( 10 )
                .chatMemoryStore( chatMemoryStore.getLeft( ) ).build( );
        StreamingChatLanguageModel chatLanguageModel = buildChatLanguageModel( bot );
        StreamingAssistant assistant = AiServices.builder( StreamingAssistant.class ).streamingChatLanguageModel( chatLanguageModel ).chatMemoryProvider( chatMemoryProvider )
                .build( );

        // When the bot is linked to a dataset, enrich the question with relevant embeddings
        if ( bot.getDatasetId( ) != 0 )
        {
            Dataset dataSet = DatasetHome.findByPrimaryKey( bot.getDatasetId( ) ).get( );
            EmbeddingStore<TextSegment> embeddingStore = ElasticStoreService.getEmbeddingStore( bot.getDatasetId( ) );
            String promptText = generatePromptText( data, embeddingStore, dataSet );
            return processChatStream( assistant, botResponse, stepFuture, promptText, chatMemoryStore.getRight( ) );
        }
        else
        {
            return processChatStream( assistant, botResponse, stepFuture, data.getQuestion( ), chatMemoryStore.getRight( ) );
        }
    }

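    /**
     * Builds the OpenAI streaming chat model for the given bot, routing calls through the
     * configured HTTP proxy when a proxy host and port are defined.
     * 
     * @param bot
     *            the bot whose model id is used
     * @return the streaming chat language model
     */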
    private static StreamingChatLanguageModel buildChatLanguageModel( Bot bot )
    {
        // Route OpenAI calls through the configured HTTP proxy when one is defined
        if ( Constant.PROXY_HOST != null && Constant.PROXY_PORT != null )
        {
            SocketAddress proxyAddress = new InetSocketAddress( Constant.PROXY_HOST, Integer.parseInt( Constant.PROXY_PORT ) );
            Proxy proxy = new Proxy( Proxy.Type.HTTP, proxyAddress );
            return OpenAiStreamingChatModel.builder( ).proxy( proxy ).apiKey( Constant.API_KEY ).modelName( bot.getModelId( ) ).temperature( 0.0 )
                    .timeout( Duration.ofSeconds( 30 ) ).build( );
        }
        else
        {
            return OpenAiStreamingChatModel.builder( ).apiKey( Constant.API_KEY ).modelName( bot.getModelId( ) ).temperature( 0.0 )
                    .timeout( Duration.ofSeconds( 30 ) ).build( );
        }
    }

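    /**
     * Sends the input text to the assistant and forwards each generated token to the bot
     * response as it arrives, completing the future when the stream ends or fails.
     * 
     * @param assistant
     *            the streaming assistant
     * @param botResponse
     *            the response used to push partial answers to the client
     * @param stepFuture
     *            the future to complete when the stream ends
     * @param inputText
     *            the prompt or question sent to the model
     * @param memoryId
     *            the chat memory id of the current conversation
     * @return the future completed when streaming is done
     */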
    private static CompletableFuture<Void> processChatStream( StreamingAssistant assistant, BotResponse botResponse, CompletableFuture<Void> stepFuture,
            String inputText, String memoryId )
    {
        TokenStream chatStream = assistant.chat( memoryId, inputText );
        StringBuilder responseBuilder = new StringBuilder( );
        chatStream.onNext( token -> {
            // Push the partial answer to the client as each token arrives
            responseBuilder.append( token );
            botResponse.updateStep( 0, responseBuilder.toString( ) );
        } ).onComplete( response -> {
            stepFuture.complete( null );
        } ).onError( e -> {
            botResponse.updateStep( 0, e.getMessage( ) );
            stepFuture.completeExceptionally( e );
        } ).start( );
        return stepFuture;
    }

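    /**
     * Builds the QA prompt for the user question: embeds the question, retrieves the most
     * relevant segments from the embedding store and injects them, together with the dataset
     * instructions, into the prompt template.
     * 
     * @param data
     *            the request data containing the user question
     * @param embeddingStore
     *            the embedding store of the bot's dataset
     * @param dataSet
     *            the dataset providing the search and instruction settings
     * @return the generated prompt text
     */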
    private static String generatePromptText( RequestData data, EmbeddingStore<TextSegment> embeddingStore, Dataset dataSet )
    {
        Response<Embedding> questionEmbedding = ElasticStoreService.getEmbeddingModel( ).embed( data.getQuestion( ) );
        List<EmbeddingMatch<TextSegment>> relevantEmbeddings = embeddingStore.findRelevant( questionEmbedding.content( ), dataSet.getSearchMaxRecord( ), 0.7 );
        String embeddingMatchText = relevantEmbeddings.stream( ).map( match -> match.embedded( ).text( ) ).collect( Collectors.joining( "\n\n" ) );
        // Source file names are currently hardcoded to a single document
        List<String> fileNamesSources = Collections.singletonList( "lutece.pdf" );
        return PromptUtils.generateQAPrompt( data.getQuestion( ), embeddingMatchText, fileNamesSources, dataSet.getMatchInstruction( ),
                dataSet.getMismatchInstruction( ) ).text( );
    }

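    /**
     * AI service interface backed by the streaming chat model; each call is bound to a chat
     * memory through its memory id.
     */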
    interface StreamingAssistant
    {
        TokenStream chat( @MemoryId String memoryId, @UserMessage String userMessage );
    }
}