Commit 12975bb
Address reviewer comments.
Signed-off-by: Austin Lee <austin@aryn.ai>
austintlee committed Apr 23, 2024
1 parent 59193e9 commit 12975bb
Showing 2 changed files with 5 additions and 10 deletions.
File 1 of 2:

@@ -119,7 +119,7 @@ public void processResponseAsync(
         PipelineProcessingContext requestContext,
         ActionListener<SearchResponse> responseListener
     ) {
-        log.info("Entering processResponse.");
+        log.debug("Entering processResponse.");

         if (!this.featureFlagSupplier.getAsBoolean()) {
             throw new MLException(GenerativeQAProcessorConstants.FEATURE_NOT_ENABLED_ERROR_MSG);
@@ -132,7 +132,7 @@ public void processResponseAsync(
             t = DEFAULT_PROCESSOR_TIME_IN_SECONDS;
         }
         final int timeout = t;
-        log.info("Timeout for this request: {} seconds.", timeout);
+        log.debug("Timeout for this request: {} seconds.", timeout);

         String llmQuestion = params.getLlmQuestion();
         String llmModel = params.getLlmModel() == null ? this.llmModel : params.getLlmModel();
@@ -144,13 +144,12 @@ public void processResponseAsync(
         if (conversationId != null && !Strings.hasText(conversationId)) {
             throw new IllegalArgumentException("Empty conversation_id is not allowed.");
         }
-        // log.info("LLM question: {}, LLM model {}, conversation id: {}", llmQuestion, llmModel, conversationId);
         Instant start = Instant.now();
         Integer interactionSize = params.getInteractionSize();
         if (interactionSize == null || interactionSize == GenerativeQAParameters.SIZE_NULL_VALUE) {
             interactionSize = DEFAULT_CHAT_HISTORY_WINDOW;
         }
-        log.info("Using interaction size of {}", interactionSize);
+        log.debug("Using interaction size of {}", interactionSize);

         Integer topN = params.getContextSize();
         if (topN == null) {
@@ -168,9 +167,6 @@ public void processResponseAsync(
             effectiveUserInstructions = params.getUserInstructions();
         }

-        // log.info("system_prompt: {}", systemPrompt);
-        // log.info("user_instructions: {}", userInstructions);
-
         final List<Interaction> chatHistory = new ArrayList<>();
         if (conversationId == null) {
             doChatCompletion(
@@ -194,7 +190,7 @@ public void processResponseAsync(
         } else {
             final Instant memoryStart = Instant.now();
             memoryClient.getInteractions(conversationId, interactionSize, ActionListener.wrap(r -> {
-                log.info("getInteractions complete. ({})", getDuration(memoryStart));
+                log.debug("getInteractions complete. ({})", getDuration(memoryStart));
                 chatHistory.addAll(r);
                 doChatCompletion(
                     LlmIOUtil
@@ -231,7 +227,7 @@ private void doChatCompletion(
         llm.doChatCompletion(input, new ActionListener<>() {
             @Override
             public void onResponse(ChatCompletionOutput output) {
-                log.info("doChatCompletion complete. ({})", getDuration(chatStart));
+                log.debug("doChatCompletion complete. ({})", getDuration(chatStart));

                 final String answer = getAnswer(output);
                 final String errorMessage = getError(output);
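Note on visibility: these messages now log at debug rather than info, so with OpenSearch's default logger levels they no longer appear in the server log. Below is a minimal sketch (not part of this commit) of how a test or debugging session could raise just this logger to DEBUG using Log4j2, which OpenSearch uses; the logger name is an assumption based on the processor's class name and may differ in your build.

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.config.Configurator;

public final class EnableRagDebugLogging {
    public static void main(String[] args) {
        // Assumed logger name: Log4j2 loggers are normally named after the fully
        // qualified class that created them, so point this at the actual package
        // of GenerativeQAResponseProcessor in your checkout.
        String logger = "org.opensearch.searchpipelines.questionanswering.generative.GenerativeQAResponseProcessor";

        // Raise only this logger to DEBUG; every other logger keeps its configured level.
        Configurator.setLevel(logger, Level.DEBUG);
    }
}

On a running cluster the same effect is usually achieved without code, by setting the dynamic logger.<package name> cluster setting to debug.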
File 2 of 2:

@@ -89,7 +89,6 @@ public void onResponse(MLOutput mlOutput) {
                     .getMlModelTensors()
                     .get(0)
                     .getDataAsMap();
-                // log.info("dataAsMap: {}", dataAsMap.toString());
                 listener
                     .onResponse(
                         buildChatCompletionOutput(
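The second file's change simply deletes a commented-out dump of dataAsMap. As an illustration only (not part of this commit), the usual replacement for such commented-out diagnostics is a parameterized debug statement, which costs essentially nothing at the default level because the argument is only rendered when debug logging is enabled:

// Illustrative sketch: parameterized logging defers formatting, so the map's
// toString() is not invoked unless the debug level is actually enabled.
log.debug("dataAsMap: {}", dataAsMap);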
