Added "Request Timeout" config, and moved 'o1' tests into isO1Model()
jukofyork committed Nov 9, 2024
1 parent 75f179b commit b397cdd
Showing 6 changed files with 53 additions and 13 deletions.
@@ -51,17 +51,19 @@ private Constants() {
public static final int DEFAULT_INTERNAL_SPACING = 2;

// =============================================================================

-// Response timeout.
-// NOTE: Needs to be really high as slow models (like `o1-preview`, etc) can take ages to reply.
-public static final int DEFAULT_REQUEST_TIMEOUT = 10*60*1000; // 10 minutes for `o1-preview` seems OK

// Connection timeout.
// NOTE: A short connection timeout stops the preference page from stalling.
public static final int MIN_CONNECTION_TIMEOUT = 500;
public static final int MAX_CONNECTION_TIMEOUT = 10000;
public static final int DEFAULT_CONNECTION_TIMEOUT = 1000;

+// Response timeout.
+// NOTE: The new `o1` models can take ages to reply... 60 seconds should hopefully be enough.
+public static final int MIN_REQUEST_TIMEOUT = 5000;
+public static final int MAX_REQUEST_TIMEOUT = 600000;
+public static final int DEFAULT_REQUEST_TIMEOUT = 60000;

// Temperature value.
// NOTE: Coding LLMs need a much lower (preferably zero) temperature vs chat LLMs.
public static final double MIN_TEMPERATURE = 0.0; // Zero temperature.
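The HTTP wrapper class that consumes these two values is not included in this diff, so the sketch below is only an assumption about how a connection timeout and a request timeout typically map onto java.net.http: the connection timeout is configured once on the client, while the request timeout bounds each individual exchange. All names here are illustrative, not taken from the repository.

import java.io.InputStream;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;

// Illustrative sketch only; not part of the commit.
public class TimeoutSketch {

    public static HttpResponse<InputStream> post(URI endpoint, String apiKey, String body,
                                                 Duration connectionTimeout, Duration requestTimeout)
            throws Exception {
        // Connection timeout: how long to wait for the connection to be established.
        // Kept short so UI code such as the preference page does not stall.
        HttpClient client = HttpClient.newBuilder()
                .connectTimeout(connectionTimeout)
                .build();

        // Request timeout: bounds the whole request/response exchange. Slow models
        // such as `o1-preview` need a far larger value, which is why the commit adds
        // separate MIN/MAX/DEFAULT_REQUEST_TIMEOUT constants above.
        HttpRequest request = HttpRequest.newBuilder(endpoint)
                .timeout(requestTimeout)
                .header("Authorization", "Bearer " + apiKey)
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();

        return client.send(request, HttpResponse.BodyHandlers.ofInputStream());
    }
}

A caller would pass Duration.ofMillis(Preferences.getConnectionTimeout()) and Duration.ofMillis(Preferences.getRequestTimeout()), which matches the call site changed in the next hunk.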
@@ -164,11 +164,11 @@ public Runnable run(ChatConversation chatConversation) {
getApiEndpoint(Preferences.getCurrentApiUrl(), Constants.CHAT_COMPLETION_API_URL).toURI(),
Preferences.getCurrentApiKey(),
Duration.ofMillis(Preferences.getConnectionTimeout()),
-Duration.ofMinutes(Constants.DEFAULT_REQUEST_TIMEOUT));
+Duration.ofMillis(Preferences.getRequestTimeout()));
HttpResponse<InputStream> streamingResponse = httpClientWrapper.sendRequest(
buildChatCompletionRequestBody(modelName, chatConversation));
-// NOTE: We can't use streaming for "o1-mini" or "o1-preview" models.
-if (!Preferences.useStreaming() || modelName.contains("o1-mini") || modelName.contains("o1-preview")) {
+// NOTE: We can't use streaming for 'o1-mini' or 'o1-preview' models.
+if (!Preferences.useStreaming() || isO1Model(modelName)) {
processResponse(streamingResponse);
}
else {
@@ -206,8 +206,8 @@ private String buildChatCompletionRequestBody(String modelName, ChatConversation
requestBody.put("model", modelName);

// Add the message history so far.
-// NOTE: We can't use a system message for "o1-mini" or "o1-preview" models.
-if (!modelName.contains("o1-mini") && !modelName.contains("o1-preview")) {
+// NOTE: We can't use a system message for 'o1-mini' or 'o1-preview' models.
+if (!isO1Model(modelName)) {
var systemMessage = objectMapper.createObjectNode();
systemMessage.put("role", "system");
systemMessage.put("content", PromptLoader.getSystemPromptText());
@@ -242,14 +242,14 @@ private String buildChatCompletionRequestBody(String modelName, ChatConversation
requestBody.set("messages", jsonMessages);

// Add the temperature to the request.
-// NOTE: We can't set temperature for "o1-mini" or "o1-preview" models.
-if (!modelName.contains("o1-mini") && !modelName.contains("o1-preview")) {
+// NOTE: We can't set temperature for 'o1-mini' or 'o1-preview' models.
+if (!isO1Model(modelName)) {
requestBody.put("temperature", Preferences.getCurrentTemperature());
}

// Set the streaming flag.
-// NOTE: We can't use streaming for "o1-mini" or "o1-preview" models.
-if (!Preferences.useStreaming() || modelName.contains("o1-mini") || modelName.contains("o1-preview")) {
+// NOTE: We can't use streaming for 'o1-mini' or 'o1-preview' models.
+if (!Preferences.useStreaming() || isO1Model(modelName)) {
requestBody.put("stream", false);
} else {
requestBody.put("stream", true);
@@ -463,5 +463,19 @@ private static URL getApiEndpoint(String apiUrl, String path) {
throw new RuntimeException("Invalid API base URL", e);
}
}

+/**
+ * Determines if the given model is an O1-series model with limited capabilities.
+ * O1 models have specific restrictions: they cannot use streaming responses,
+ * do not support system messages, and cannot have their temperature parameter modified.
+ *
+ * @param modelName the name of the OpenAI model to check
+ * @return true if the model is an O1-series model (o1-mini or o1-preview),
+ * false otherwise
+ * @since 1.0
+ */
+private static boolean isO1Model(String modelName) {
+return modelName.contains("o1-mini") || modelName.contains("o1-preview");
+}

}
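For illustration, here is what the new helper returns for a few model names. isO1Model() is private, so this standalone snippet simply repeats the same substring check rather than calling into the plugin.

// Standalone demo of the check used by isO1Model(); not part of the commit.
public class IsO1ModelDemo {

    private static boolean isO1Model(String modelName) {
        return modelName.contains("o1-mini") || modelName.contains("o1-preview");
    }

    public static void main(String[] args) {
        System.out.println(isO1Model("o1-mini"));               // true
        System.out.println(isO1Model("o1-preview-2024-09-12")); // true, dated snapshots also match
        System.out.println(isO1Model("gpt-4o"));                // false: streaming, system message
                                                                // and temperature all stay enabled
    }
}

Using contains() rather than equals() means dated snapshot names are treated the same as the base models, which is presumably the intent.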
@@ -5,6 +5,7 @@
*/
public class PreferenceConstants {
public static final String CONNECTION_TIMEOUT = "CONNECTION_TIMEOUT";
+public static final String REQUEST_TIMEOUT = "REQUEST_TIMEOUT";
public static final String CHAT_FONT_SIZE = "CHAT_FONT_SIZE";
public static final String NOTIFICATION_FONT_SIZE = "NOTIFICATION_FONT_SIZE";
public static final String USE_STREAMING = "USE_STREAMING";
@@ -22,6 +22,7 @@ public class PreferenceInitializer extends AbstractPreferenceInitializer {
@Override
public void initializeDefaultPreferences() {
setDefaultConnectionTimeout();
+setDefaultRequestTimeout();
setDefaultChatFontSize();
setDefaultNotificationFontSize();
setDefaultUseStreaming();
@@ -42,6 +43,13 @@ public void initializeDefaultPreferences() {
private void setDefaultConnectionTimeout() {
Preferences.getDefault().setDefault(PreferenceConstants.CONNECTION_TIMEOUT, Constants.DEFAULT_CONNECTION_TIMEOUT);
}

+/**
+ * Sets the default request timeout for API requests.
+ */
+private void setDefaultRequestTimeout() {
+Preferences.getDefault().setDefault(PreferenceConstants.REQUEST_TIMEOUT, Constants.DEFAULT_REQUEST_TIMEOUT);
+}

/**
* Sets the default font size for chat interface elements.
@@ -49,6 +49,7 @@ public class PreferencePage extends FieldEditorPreferencePage implements IWorkbe

/** Field editors for general plugin settings */
private IntegerFieldEditor connectionTimeoutEditor;
+private IntegerFieldEditor requestTimeoutEditor;
private IntegerFieldEditor chatFontSizeEditor;
private IntegerFieldEditor notificationFontSizeEditor;
private BooleanFieldEditor streamingEditor;
@@ -119,6 +120,10 @@ private void createGlobalSettingsGroup(Composite parent) {
connectionTimeoutEditor = new IntegerFieldEditor(PreferenceConstants.CONNECTION_TIMEOUT,
"Connection Timeout (ms):", parent);
connectionTimeoutEditor.setValidRange(Constants.MIN_CONNECTION_TIMEOUT, Constants.MAX_CONNECTION_TIMEOUT);

+requestTimeoutEditor = new IntegerFieldEditor(PreferenceConstants.REQUEST_TIMEOUT,
+"Request Timeout (ms):", parent);
+requestTimeoutEditor.setValidRange(Constants.MIN_REQUEST_TIMEOUT, Constants.MAX_REQUEST_TIMEOUT);

chatFontSizeEditor = new IntegerFieldEditor(PreferenceConstants.CHAT_FONT_SIZE, "Chat Font Size:", parent);
chatFontSizeEditor.setValidRange(Constants.MIN_CHAT_FONT_SIZE, Constants.MAX_CHAT_FONT_SIZE);
@@ -135,6 +140,7 @@ private void createGlobalSettingsGroup(Composite parent) {
BooleanFieldEditor.SEPARATE_LABEL, parent);

addField(connectionTimeoutEditor);
+addField(requestTimeoutEditor);
addField(chatFontSizeEditor);
addField(notificationFontSizeEditor);
addField(streamingEditor);
@@ -62,6 +62,15 @@ public static IPreferenceStore getDefault() {
public static Integer getConnectionTimeout() {
return preferenceStore.getInt(PreferenceConstants.CONNECTION_TIMEOUT);
}

+/**
+ * Returns the request timeout value.
+ *
+ * @return The request timeout value.
+ */
+public static Integer getRequestTimeout() {
+return preferenceStore.getInt(PreferenceConstants.REQUEST_TIMEOUT);
+}

/**
* Returns the chat font size.
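Taken together, the six files wire the new setting through the usual Eclipse preference pattern. The sketch below compresses that flow into one hypothetical method; the class itself is not part of the commit, and it assumes the plugin's Preferences and Constants classes are importable.

import java.time.Duration;

// Hypothetical summary of the new setting's flow; not part of the commit.
// Assumes the plugin's Preferences, Constants and PreferenceConstants classes are on the classpath.
public class RequestTimeoutFlow {

    public static Duration resolveRequestTimeout() {
        // 1. PreferenceInitializer installs Constants.DEFAULT_REQUEST_TIMEOUT (60000 ms)
        //    as the default for the PreferenceConstants.REQUEST_TIMEOUT key.
        // 2. PreferencePage lets the user override it, validated against
        //    MIN_REQUEST_TIMEOUT..MAX_REQUEST_TIMEOUT (5000..600000 ms).
        // 3. The chat client reads whatever value is stored and converts it for the HTTP layer:
        return Duration.ofMillis(Preferences.getRequestTimeout());
    }
}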
