
feat: add LLM output for the AI ML component to use ChatOpenAI base model #3230

Merged · 4 commits · Aug 7, 2024
Changes from 2 commits
src/backend/base/langflow/components/models/AIMLModel.py (206 changes: 92 additions & 114 deletions)

@@ -1,148 +1,126 @@
-import json
-import httpx
-from langflow.base.models.aiml_constants import AIML_CHAT_MODELS
-from langflow.custom.custom_component.component import Component
+import operator
+from functools import reduce

-from langflow.inputs.inputs import FloatInput, IntInput, MessageInput, SecretStrInput
-from langflow.schema.message import Message
-from langflow.template.field.base import Output
-from loguru import logger
+from langflow.field_typing.range_spec import RangeSpec
+from langchain_openai import ChatOpenAI
 from pydantic.v1 import SecretStr

+from langflow.base.models.aiml_constants import AIML_CHAT_MODELS
+from langflow.base.models.model import LCModelComponent
+from langflow.field_typing import LanguageModel
+from langflow.inputs import (
+    BoolInput,
+    DictInput,
+    DropdownInput,
+    FloatInput,
+    IntInput,
+    SecretStrInput,
+    StrInput,
+)


-class AIMLModelComponent(Component):
-    display_name = "AI/ML API"
-    description = "Generates text using the AI/ML API"
-    icon = "AI/ML"
-    chat_completion_url = "https://api.aimlapi.com/v1/chat/completions"
-
-    outputs = [
-        Output(display_name="Text", name="text_output", method="make_request"),
-    ]
+class AIMLModelComponent(LCModelComponent):
+    display_name = "AIML"
+    description = "Generates text using AIML LLMs."
+    icon = "AIML"
+    name = "AIMLModel"
+    documentation = "https://docs.aimlapi.com/api-reference/examples"

-    inputs = [
-        DropdownInput(
-            name="model_name",
-            display_name="Model Name",
-            options=AIML_CHAT_MODELS,
-            required=True,
-        ),
-        SecretStrInput(
-            name="aiml_api_key",
-            display_name="AI/ML API Key",
-            value="AIML_API_KEY",
-        ),
-        MessageInput(name="input_value", display_name="Input", required=True),
+    inputs = LCModelComponent._base_inputs + [
         IntInput(
             name="max_tokens",
             display_name="Max Tokens",
             advanced=True,
             info="The maximum number of tokens to generate. Set to 0 for unlimited tokens.",
+            range_spec=RangeSpec(min=0, max=128000),
         ),
-        StrInput(
-            name="stop_tokens",
-            display_name="Stop Tokens",
-            info="Comma-separated list of tokens to signal the model to stop generating text.",
-        ),
+        DictInput(name="model_kwargs", display_name="Model Kwargs", advanced=True),

Review comment (Collaborator) on model_kwargs:
Hi @vasconceloscezar, thanks for the improvements. Sorry, I was out of town last week or I would have taken a look then.

You may be more familiar with the AI/ML API than I am, so I just wanted to ask some clarifying questions here.

IIUC, the AI/ML API only supports certain parameters that are common to all supported models (https://docs.aimlapi.com/api-reference/parameters). Does allowing model_kwargs here give the user the false(?) impression that model-specific args are allowed?
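
Note: if model-specific args are indeed unsupported, one option would be to filter model_kwargs down to the documented common parameters. A hypothetical sketch, not part of this PR — the parameter names below are assumed from the linked docs page and should be verified:

    from loguru import logger

    # Assumed whitelist based on https://docs.aimlapi.com/api-reference/parameters
    # (verify against the current docs before relying on it).
    AIML_COMMON_PARAMS = {"max_tokens", "temperature", "top_p", "stop", "stream",
                          "frequency_penalty", "presence_penalty"}

    def filter_model_kwargs(model_kwargs: dict) -> dict:
        """Drop kwargs the AI/ML API does not list as common to all models."""
        dropped = set(model_kwargs) - AIML_COMMON_PARAMS
        if dropped:
            logger.warning(f"Ignoring kwargs not documented by the AI/ML API: {sorted(dropped)}")
        return {k: v for k, v in model_kwargs.items() if k in AIML_COMMON_PARAMS}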

+        BoolInput(
+            name="json_mode",
+            display_name="JSON Mode",
+            advanced=True,
+            info="If True, it will output JSON regardless of passing a schema.",
+        ),

Review comment (Collaborator) on json_mode:
I see this error using this change out of the box:

    /Users/jordan.frazier/Documents/Langflow/langflow/.venv/lib/python3.11/site-packages/langchain_core/utils/utils.py:234: UserWarning: WARNING! json_mode is not default parameter.
                json_mode was transferred to model_kwargs.
                Please confirm that json_mode is what you intended.
      warnings.warn(
    [08/12/24 09:44:42] ERROR - base - Completions.create() got an unexpected keyword argument 'json_mode' (base.py:687)

- I believe json_mode is set differently using ChatOpenAI.
- Did you test this and it worked for you? Just want to make sure I didn't accidentally change anything prior to testing.
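
Note: with ChatOpenAI, JSON output is typically requested through OpenAI's response_format parameter rather than a json_mode keyword. A minimal sketch, assuming an OpenAI-compatible endpoint and a model that supports JSON mode (model id and key are placeholders):

    from langchain_openai import ChatOpenAI

    llm = ChatOpenAI(
        model="gpt-4o",                      # placeholder model id
        api_key="YOUR_AIML_API_KEY",         # placeholder key
        base_url="https://api.aimlapi.com",
    )
    # response_format is forwarded to the chat-completions API;
    # the bound model should then return a JSON object.
    json_llm = llm.bind(response_format={"type": "json_object"})
    result = json_llm.invoke("Reply in JSON with a 'greeting' key.")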
-        IntInput(
-            name="top_k",
-            display_name="Top K",
-            info="Limits token selection to top K. (Default: 40)",
-            advanced=True,
-        ),
+        DictInput(
+            name="output_schema",
+            is_list=True,
+            display_name="Schema",
+            advanced=True,
+            info="The schema for the Output of the model. You must pass the word JSON in the prompt. If left blank, JSON mode will be disabled.",
+        ),
-        FloatInput(
-            name="top_p",
-            display_name="Top P",
-            info="Works together with top-k. (Default: 0.9)",
-            advanced=True,
-        ),
+        DropdownInput(
+            name="model_name",
+            display_name="Model Name",
+            advanced=False,
+            options=AIML_CHAT_MODELS,
+            value=AIML_CHAT_MODELS[0],
+        ),
-        FloatInput(
-            name="repeat_penalty",
-            display_name="Repeat Penalty",
-            info="Penalty for repetitions in generated text. (Default: 1.1)",
-            advanced=True,
-        ),
+        StrInput(
+            name="aiml_api_base",
+            display_name="AIML API Base",
+            advanced=True,
+            info="The base URL of the OpenAI API. Defaults to https://api.aimlapi.com . You can change this to use other APIs like JinaChat, LocalAI and Prem.",
+        ),

Review comment (Collaborator) on aiml_api_base:
I believe the intent of this component was to only hit the AI/ML API, so I would lean towards removing this parameter. If users want to use a different base, we can add another component for that or use the generic OpenAI component. (I'm glad using the base_url here works with the aiml base - I tried with the OpenAIEmbeddings class and had issues.)
-        FloatInput(
-            name="temperature",
-            display_name="Temperature",
-            value=0.2,
-            info="Controls the creativity of model responses.",
-        ),
+        SecretStrInput(
+            name="api_key",
+            display_name="AIML API Key",
+            info="The AIML API Key to use for the OpenAI model.",
+            advanced=False,
+            value="AIML_API_KEY",
+        ),

Review comment (Collaborator) on the api_key info text:
This shouldn't be specific to the OpenAI model.
-        StrInput(
-            name="system_message",
-            display_name="System Message",
-            info="System message to pass to the model.",
-            advanced=True,
-        ),
+        FloatInput(name="temperature", display_name="Temperature", value=0.1),
+        IntInput(
+            name="seed",
+            display_name="Seed",
+            info="The seed controls the reproducibility of the job.",
+            advanced=True,
+            value=1,
+        ),
     ]

-    def make_request(self) -> Message:
-        api_key = SecretStr(self.aiml_api_key) if self.aiml_api_key else None
-
-        headers = {
-            "Content-Type": "application/json",
-            "Authorization": f"Bearer {api_key.get_secret_value()}" if api_key else "",
-        }
-
-        messages = []
-        if self.system_message:
-            messages.append({"role": "system", "content": self.system_message})
+    def build_model(self) -> LanguageModel:  # type: ignore[type-var]
+        output_schema_dict: dict[str, str] = reduce(operator.ior, self.output_schema or {}, {})
+        aiml_api_key = self.api_key
+        temperature = self.temperature
+        model_name: str = self.model_name
+        max_tokens = self.max_tokens
+        model_kwargs = self.model_kwargs or {}
+        aiml_api_base = self.aiml_api_base or "https://api.aimlapi.com"
+        json_mode = bool(output_schema_dict) or self.json_mode

CI annotation — GitHub Actions / Ruff Style Check (3.12), on line 89:
Ruff (F841): src/backend/base/langflow/components/models/AIMLModel.py:89:9: Local variable `json_mode` is assigned to but never used

Review comment (Collaborator) on output_schema:
It seems like output_schema exists only to determine whether to set json_mode. I do have the concern that even if this is fixed, the AI/ML API wouldn't support it (or would only for specific models, which may be confusing for users).

+        seed = self.seed

CI annotation — GitHub Actions / Ruff Style Check (3.12), on line 90:
Ruff (F841): src/backend/base/langflow/components/models/AIMLModel.py:90:9: Local variable `seed` is assigned to but never used
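
Note: both F841 warnings point at the same gap — json_mode and seed are computed but never reach the ChatOpenAI constructor. One possible fix, sketched only (it assumes the AI/ML endpoint honors OpenAI's seed and response_format parameters; seed and model_kwargs are real ChatOpenAI fields in langchain-openai):

    extra_kwargs = dict(model_kwargs)
    if json_mode:
        # ChatOpenAI has no json_mode flag; request JSON output via response_format.
        extra_kwargs["response_format"] = {"type": "json_object"}

    model = ChatOpenAI(
        model=model_name,
        temperature=temperature,
        api_key=openai_api_key,
        base_url=aiml_api_base,
        max_tokens=max_tokens or None,
        seed=seed,
        model_kwargs=extra_kwargs,
    )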

-        if self.input_value:
-            if isinstance(self.input_value, Message):
-                # Though we aren't using langchain here, the helper method is useful
-                message = self.input_value.to_lc_message()
-                if message.type == "human":
-                    messages.append({"role": "user", "content": message.content})
-                else:
-                    raise ValueError(f"Expected user message, saw: {message.type}")
-            else:
-                raise TypeError(f"Expected Message type, saw: {type(self.input_value)}")
-        else:
-            raise ValueError("Please provide an input value")
+        if isinstance(aiml_api_key, SecretStr):
+            openai_api_key = aiml_api_key.get_secret_value()

Review comment (Collaborator):
nit: the variable name should stay aiml_api_key

+        else:
+            openai_api_key = aiml_api_key

-        payload = {
-            "model": self.model_name,
-            "messages": messages,
-            "max_tokens": self.max_tokens or None,
-            "temperature": self.temperature or 0.2,
-            "top_k": self.top_k or 40,
-            "top_p": self.top_p or 0.9,
-            "repeat_penalty": self.repeat_penalty or 1.1,
-            "stop_tokens": self.stop_tokens or None,
-        }
+        model = ChatOpenAI(

Review comment (Collaborator) on the ChatOpenAI call:
I was curious about this usage - does using OpenAI's implementation have any compatibility issues with other models?

+            model=model_name,
+            temperature=temperature,
+            api_key=openai_api_key,
+            base_url=aiml_api_base,
+            max_tokens=max_tokens or None,
+            **model_kwargs,
+        )
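
Note on the compatibility question above: ChatOpenAI only speaks the OpenAI chat-completions wire format, so any model served behind an OpenAI-compatible endpoint should work for the common parameters; model-specific features are what may not carry over. A minimal smoke test, with a placeholder key:

    from langchain_openai import ChatOpenAI
    from langflow.base.models.aiml_constants import AIML_CHAT_MODELS

    llm = ChatOpenAI(
        model=AIML_CHAT_MODELS[0],           # the component's default model
        api_key="YOUR_AIML_API_KEY",         # placeholder
        base_url="https://api.aimlapi.com",
    )
    print(llm.invoke("Say hello in one short sentence.").content)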

-        try:
-            response = httpx.post(self.chat_completion_url, headers=headers, json=payload)
-            try:
-                response.raise_for_status()
-                result_data = response.json()
-                choice = result_data["choices"][0]
-                result = choice["message"]["content"]
-            except httpx.HTTPStatusError as http_err:
-                logger.error(f"HTTP error occurred: {http_err}")
-                raise http_err
-            except httpx.RequestError as req_err:
-                logger.error(f"Request error occurred: {req_err}")
-                raise req_err
-            except json.JSONDecodeError:
-                logger.warning("Failed to decode JSON, response text: {response.text}")
-                result = response.text
-            except KeyError as key_err:
-                logger.warning(f"Key error: {key_err}, response content: {result_data}")
-                raise key_err
+        return model  # type: ignore

-            self.status = result
-        except httpx.TimeoutException:
-            return Message(text="Request timed out.")
-        except Exception as exc:
-            logger.error(f"Error: {exc}")
-            raise
-
-        return Message(text=result)
+    def _get_exception_message(self, e: Exception):
+        """
+        Get a message from an OpenAI exception.
+
+        Args:
+            exception (Exception): The exception to get the message from.
+
+        Returns:
+            str: The message from the exception.
+        """
+        try:
+            from openai.error import BadRequestError
+        except ImportError:
+            return None
+        if isinstance(e, BadRequestError):
+            message = e.json_body.get("error", {}).get("message", "")  # type: ignore
+            if message:
+                return message
+        return None