Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: update assistants components and add integrations tests #3887

Merged
merged 2 commits into from
Sep 24, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
from .create_assistant import AssistantsCreateAssistant
from .create_thread import AssistantsCreateThread
from .dotenv import Dotenv
from .get_assistant import AssistantsGetAssistantName
from .list_assistants import AssistantsListAssistants
from .run import AssistantsRun
Expand All @@ -8,6 +10,8 @@
"AssistantsCreateAssistant",
"AssistantsGetAssistantName",
"AssistantsListAssistants",
"AssistantsCreateThread",
"AssistantsRun",
"GetEnvVar",
"Dotenv",
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I looked over at the Dotenv component, and I'm curious how it works.

Q) Why does it need an output method?
Q) If I have a .env file I've loaded up during my langflow run --env .env, and I use this component with a separate env file, will it replace that entire set, or union the two?

I see why this is needed though - if I can't do langflow run myself (in Astra), we need an easy way to pass env vars.



class Dotenv(Component):
    """Component that loads a pasted .env file's contents into process env vars."""

    display_name = "Dotenv"
    description = "Load .env file into env vars"

    inputs = [
        MultilineSecretInput(
            name="dotenv_file_content",
            display_name="Dotenv file content",
            info="Paste the content of your .env file directly, since contents are sensitive, using a Global variable set as 'password' is recommended",
        )
    ]

    outputs = [
        Output(display_name="env_set", name="env_set", method="process_inputs"),
    ]

    def process_inputs(self) -> Message:
        """Parse the pasted .env text and export its variables, reporting the outcome.

        Returns a Message whose text indicates whether any variable was loaded.
        """
        # load_dotenv accepts any readable stream, so wrap the raw text in StringIO
        # instead of touching the filesystem.
        env_stream = io.StringIO(self.dotenv_file_content)
        # override=True: values in the pasted content win over pre-existing env vars.
        loaded = load_dotenv(stream=env_stream, override=True)
        status = "Loaded .env" if loaded else "No variables found in .env"
        return Message(text=status)

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yeah, assistants support loads of LLM providers, so it's very convenient to allow the user to add them all in one go. If there are existing env vars that are set, they do not get unset, but they could get overwritten.

]
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ class AssistantsCreateAssistant(Component):
icon = "bot"
display_name = "Create Assistant"
description = "Creates an Assistant and returns it's id"
client = patch(OpenAI())
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Are there potential issues that may arise from using a class variable for the client? I see we're creating a new assistant for each invocation, so this seems low risk, but just wondering.

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Previously we were creating one per invocation, so I actually think a client per class is less problematic. If they were async clients, we could have races, but these are synchronous.

One potential future improvement would be to do a shared client for all the assistants. Not sure what a good pattern for this would be, just a singleton?


inputs = [
StrInput(
Expand Down Expand Up @@ -45,8 +46,7 @@ class AssistantsCreateAssistant(Component):

def process_inputs(self) -> Message:
print(f"env_set is {self.env_set}")
client = patch(OpenAI())
assistant = client.beta.assistants.create(
assistant = self.client.beta.assistants.create(
name=self.assistant_name,
instructions=self.instructions,
model=self.model,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
class AssistantsCreateThread(Component):
display_name = "Create Assistant Thread"
description = "Creates a thread and returns the thread id"
client = patch(OpenAI())

inputs = [
MultilineInput(
Expand All @@ -23,9 +24,7 @@ class AssistantsCreateThread(Component):
]

def process_inputs(self) -> Message:
client = patch(OpenAI())

thread = client.beta.threads.create()
thread = self.client.beta.threads.create()
thread_id = thread.id

message = Message(text=thread_id)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
class AssistantsGetAssistantName(Component):
display_name = "Get Assistant name"
description = "Assistant by id"
client = patch(OpenAI())

inputs = [
StrInput(
Expand All @@ -29,7 +30,6 @@ class AssistantsGetAssistantName(Component):
]

def process_inputs(self) -> Message:
patch(OpenAI())
assistant = self.client.beta.assistants.retrieve(
assistant_id=self.assistant_id,
)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,14 +9,14 @@
class AssistantsListAssistants(Component):
display_name = "List Assistants"
description = "Returns a list of assistant id's"
client = patch(OpenAI())

outputs = [
Output(display_name="Assistants", name="assistants", method="process_inputs"),
]

def process_inputs(self) -> Message:
patch(OpenAI())
assistants = self.client.beta.assistants.list()
assistants = self.client.beta.assistants.list().data
id_list = [assistant.id for assistant in assistants]
message = Message(
# get text from list
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
class AssistantsRun(Component):
display_name = "Run Assistant"
description = "Executes an Assistant Run against a thread"
client = patch(OpenAI())

def update_build_config(
self,
Expand Down
3 changes: 3 additions & 0 deletions src/backend/base/langflow/template/template/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,9 @@ def from_dict(cls, data: dict) -> "Template":
_input = Input(**value)

data["fields"].append(_input)
# Handles components with no inputs
if "fields" not in data:
data["fields"] = []
return cls(**data)

# For backwards compatibility
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
import pytest

from tests.integration.utils import run_single_component


@pytest.mark.api_key_required
@pytest.mark.asyncio
async def test_list_assistants():
    """List assistants through the component and check that a response came back.

    Marked ``asyncio`` so pytest-asyncio executes the coroutine (without the
    marker it is skipped in strict mode), and ``api_key_required`` because the
    component talks to a live service — consistent with test_create_assistants.
    """
    from langflow.components.astra_assistants import AssistantsListAssistants

    results = await run_single_component(
        AssistantsListAssistants,
        inputs={},
    )
    # The component joins the assistant ids into a single Message; it must exist
    # even when the account has no assistants yet.
    assert results["assistants"].text is not None


@pytest.mark.api_key_required
@pytest.mark.asyncio
async def test_create_assistants():
    """End-to-end flow: create an assistant, then exercise the sibling components on it."""
    from langflow.components.astra_assistants import AssistantsCreateAssistant

    assistant_inputs = {
        "assistant_name": "artist-bot",
        "instructions": "reply only with ascii art",
        "model": "gpt-4o-mini",
    }
    outputs = await run_single_component(AssistantsCreateAssistant, inputs=assistant_inputs)
    created_id = outputs["assistant_id"].text
    assert created_id is not None

    # Chain the remaining components against the assistant we just created.
    await test_list_assistants()
    await get_assistant_name(created_id)
    new_thread_id = await test_create_thread()
    await run_assistant(created_id, new_thread_id)


@pytest.mark.api_key_required
@pytest.mark.asyncio
async def test_create_thread():
    """Create a thread through the component and return its id for chained tests.

    Marked ``asyncio`` so pytest-asyncio executes the coroutine when collected
    directly, and ``api_key_required`` since it hits a live service — consistent
    with test_create_assistants, which also awaits this function directly (the
    marks do not affect a direct ``await``).
    """
    from langflow.components.astra_assistants import AssistantsCreateThread

    results = await run_single_component(
        AssistantsCreateThread,
        inputs={},
    )
    thread_id = results["thread_id"].text
    assert thread_id is not None
    return thread_id


async def get_assistant_name(assistant_id):
    """Fetch the assistant's name by id and assert the lookup produced text."""
    from langflow.components.astra_assistants import AssistantsGetAssistantName

    lookup_inputs = {"assistant_id": assistant_id}
    outputs = await run_single_component(
        AssistantsGetAssistantName,
        inputs=lookup_inputs,
    )
    assert outputs["assistant_name"].text is not None


async def run_assistant(assistant_id, thread_id):
    """Run the assistant against the given thread and assert a response was produced."""
    from langflow.components.astra_assistants import AssistantsRun

    run_inputs = {
        "assistant_id": assistant_id,
        "user_message": "hello",
        "thread_id": thread_id,
    }
    outputs = await run_single_component(AssistantsRun, inputs=run_inputs)
    assert outputs["assistant_response"].text is not None
Loading