
Commit

Add ruff rules SIM
cbornet committed Oct 1, 2024
1 parent 7b7e5cd commit 76611a3
Showing 63 changed files with 226 additions and 374 deletions.
9 changes: 4 additions & 5 deletions src/backend/base/langflow/__main__.py
@@ -457,11 +457,10 @@ def migration(
     """
     Run or test migrations.
     """
-    if fix:
-        if not typer.confirm(
-            "This will delete all data necessary to fix migrations. Are you sure you want to continue?"
-        ):
-            raise typer.Abort
+    if fix and not typer.confirm(
+        "This will delete all data necessary to fix migrations. Are you sure you want to continue?"
+    ):
+        raise typer.Abort

     initialize_services(fix_migration=fix)
     db_service = get_db_service()
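
The hunk above collapses a nested `if` into a single condition, the pattern ruff's flake8-simplify ("SIM") rules report as SIM102. The rule set itself is normally enabled by adding "SIM" to the lint selection in pyproject.toml; that configuration change is presumably part of this commit but is not shown in this excerpt. A minimal sketch of the pattern, using hypothetical names rather than the project's code:

# Before: nested conditions, the shape flagged as SIM102 (collapsible-if).
def confirm_destructive_fix(fix: bool, confirmed: bool) -> None:
    if fix:
        if not confirmed:
            raise RuntimeError("aborted")


# After: one combined condition; the body still runs only when both
# conditions hold, so behavior is unchanged.
def confirm_destructive_fix_simplified(fix: bool, confirmed: bool) -> None:
    if fix and not confirmed:
        raise RuntimeError("aborted")
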
5 changes: 1 addition & 4 deletions src/backend/base/langflow/api/log_router.py
@@ -90,10 +90,7 @@ async def logs(
                 status_code=HTTPStatus.BAD_REQUEST,
                 detail="Timestamp is required when requesting logs after the timestamp",
             )
-        if lines_before <= 0:
-            content = log_buffer.get_last_n(10)
-        else:
-            content = log_buffer.get_last_n(lines_before)
+        content = log_buffer.get_last_n(10) if lines_before <= 0 else log_buffer.get_last_n(lines_before)
     else:
         if lines_before > 0:
             content = log_buffer.get_before_timestamp(timestamp=timestamp, lines=lines_before)
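
Most of the remaining hunks in this commit are the same transformation: an `if`/`else` whose only job is to assign one variable becomes a conditional expression, which ruff reports as SIM108. A small sketch with hypothetical names:

# Before: four lines to choose a value, the shape flagged as SIM108.
def lines_to_fetch(lines_before: int) -> int:
    if lines_before <= 0:
        count = 10
    else:
        count = lines_before
    return count


# After: the same choice as a single conditional expression.
def lines_to_fetch_simplified(lines_before: int) -> int:
    count = 10 if lines_before <= 0 else lines_before
    return count
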
25 changes: 11 additions & 14 deletions src/backend/base/langflow/api/v1/chat.py
@@ -344,20 +344,17 @@ async def build_vertices(
             raise ValueError(msg) from exc
         event_manager.on_end_vertex(data={"build_data": build_data})
         await client_consumed_queue.get()
-        if vertex_build_response.valid:
-            if vertex_build_response.next_vertices_ids:
-                tasks = []
-                for next_vertex_id in vertex_build_response.next_vertices_ids:
-                    task = asyncio.create_task(
-                        build_vertices(next_vertex_id, graph, client_consumed_queue, event_manager)
-                    )
-                    tasks.append(task)
-                try:
-                    await asyncio.gather(*tasks)
-                except asyncio.CancelledError:
-                    for task in tasks:
-                        task.cancel()
-                    return
+        if vertex_build_response.valid and vertex_build_response.next_vertices_ids:
+            tasks = []
+            for next_vertex_id in vertex_build_response.next_vertices_ids:
+                task = asyncio.create_task(build_vertices(next_vertex_id, graph, client_consumed_queue, event_manager))
+                tasks.append(task)
+            try:
+                await asyncio.gather(*tasks)
+            except asyncio.CancelledError:
+                for task in tasks:
+                    task.cancel()
+                return

     async def event_generator(event_manager: EventManager, client_consumed_queue: asyncio.Queue) -> None:
         if not data:
13 changes: 6 additions & 7 deletions src/backend/base/langflow/api/v1/endpoints.py
@@ -95,13 +95,12 @@ def validate_input_and_tweaks(input_request: SimplifiedAPIRequest):
                 if has_input_value and input_value_is_chat:
                     msg = "If you pass an input_value to the chat input, you cannot pass a tweak with the same name."
                     raise InvalidChatInputException(msg)
-        elif "Text Input" in key or "TextInput" in key:
-            if isinstance(value, dict):
-                has_input_value = value.get("input_value") is not None
-                input_value_is_text = input_request.input_value is not None and input_request.input_type == "text"
-                if has_input_value and input_value_is_text:
-                    msg = "If you pass an input_value to the text input, you cannot pass a tweak with the same name."
-                    raise InvalidChatInputException(msg)
+        elif ("Text Input" in key or "TextInput" in key) and isinstance(value, dict):
+            has_input_value = value.get("input_value") is not None
+            input_value_is_text = input_request.input_value is not None and input_request.input_type == "text"
+            if has_input_value and input_value_is_text:
+                msg = "If you pass an input_value to the text input, you cannot pass a tweak with the same name."
+                raise InvalidChatInputException(msg)


 async def simple_run_flow(
5 changes: 1 addition & 4 deletions src/backend/base/langflow/api/v1/flows.py
@@ -310,10 +310,7 @@ async def upload_file(
     contents = await file.read()
     data = orjson.loads(contents)
     response_list = []
-    if "flows" in data:
-        flow_list = FlowListCreate(**data)
-    else:
-        flow_list = FlowListCreate(flows=[FlowCreate(**data)])
+    flow_list = FlowListCreate(**data) if "flows" in data else FlowListCreate(flows=[FlowCreate(**data)])
     # Now we set the user_id for all flows
     for flow in flow_list.flows:
         flow.user_id = current_user.id
5 changes: 1 addition & 4 deletions src/backend/base/langflow/base/agents/crewai/crew.py
@@ -53,10 +53,7 @@ def get_task_callback(
         self,
     ) -> Callable:
         def task_callback(task_output: TaskOutput):
-            if self._vertex:
-                vertex_id = self._vertex.id
-            else:
-                vertex_id = self.display_name or self.__class__.__name__
+            vertex_id = self._vertex.id if self._vertex else self.display_name or self.__class__.__name__
             self.log(task_output.model_dump(), name=f"Task (Agent: {task_output.agent}) - {vertex_id}")

         return task_callback
5 changes: 1 addition & 4 deletions src/backend/base/langflow/base/flow_processing/utils.py
@@ -67,10 +67,7 @@ def build_data_from_result_data(result_data: ResultData, get_final_results_only:
     if isinstance(result_data.results, dict):
         for name, result in result_data.results.items():
             dataobj: Data | Message | None = None
-            if isinstance(result, Message):
-                dataobj = result
-            else:
-                dataobj = Data(data=result, text_key=name)
+            dataobj = result if isinstance(result, Message) else Data(data=result, text_key=name)

             data.append(dataobj)
     else:
23 changes: 11 additions & 12 deletions src/backend/base/langflow/base/io/chat.py
@@ -25,12 +25,16 @@ def store_message(
             msg = "Only one message can be stored at a time."
             raise ValueError(msg)
         stored_message = messages[0]
-        if hasattr(self, "_event_manager") and self._event_manager and stored_message.id:
-            if not isinstance(message.text, str):
-                complete_message = self._stream_message(message, stored_message.id)
-                message_table = update_message(message_id=stored_message.id, message={"text": complete_message})
-                stored_message = Message(**message_table.model_dump())
-                self.vertex._added_message = stored_message
+        if (
+            hasattr(self, "_event_manager")
+            and self._event_manager
+            and stored_message.id
+            and not isinstance(message.text, str)
+        ):
+            complete_message = self._stream_message(message, stored_message.id)
+            message_table = update_message(message_id=stored_message.id, message={"text": complete_message})
+            stored_message = Message(**message_table.model_dump())
+            self.vertex._added_message = stored_message
         self.status = stored_message
         return stored_message

@@ -77,19 +81,14 @@ def build_with_data(
         session_id: str | None = None,
         return_message: bool | None = False,
     ) -> Message:
-        message: Message | None = None
-
         if isinstance(input_value, Data):
             # Update the data of the record
             message = Message.from_data(input_value)
         else:
             message = Message(
                 text=input_value, sender=sender, sender_name=sender_name, files=files, session_id=session_id
             )
-        if not return_message:
-            message_text = message.text
-        else:
-            message_text = message  # type: ignore
+        message_text = message.text if not return_message else message

         self.status = message_text
         if session_id and isinstance(message, Message) and isinstance(message.text, str):
(additional changed file; path not shown)
@@ -130,10 +130,7 @@ def build_agent_and_task(self) -> list[SequentialTask]:

         # If there's a previous task, create a list of tasks
         if self.previous_task:
-            if isinstance(self.previous_task, list):
-                tasks = self.previous_task + [task]
-            else:
-                tasks = [self.previous_task, task]
+            tasks = self.previous_task + [task] if isinstance(self.previous_task, list) else [self.previous_task, task]
         else:
             tasks = [task]

5 changes: 1 addition & 4 deletions src/backend/base/langflow/components/chains/SQLGenerator.py
@@ -28,10 +28,7 @@ class SQLGeneratorComponent(LCChainComponent):
     outputs = [Output(display_name="Text", name="text", method="invoke_chain")]

     def invoke_chain(self) -> Message:
-        if self.prompt:
-            prompt_template = PromptTemplate.from_template(template=self.prompt)
-        else:
-            prompt_template = None
+        prompt_template = PromptTemplate.from_template(template=self.prompt) if self.prompt else None

         if self.top_k < 1:
             msg = "Top K must be greater than 0."
5 changes: 1 addition & 4 deletions src/backend/base/langflow/components/data/Gmail.py
@@ -96,10 +96,7 @@ def _extract_email_content(self, msg: Any) -> HumanMessage:
             msg = "From email not found."
             raise ValueError(msg)

-        if "parts" in msg["payload"]:
-            parts = msg["payload"]["parts"]
-        else:
-            parts = [msg["payload"]]
+        parts = msg["payload"]["parts"] if "parts" in msg["payload"] else [msg["payload"]]

         for part in parts:
             if part["mimeType"] == "text/plain":
18 changes: 7 additions & 11 deletions src/backend/base/langflow/components/data/GoogleDriveSearch.py
@@ -96,17 +96,13 @@ def generate_file_url(self, file_id: str, mime_type: str) -> str:
         """
         Generates the appropriate Google Drive URL for a file based on its MIME type.
         """
-        if mime_type == "application/vnd.google-apps.document":
-            return f"https://docs.google.com/document/d/{file_id}/edit"
-        if mime_type == "application/vnd.google-apps.spreadsheet":
-            return f"https://docs.google.com/spreadsheets/d/{file_id}/edit"
-        if mime_type == "application/vnd.google-apps.presentation":
-            return f"https://docs.google.com/presentation/d/{file_id}/edit"
-        if mime_type == "application/vnd.google-apps.drawing":
-            return f"https://docs.google.com/drawings/d/{file_id}/edit"
-        if mime_type == "application/pdf":
-            return f"https://drive.google.com/file/d/{file_id}/view?usp=drivesdk"
-        return f"https://drive.google.com/file/d/{file_id}/view?usp=drivesdk"
+        return {
+            "application/vnd.google-apps.document": f"https://docs.google.com/document/d/{file_id}/edit",
+            "application/vnd.google-apps.spreadsheet": f"https://docs.google.com/spreadsheets/d/{file_id}/edit",
+            "application/vnd.google-apps.presentation": f"https://docs.google.com/presentation/d/{file_id}/edit",
+            "application/vnd.google-apps.drawing": f"https://docs.google.com/drawings/d/{file_id}/edit",
+            "application/pdf": f"https://drive.google.com/file/d/{file_id}/view?usp=drivesdk",
+        }.get(mime_type, f"https://drive.google.com/file/d/{file_id}/view?usp=drivesdk")

     def search_files(self) -> dict:
         # Load the token information from the JSON string
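
The generate_file_url change above is a different simplification: a chain of `if ... == constant: return ...` statements becomes a single dictionary literal with `.get()` and a default, which ruff reports as SIM116. A minimal sketch with hypothetical values:

# Before: consecutive equality checks that each return a value,
# the shape flagged as SIM116.
def status_label(code: int) -> str:
    if code == 200:
        return "ok"
    if code == 404:
        return "not found"
    if code == 500:
        return "server error"
    return "unknown"


# After: one dictionary lookup with a default for the fall-through case.
def status_label_simplified(code: int) -> str:
    return {
        200: "ok",
        404: "not found",
        500: "server error",
    }.get(code, "unknown")
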
(additional changed file; path not shown)
@@ -39,10 +39,7 @@ def generate_embeddings(self) -> Data:
         embeddings = embedding_model.embed_documents([text_content])

         # Assuming the embedding model returns a list of embeddings, we take the first one
-        if embeddings:
-            embedding_vector = embeddings[0]
-        else:
-            embedding_vector = []
+        embedding_vector = embeddings[0] if embeddings else []

         # Create a Data object to encapsulate the results
         result_data = Data(data={"text": text_content, "embeddings": embedding_vector})
(additional changed file; path not shown)
@@ -21,25 +21,24 @@ def embed_documents(self, texts: list[str]) -> list[list[float]]:
             "Authorization": f"Bearer {self.api_key.get_secret_value()}",
         }

-        with httpx.Client() as client:
-            with concurrent.futures.ThreadPoolExecutor() as executor:
-                futures = []
-                for i, text in enumerate(texts):
-                    futures.append((i, executor.submit(self._embed_text, client, headers, text)))
-
-                for index, future in futures:
-                    try:
-                        result_data = future.result()
-                        assert len(result_data["data"]) == 1, "Expected one embedding"
-                        embeddings[index] = result_data["data"][0]["embedding"]
-                    except (
-                        httpx.HTTPStatusError,
-                        httpx.RequestError,
-                        json.JSONDecodeError,
-                        KeyError,
-                    ) as e:
-                        logger.error(f"Error occurred: {e}")
-                        raise
+        with httpx.Client() as client, concurrent.futures.ThreadPoolExecutor() as executor:
+            futures = []
+            for i, text in enumerate(texts):
+                futures.append((i, executor.submit(self._embed_text, client, headers, text)))
+
+            for index, future in futures:
+                try:
+                    result_data = future.result()
+                    assert len(result_data["data"]) == 1, "Expected one embedding"
+                    embeddings[index] = result_data["data"][0]["embedding"]
+                except (
+                    httpx.HTTPStatusError,
+                    httpx.RequestError,
+                    json.JSONDecodeError,
+                    KeyError,
+                ) as e:
+                    logger.error(f"Error occurred: {e}")
+                    raise

         return embeddings  # type: ignore

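
The embed_documents change above merges two nested `with` statements into one, which ruff reports as SIM117; the context managers are still entered and exited in the same order, only the nesting level changes. A self-contained sketch of the same rewrite, using hypothetical functions and assuming `httpx` is installed:

import concurrent.futures

import httpx


# Before: nested context managers, the shape flagged as SIM117.
def fetch_all_nested(urls: list[str]) -> list[str]:
    with httpx.Client() as client:
        with concurrent.futures.ThreadPoolExecutor() as executor:
            futures = [executor.submit(client.get, url) for url in urls]
            return [future.result().text for future in futures]


# After: both context managers on a single `with` line.
def fetch_all_flat(urls: list[str]) -> list[str]:
    with httpx.Client() as client, concurrent.futures.ThreadPoolExecutor() as executor:
        futures = [executor.submit(client.get, url) for url in urls]
        return [future.result().text for future in futures]
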
(additional changed file; path not shown)
@@ -61,15 +61,9 @@ def build(
                 "Could not import firecrawl integration package. " "Please install it with `pip install firecrawl-py`."
             )
             raise ImportError(msg)
-        if crawlerOptions:
-            crawler_options_dict = crawlerOptions.__dict__["data"]["text"]
-        else:
-            crawler_options_dict = {}
+        crawler_options_dict = crawlerOptions.__dict__["data"]["text"] if crawlerOptions else {}

-        if pageOptions:
-            page_options_dict = pageOptions.__dict__["data"]["text"]
-        else:
-            page_options_dict = {}
+        page_options_dict = pageOptions.__dict__["data"]["text"] if pageOptions else {}

         if not idempotency_key:
             idempotency_key = str(uuid.uuid4())
(additional changed file; path not shown)
@@ -54,15 +54,9 @@ def build(
                 "Could not import firecrawl integration package. " "Please install it with `pip install firecrawl-py`."
             )
             raise ImportError(msg)
-        if extractorOptions:
-            extractor_options_dict = extractorOptions.__dict__["data"]["text"]
-        else:
-            extractor_options_dict = {}
+        extractor_options_dict = extractorOptions.__dict__["data"]["text"] if extractorOptions else {}

-        if pageOptions:
-            page_options_dict = pageOptions.__dict__["data"]["text"]
-        else:
-            page_options_dict = {}
+        page_options_dict = pageOptions.__dict__["data"]["text"] if pageOptions else {}

         app = FirecrawlApp(api_key=api_key)
         results = app.scrape_url(
5 changes: 1 addition & 4 deletions src/backend/base/langflow/components/models/AIMLModel.py
@@ -79,10 +79,7 @@ def build_model(self) -> LanguageModel:  # type: ignore[type-var]
         aiml_api_base = self.aiml_api_base or "https://api.aimlapi.com"
         seed = self.seed

-        if isinstance(aiml_api_key, SecretStr):
-            openai_api_key = aiml_api_key.get_secret_value()
-        else:
-            openai_api_key = aiml_api_key
+        openai_api_key = aiml_api_key.get_secret_value() if isinstance(aiml_api_key, SecretStr) else aiml_api_key

         return ChatOpenAI(
             model=model_name,
5 changes: 1 addition & 4 deletions src/backend/base/langflow/components/models/CohereModel.py
@@ -36,10 +36,7 @@ def build_model(self) -> LanguageModel:  # type: ignore[type-var]
         cohere_api_key = self.cohere_api_key
         temperature = self.temperature

-        if cohere_api_key:
-            api_key = SecretStr(cohere_api_key)
-        else:
-            api_key = None
+        api_key = SecretStr(cohere_api_key) if cohere_api_key else None

         return ChatCohere(
             temperature=temperature or 0.75,
5 changes: 1 addition & 4 deletions src/backend/base/langflow/components/models/MistralModel.py
@@ -78,10 +78,7 @@ def build_model(self) -> LanguageModel:  # type: ignore[type-var]
         random_seed = self.random_seed
         safe_mode = self.safe_mode

-        if mistral_api_key:
-            api_key = SecretStr(mistral_api_key)
-        else:
-            api_key = None
+        api_key = SecretStr(mistral_api_key) if mistral_api_key else None

         return ChatMistralAI(
             max_tokens=max_tokens or None,
5 changes: 1 addition & 4 deletions src/backend/base/langflow/components/models/OpenAIModel.py
@@ -102,10 +102,7 @@ def build_model(self) -> LanguageModel:  # type: ignore[type-var]
         json_mode = bool(output_schema_dict) or self.json_mode
         seed = self.seed

-        if openai_api_key:
-            api_key = SecretStr(openai_api_key)
-        else:
-            api_key = None
+        api_key = SecretStr(openai_api_key) if openai_api_key else None
         output = ChatOpenAI(
             max_tokens=max_tokens or None,
             model_kwargs=model_kwargs,
(additional changed file; path not shown)
@@ -34,11 +34,11 @@ class OutputParserComponent(Component):
     def build_parser(self) -> OutputParser:
         if self.parser_type == "CSV":
             return CommaSeparatedListOutputParser()
-        else:
-            raise ValueError("Unsupported or missing parser")
+        msg = "Unsupported or missing parser"
+        raise ValueError(msg)

     def format_instructions(self) -> Message:
         if self.parser_type == "CSV":
             return Message(text=CommaSeparatedListOutputParser().get_format_instructions())
-        else:
-            raise ValueError("Unsupported or missing parser")
+        msg = "Unsupported or missing parser"
+        raise ValueError(msg)
6 changes: 3 additions & 3 deletions src/backend/base/langflow/components/prototypes/SubFlow.py
@@ -57,7 +57,7 @@ def add_inputs_to_build_config(self, inputs_vertex: list[Vertex], build_config:
         for vertex in inputs_vertex:
             new_vertex_inputs = []
             field_template = vertex.data["node"]["template"]
-            for inp in field_template.keys():
+            for inp in field_template:
                 if inp not in ["code", "_type"]:
                     field_template[inp]["display_name"] = (
                         vertex.display_name + " - " + field_template[inp]["display_name"]
@@ -84,10 +84,10 @@ def add_inputs_to_build_config(self, inputs_vertex: list[Vertex], build_config:

     async def generate_results(self) -> list[Data]:
         tweaks: dict = {}
-        for field in self._attributes.keys():
+        for field in self._attributes:
             if field != "flow_name":
                 [node, name] = field.split("|")
-                if node not in tweaks.keys():
+                if node not in tweaks:
                     tweaks[node] = {}
                 tweaks[node][name] = self._attributes[field]
         flow_name = self._attributes.get("flow_name")
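
The SubFlow changes above are the dictionary-membership cleanup ruff reports as SIM118: `key in d.keys()` and `for k in d.keys()` become `key in d` and `for k in d`, since dictionaries support membership tests and iteration directly. A short sketch with hypothetical names, assuming keys of the form "Node|param":

# Before: membership and iteration routed through .keys(),
# the shape flagged as SIM118.
def collect_tweaks_old(attributes: dict[str, str]) -> dict[str, dict[str, str]]:
    tweaks: dict[str, dict[str, str]] = {}
    for field in attributes.keys():
        node, name = field.split("|")
        if node not in tweaks.keys():
            tweaks[node] = {}
        tweaks[node][name] = attributes[field]
    return tweaks


# After: the dict is used directly; behavior and ordering are identical.
def collect_tweaks_new(attributes: dict[str, str]) -> dict[str, dict[str, str]]:
    tweaks: dict[str, dict[str, str]] = {}
    for field in attributes:
        node, name = field.split("|")
        if node not in tweaks:
            tweaks[node] = {}
        tweaks[node][name] = attributes[field]
    return tweaks
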