From d797af17c026ded4c2f412676f720556af0fc2fb Mon Sep 17 00:00:00 2001 From: Mark Sze Date: Sat, 23 Nov 2024 21:03:56 +0000 Subject: [PATCH 01/28] Added pip install corrections / instructions to notebook and blog for CaptainAgent --- notebook/agentchat_captainagent.ipynb | 6 +++--- website/blog/2024-11-15-CaptainAgent/index.mdx | 6 ++++++ 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/notebook/agentchat_captainagent.ipynb b/notebook/agentchat_captainagent.ipynb index 0275b7e1d5..abf652c6d9 100644 --- a/notebook/agentchat_captainagent.ipynb +++ b/notebook/agentchat_captainagent.ipynb @@ -11,9 +11,9 @@ "\n", "````{=mdx}\n", ":::info Requirements\n", - "Install `ag2`:\n", + "Install `ag2` with CaptainAgent:\n", "```bash\n", - "pip install ag2[autobuild]\n", + "pip install ag2[captainagent]\n", "```\n", "\n", "For more information, please refer to the [installation guide](/docs/installation/).\n", @@ -522,7 +522,7 @@ "\n", "For agent library, refer to [`captainagent_expert_library.json`](https://github.com/ag2ai/ag2/blob/main/notebook/captainagent_expert_library.json) for samples. You can refer to [docs](https://ag2ai.github.io/ag2/docs/topics/captainagent/agent_library) on how to customize your own expert library.\n", "\n", - "For tool library, we provide a set of tools [here](https://github.com/ag2ai/ag2/tree/main/autogen/agentchat/contrib/captainagent/tools/README.md), the tools are categorized into three types: data analysis, information_retrieval, math." + "For tool library, we provide a set of tools [here](https://github.com/ag2ai/ag2/tree/main/autogen/agentchat/contrib/captainagent/tools/README.md), the tools are categorized into three types: data analysis, information_retrieval, math. If you are using the tools, you should [install the requirements](https://github.com/ag2ai/ag2/tree/main/autogen/agentchat/contrib/captainagent/tools/README.md#how-to-use) for them." 
] }, { diff --git a/website/blog/2024-11-15-CaptainAgent/index.mdx b/website/blog/2024-11-15-CaptainAgent/index.mdx index 5e328ca957..b1562a38ce 100644 --- a/website/blog/2024-11-15-CaptainAgent/index.mdx +++ b/website/blog/2024-11-15-CaptainAgent/index.mdx @@ -24,6 +24,12 @@ CaptainAgent iterates over the following two steps until the problem is successf The design of CaptainAgent allows it to leverage agents and tools from a pre-specified agent library and tool library. In the following section, we demonstrate how to use CaptainAgent with or without the provided library. +To install AG2 with CaptainAgent: + +```bash +pip install ag2[captainagent] +``` + # Using CaptainAgent without pre-specified agent/tool libraries CaptainAgent can serve as a drop-in replacement for the general `AssistantAgent` class in AG2. To do that we just need to add a few lines of configurations for the group chat involved. Without the agent library and tool library, CaptainAgent will automatically generate a set of agents into a group chat. 
From 6fa26f1e47c73b245ab62458d418a8a2340d5b7d Mon Sep 17 00:00:00 2001 From: Mark Sze Date: Sat, 23 Nov 2024 21:20:19 +0000 Subject: [PATCH 02/28] Add CaptainAgent tools to pip install --- MANIFEST.in | 2 ++ setup.py | 6 ++++++ 2 files changed, 8 insertions(+) create mode 100644 MANIFEST.in diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000000..25700f369d --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,2 @@ +# Include CaptainAgent tools +recursive-include autogen/agentchat/contrib/captainagent/tools * diff --git a/setup.py b/setup.py index 16abda3298..f2bb9c7548 100644 --- a/setup.py +++ b/setup.py @@ -131,6 +131,12 @@ long_description_content_type="text/markdown", url="https://github.com/ag2ai/ag2", packages=setuptools.find_packages(include=["autogen*"], exclude=["test"]), + package_data={ + "autogen.agentchat.contrib.captainagent": [ + "tools/**/*", # CaptainAgent tools + ] + }, + include_package_data=True, install_requires=install_requires, extras_require=extra_require, classifiers=[ From 79fd07b915421cf11779eb506343aa9de0a288c8 Mon Sep 17 00:00:00 2001 From: Mark Sze Date: Sun, 24 Nov 2024 19:30:10 +0000 Subject: [PATCH 03/28] CaptainAgent name corrections, Default CaptainAgent model information, excluding unnecessary mdx in .gitignore, grammar --- website/.gitignore | 4 ++++ website/docs/topics/captainagent/_category_.json | 2 +- .../docs/topics/captainagent/configurations.mdx | 15 ++++++++++++--- website/docs/topics/llm_configuration.ipynb | 2 +- 4 files changed, 18 insertions(+), 5 deletions(-) diff --git a/website/.gitignore b/website/.gitignore index c73d767838..21fd07b316 100644 --- a/website/.gitignore +++ b/website/.gitignore @@ -14,6 +14,7 @@ docs/reference docs/tutorial/*.mdx docs/tutorial/**/*.png !docs/tutorial/assets/*.png +docs/topics/swarm.mdx docs/topics/llm_configuration.mdx docs/topics/code-execution/*.mdx docs/topics/task_decomposition.mdx @@ -23,6 +24,9 @@ docs/topics/non-openai-models/**/*.py 
docs/topics/non-openai-models/**/*.svg docs/topics/code-execution/*.mdx docs/topics/groupchat/customized_speaker_selection.mdx +docs/topics/groupchat/resuming_groupchat.mdx +docs/topics/groupchat/transform_messages_speaker_selection.mdx +docs/topics/groupchat/using_custom_model_client_classes.mdx cell-*-output-*.png # Misc diff --git a/website/docs/topics/captainagent/_category_.json b/website/docs/topics/captainagent/_category_.json index 60f390c97e..ae919a6f5a 100644 --- a/website/docs/topics/captainagent/_category_.json +++ b/website/docs/topics/captainagent/_category_.json @@ -1,4 +1,4 @@ { - "label": "Captain Agent", + "label": "CaptainAgent", "collapsible": true } diff --git a/website/docs/topics/captainagent/configurations.mdx b/website/docs/topics/captainagent/configurations.mdx index b8281042ac..1bdc850d05 100644 --- a/website/docs/topics/captainagent/configurations.mdx +++ b/website/docs/topics/captainagent/configurations.mdx @@ -1,5 +1,5 @@ # Configurations in `nested_config` -Captain Agent requires `nested_config` for configuration. Below is an example, we will break it down and provide a detailed explanation. +CaptainAgent requires `nested_config` for configuration. Below is an example, we will break it down and provide a detailed explanation. ``` nested_config = { @@ -23,6 +23,15 @@ nested_config = { } ``` +:::info Requirements +If you run CaptainAgent without specifying a `nested_config`, a default is used and this **requires an OAI_CONFIG_LIST**. + +**The default model used is OpenAI's GPT-4o**. + +To see the default configuration and the OpenAI model used, see the [captainagent.py](https://github.com/ag2ai/ag2/blob/ceb75d865d0a7eb2b7c0bc51776e093681e0970e/autogen/agentchat/contrib/captainagent.py) file. + +For advice on using OAI_CONFIG_LIST, [see the documentation](https://ag2ai.github.io/ag2/docs/topics/llm_configuration#oai_config_list-pattern). +::: ## `autobuild_init_config` This section is used to configure the initial setup of autobuild. 
`autobuild_init_config` takes in arguments from `AgentBuilder.__init__()`. Check the full list of arguments [here](https://github.com/ag2ai/ag2/blob/main/autogen/agentchat/contrib/agent_builder.py#L181). @@ -31,10 +40,10 @@ This section is used to configure the initial setup of autobuild. `autobuild_ini Configures the path to API key config. Defaults to `OAI_CONFIG_LIST`. ### `builder_model` -Configures the backbone of agent builder. The builder is used for agent selection from the library. Defaults to `gpt-4o-mini`. +Configures the backbone of agent builder. The builder is used for agent selection from the library. Defaults to `gpt-4o`. ### `agent_model` -Configures the backbone of agents in the group chat. Defaults to `gpt-4o-mini`. +Configures the backbone of agents in the group chat. Defaults to `gpt-4o`. ### Other `kwargs` `autobuild_init_config` takes in arguments from `AgentBuilder.__init__()`. Check the full list of arguments [here](https://github.com/ag2ai/ag2/blob/main/autogen/agentchat/contrib/agent_builder.py#L181). 
diff --git a/website/docs/topics/llm_configuration.ipynb b/website/docs/topics/llm_configuration.ipynb index 0c094f6531..a001f98459 100644 --- a/website/docs/topics/llm_configuration.ipynb +++ b/website/docs/topics/llm_configuration.ipynb @@ -126,7 +126,7 @@ "\n", "### `OAI_CONFIG_LIST` pattern\n", "\n", - "A common, useful pattern used is to define this `config_list` is via JSON (specified as a file or an environment variable set to a JSON-formatted string) and then use the [`config_list_from_json`](/docs/reference/oai/openai_utils#config_list_from_json) helper function to load it:" + "A common, useful pattern used is to define this `config_list` via JSON (specified as a file or an environment variable set to a JSON-formatted string) and then use the [`config_list_from_json`](/docs/reference/oai/openai_utils#config_list_from_json) helper function to load it:" ] }, { From 2dacbd65ea9c3b1c089a0f362a1bac3197464fd8 Mon Sep 17 00:00:00 2001 From: Mark Sze <66362098+marklysze@users.noreply.github.com> Date: Mon, 25 Nov 2024 07:49:08 +1100 Subject: [PATCH 04/28] Update website/blog/2024-11-15-CaptainAgent/index.mdx Co-authored-by: Chi Wang <4250911+sonichi@users.noreply.github.com> --- website/blog/2024-11-15-CaptainAgent/index.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/blog/2024-11-15-CaptainAgent/index.mdx b/website/blog/2024-11-15-CaptainAgent/index.mdx index b1562a38ce..a1abcde5c2 100644 --- a/website/blog/2024-11-15-CaptainAgent/index.mdx +++ b/website/blog/2024-11-15-CaptainAgent/index.mdx @@ -31,7 +31,7 @@ pip install ag2[captainagent] ``` # Using CaptainAgent without pre-specified agent/tool libraries -CaptainAgent can serve as a drop-in replacement for the general `AssistantAgent` class in AG2. To do that we just need to add a few lines of configurations for the group chat involved. +CaptainAgent can serve as a drop-in replacement for the general `AssistantAgent` class in AG2. 
It also allows customization such as agent/tool libraries. When the libraries are provided, CaptainAgent will try to leverage them when solving a task. Without the agent library and tool library, CaptainAgent will automatically generate a set of agents into a group chat. ```python From 5d2e98831da2729df356a35dbe191755f880ba7b Mon Sep 17 00:00:00 2001 From: Mark Sze <66362098+marklysze@users.noreply.github.com> Date: Tue, 26 Nov 2024 14:01:30 +1100 Subject: [PATCH 05/28] Update website/blog/2024-11-15-CaptainAgent/index.mdx Co-authored-by: Chi Wang <4250911+sonichi@users.noreply.github.com> --- website/blog/2024-11-15-CaptainAgent/index.mdx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/blog/2024-11-15-CaptainAgent/index.mdx b/website/blog/2024-11-15-CaptainAgent/index.mdx index a1abcde5c2..2f9673cf3d 100644 --- a/website/blog/2024-11-15-CaptainAgent/index.mdx +++ b/website/blog/2024-11-15-CaptainAgent/index.mdx @@ -32,7 +32,7 @@ pip install ag2[captainagent] # Using CaptainAgent without pre-specified agent/tool libraries CaptainAgent can serve as a drop-in replacement for the general `AssistantAgent` class in AG2. It also allows customization such as agent/tool libraries. When the libraries are provided, CaptainAgent will try to leverage them when solving a task. -Without the agent library and tool library, CaptainAgent will automatically generate a set of agents into a group chat. +Without them, CaptainAgent will automatically generate new agents. 
```python from autogen.agentchat.contrib.captain_agent import CaptainAgent From b3b1ff5e2fc75c62f83225dcc8796c18fe8b422d Mon Sep 17 00:00:00 2001 From: Mark Sze Date: Tue, 26 Nov 2024 03:07:48 +0000 Subject: [PATCH 06/28] Specific file inclusions Signed-off-by: Mark Sze --- MANIFEST.in | 4 +++- setup.py | 4 +++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index 25700f369d..d323b578da 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,2 +1,4 @@ # Include CaptainAgent tools -recursive-include autogen/agentchat/contrib/captainagent/tools * +recursive-include autogen/agentchat/contrib/captainagent/tools *.py +include autogen/agentchat/contrib/captainagent/tools/tool_description.tsv +include autogen/agentchat/contrib/captainagent/tools/requirements.txt diff --git a/setup.py b/setup.py index f2bb9c7548..8b140d8d53 100644 --- a/setup.py +++ b/setup.py @@ -133,7 +133,9 @@ packages=setuptools.find_packages(include=["autogen*"], exclude=["test"]), package_data={ "autogen.agentchat.contrib.captainagent": [ - "tools/**/*", # CaptainAgent tools + "tools/**/*.py", # CaptainAgent tools + "tools/tool_description.tsv", + "tools/requirements.txt", ] }, include_package_data=True, From a80687af8d813beeccb97a52db0e272e2fba85c2 Mon Sep 17 00:00:00 2001 From: Mark Sze Date: Tue, 26 Nov 2024 03:47:50 +0000 Subject: [PATCH 07/28] Corrected package import on blog Signed-off-by: Mark Sze --- website/blog/2024-11-15-CaptainAgent/index.mdx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/website/blog/2024-11-15-CaptainAgent/index.mdx b/website/blog/2024-11-15-CaptainAgent/index.mdx index 2f9673cf3d..61993fa286 100644 --- a/website/blog/2024-11-15-CaptainAgent/index.mdx +++ b/website/blog/2024-11-15-CaptainAgent/index.mdx @@ -35,7 +35,7 @@ CaptainAgent can serve as a drop-in replacement for the general `AssistantAgent` Without them, CaptainAgent will automatically generate new agents. 
```python -from autogen.agentchat.contrib.captain_agent import CaptainAgent +from autogen.agentchat.contrib.captainagent import CaptainAgent from autogen import UserProxyAgent llm_config = { @@ -64,7 +64,7 @@ The tool library we provide requires subscribing to specific APIs, please refer To use agents from an agent library, you just need to specify a `library_path` sub-field or a `autobuild_tool_config` field in CaptainAgent's configuration. ```python -from autogen.agentchat.contrib.captain_agent import CaptainAgent +from autogen.agentchat.contrib.captainagent import CaptainAgent from autogen import UserProxyAgent llm_config = { From d177800e178513ecde636de6bad13ba2eeafa390 Mon Sep 17 00:00:00 2001 From: Mark Sze Date: Tue, 26 Nov 2024 04:04:51 +0000 Subject: [PATCH 08/28] Correct nested_config parameter in blog, remove manifest.in Signed-off-by: Mark Sze --- MANIFEST.in | 4 ---- website/blog/2024-11-15-CaptainAgent/index.mdx | 2 +- 2 files changed, 1 insertion(+), 5 deletions(-) delete mode 100644 MANIFEST.in diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index d323b578da..0000000000 --- a/MANIFEST.in +++ /dev/null @@ -1,4 +0,0 @@ -# Include CaptainAgent tools -recursive-include autogen/agentchat/contrib/captainagent/tools *.py -include autogen/agentchat/contrib/captainagent/tools/tool_description.tsv -include autogen/agentchat/contrib/captainagent/tools/requirements.txt diff --git a/website/blog/2024-11-15-CaptainAgent/index.mdx b/website/blog/2024-11-15-CaptainAgent/index.mdx index 61993fa286..c38486436b 100644 --- a/website/blog/2024-11-15-CaptainAgent/index.mdx +++ b/website/blog/2024-11-15-CaptainAgent/index.mdx @@ -76,7 +76,7 @@ llm_config = { captain_agent = CaptainAgent( name="captain_agent", llm_config=llm_config, - nested_mode_config=nested_mode_config, + nested_config=nested_mode_config, agent_lib="captainagent_expert_library.json", tool_lib="default", code_execution_config={"use_docker": False, "work_dir": "groupchat"}, From 
63b4802520f129c2f8a560bd9a9f24bdc2e3bbf8 Mon Sep 17 00:00:00 2001 From: AgentGenie Date: Mon, 25 Nov 2024 22:32:47 -0800 Subject: [PATCH 09/28] [GraphRAG] FalkorDB graph rag integration --- .../graph_rag/falkor_graph_query_engine.py | 13 +- .../graph_rag/falkor_graph_rag_capability.py | 63 ++++ notebook/agentchat_graph_rag_falkordb.ipynb | 290 ++++++++++++++++++ setup.py | 2 +- .../graph_rag/test_falkor_graph_rag.py | 4 +- 5 files changed, 359 insertions(+), 13 deletions(-) create mode 100644 autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py create mode 100644 notebook/agentchat_graph_rag_falkordb.ipynb diff --git a/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py b/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py index 64453b5307..d0670de102 100644 --- a/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py +++ b/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py @@ -1,11 +1,5 @@ -# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai -# -# SPDX-License-Identifier: Apache-2.0 -# -# Portions derived from https://github.com/microsoft/autogen are under the MIT License. -# SPDX-License-Identifier: MIT import os -from dataclasses import field +from dataclasses import dataclass, field from typing import List from graphrag_sdk import KnowledgeGraph, Source @@ -15,6 +9,7 @@ from .graph_query_engine import GraphStoreQueryResult +@dataclass class FalkorGraphQueryResult(GraphStoreQueryResult): messages: list = field(default_factory=list) @@ -36,7 +31,7 @@ def __init__( ): """ Initialize a Falkor DB knowledge graph. - Please also refer to https://github.com/FalkorDB/GraphRAG-SDK/blob/main/graphrag_sdk/kg.py + Please also refer to https://github.com/FalkorDB/GraphRAG-SDK/blob/2-move-away-from-sql-to-json-ontology-detection/graphrag_sdk/kg.py Args: name (str): Knowledge graph name. @@ -45,7 +40,7 @@ def __init__( username (str|None): FalkorDB username. password (str|None): FalkorDB password. 
model (str): OpenAI model to use for Falkor DB to build and retrieve from the graph.
- schema: Falkor DB knowledge graph schema (ontology), https://github.com/FalkorDB/GraphRAG-SDK/blob/main/graphrag_sdk/schema/schema.py
+ schema: Falkor DB knowledge graph schema (ontology), https://github.com/FalkorDB/GraphRAG-SDK/blob/2-move-away-from-sql-to-json-ontology-detection/graphrag_sdk/schema/schema.py
If None, Falkor DB will auto generate a schema from the input docs.
"""
self.knowledge_graph = KnowledgeGraph(name, host, port, username, password, model, schema)
diff --git a/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py b/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py
new file mode 100644
index 0000000000..1860047500
--- /dev/null
+++ b/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py
@@ -0,0 +1,63 @@
+from typing import Dict, Union
+
+from autogen import UserProxyAgent
+
+from .falkor_graph_query_engine import FalkorGraphQueryEngine, FalkorGraphQueryResult
+from .graph_rag_capability import GraphRagCapability
+
+
+class FalkorGraphRagCapability(GraphRagCapability):
+ """
+ The Falkor graph rag capability integrates FalkorDB graphrag_sdk version: 0.1.3b0.
+ Ref: https://github.com/FalkorDB/GraphRAG-SDK/tree/2-move-away-from-sql-to-json-ontology-detection
+
+ For usage, please refer to example notebook/agentchat_graph_rag_falkordb.ipynb
+ """
+
+ def __init__(self, query_engine: FalkorGraphQueryEngine):
+ """
+ initialize graph rag capability with a graph query engine
+ """
+ self.query_engine = query_engine
+
+ # Graph DB query history.
+ self._history = []
+
+ def add_to_agent(self, agent: UserProxyAgent):
+ """
+ Add FalkorDB graph RAG capability to a UserProxyAgent.
+ The restriction to a UserProxyAgent is to make sure the returned message contains information retrieved from the graph DB rather than from any LLMs.
+ """ + self.graph_rag_agent = agent + + # Validate the agent config + if agent.llm_config not in (None, False): + raise Exception( + "Graph rag capability limits the query to graph DB, llm_config must be a dict or False or None." + ) + + # Register a hook for processing the last message. + agent.register_hook(hookable_method="process_last_received_message", hook=self.process_last_received_message) + + # Append extra info to the system message. + agent.update_system_message( + agent.system_message + "\nYou've been given the special ability to use graph rag to retrieve information." + ) + + def process_last_received_message(self, message: Union[Dict, str]): + """ + Query FalkorDB before return the message. + The history with FalkorDB is also logged and updated. + """ + question = self._get_last_question(message) + result: FalkorGraphQueryResult = self.query_engine.query(question, self._history) + self._history = result.messages + return result.answer + + def _get_last_question(self, message: Union[Dict, str]): + if isinstance(message, str): + return message + if isinstance(message, Dict): + if "content" in message: + return message["content"] + return None diff --git a/notebook/agentchat_graph_rag_falkordb.ipynb b/notebook/agentchat_graph_rag_falkordb.ipynb new file mode 100644 index 0000000000..ffbab4cd03 --- /dev/null +++ b/notebook/agentchat_graph_rag_falkordb.ipynb @@ -0,0 +1,290 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Using FalkorGraphRagCapability with UserProxyAgent for Graph RAG Question Answering\n", + "\n", + "AutoGen provides graph rag integration with Agent Capability. This is an example to integrate FalkorDB (a Knowledge Graph Database)." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Install Falkor DB SDK" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "1222214.22s - pydevd: Sending message related to process being replaced timed-out after 5 seconds\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Defaulting to user installation because normal site-packages is not writeable\n", + "Requirement already satisfied: graphrag_sdk==0.1.3b0 in /home/autogen-ai/.local/lib/python3.11/site-packages (0.1.3b0)\n", + "Requirement already satisfied: bs4<0.0.3,>=0.0.2 in /home/autogen-ai/.local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (0.0.2)\n", + "Requirement already satisfied: falkordb<2.0.0,>=1.0.4 in /home/autogen-ai/.local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (1.0.8)\n", + "Requirement already satisfied: openai<2.0.0,>=1.30.3 in /usr/local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (1.49.0)\n", + "Requirement already satisfied: sqlalchemy<3.0.0,>=2.0.30 in /usr/local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (2.0.35)\n", + "Requirement already satisfied: typing-extensions<5.0.0,>=4.12.1 in /usr/local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (4.12.2)\n", + "Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.11/site-packages (from bs4<0.0.3,>=0.0.2->graphrag_sdk==0.1.3b0) (4.12.3)\n", + "Requirement already satisfied: redis<6.0.0,>=5.0.1 in /home/autogen-ai/.local/lib/python3.11/site-packages (from falkordb<2.0.0,>=1.0.4->graphrag_sdk==0.1.3b0) (5.1.0)\n", + "Requirement already satisfied: anyio<5,>=3.5.0 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (4.6.0)\n", + "Requirement already satisfied: distro<2,>=1.7.0 in /usr/local/lib/python3.11/site-packages (from 
openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (1.9.0)\n", + "Requirement already satisfied: httpx<1,>=0.23.0 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (0.27.2)\n", + "Requirement already satisfied: jiter<1,>=0.4.0 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (0.5.0)\n", + "Requirement already satisfied: pydantic<3,>=1.9.0 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (1.10.9)\n", + "Requirement already satisfied: sniffio in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (1.3.1)\n", + "Requirement already satisfied: tqdm>4 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (4.66.5)\n", + "Requirement already satisfied: greenlet!=0.4.17 in /usr/local/lib/python3.11/site-packages (from sqlalchemy<3.0.0,>=2.0.30->graphrag_sdk==0.1.3b0) (3.1.1)\n", + "Requirement already satisfied: idna>=2.8 in /usr/local/lib/python3.11/site-packages (from anyio<5,>=3.5.0->openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (3.10)\n", + "Requirement already satisfied: certifi in /usr/local/lib/python3.11/site-packages (from httpx<1,>=0.23.0->openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (2024.8.30)\n", + "Requirement already satisfied: httpcore==1.* in /usr/local/lib/python3.11/site-packages (from httpx<1,>=0.23.0->openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (1.0.5)\n", + "Requirement already satisfied: h11<0.15,>=0.13 in /usr/local/lib/python3.11/site-packages (from httpcore==1.*->httpx<1,>=0.23.0->openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (0.14.0)\n", + "Requirement already satisfied: soupsieve>1.2 in /usr/local/lib/python3.11/site-packages (from beautifulsoup4->bs4<0.0.3,>=0.0.2->graphrag_sdk==0.1.3b0) (2.6)\n", + "\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: 
\u001b[0m\u001b[31;49m24.2\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m24.3.1\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "1222219.78s - pydevd: Sending message related to process being replaced timed-out after 5 seconds\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Defaulting to user installation because normal site-packages is not writeable\n", + "Obtaining file:///workspaces/autogen\n", + " Installing build dependencies ... \u001b[?25ldone\n", + "\u001b[?25h Checking if build backend supports build_editable ... \u001b[?25ldone\n", + "\u001b[?25h Getting requirements to build editable ... \u001b[?25ldone\n", + "\u001b[?25h Preparing editable metadata (pyproject.toml) ... \u001b[?25ldone\n", + "\u001b[?25hRequirement already satisfied: openai>=1.3 in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (1.49.0)\n", + "Requirement already satisfied: diskcache in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (5.6.3)\n", + "Requirement already satisfied: termcolor in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (2.4.0)\n", + "Requirement already satisfied: flaml in /home/autogen-ai/.local/lib/python3.11/site-packages (from autogen==0.3.2) (2.2.0)\n", + "Requirement already satisfied: numpy<2,>=1.17.0 in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (1.26.4)\n", + "Requirement already satisfied: python-dotenv in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (1.0.1)\n", + "Requirement already satisfied: tiktoken in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (0.7.0)\n", + "Requirement already satisfied: pydantic!=2.6.0,<3,>=1.10 in 
/usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (1.10.9)\n", + "Requirement already satisfied: docker in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (7.1.0)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (24.1)\n", + "Requirement already satisfied: anyio<5,>=3.5.0 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->autogen==0.3.2) (4.6.0)\n", + "Requirement already satisfied: distro<2,>=1.7.0 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->autogen==0.3.2) (1.9.0)\n", + "Requirement already satisfied: httpx<1,>=0.23.0 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->autogen==0.3.2) (0.27.2)\n", + "Requirement already satisfied: jiter<1,>=0.4.0 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->autogen==0.3.2) (0.5.0)\n", + "Requirement already satisfied: sniffio in /usr/local/lib/python3.11/site-packages (from openai>=1.3->autogen==0.3.2) (1.3.1)\n", + "Requirement already satisfied: tqdm>4 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->autogen==0.3.2) (4.66.5)\n", + "Requirement already satisfied: typing-extensions<5,>=4.11 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->autogen==0.3.2) (4.12.2)\n", + "Requirement already satisfied: requests>=2.26.0 in /usr/local/lib/python3.11/site-packages (from docker->autogen==0.3.2) (2.32.3)\n", + "Requirement already satisfied: urllib3>=1.26.0 in /usr/local/lib/python3.11/site-packages (from docker->autogen==0.3.2) (2.2.3)\n", + "Requirement already satisfied: regex>=2022.1.18 in /usr/local/lib/python3.11/site-packages (from tiktoken->autogen==0.3.2) (2024.9.11)\n", + "Requirement already satisfied: idna>=2.8 in /usr/local/lib/python3.11/site-packages (from anyio<5,>=3.5.0->openai>=1.3->autogen==0.3.2) (3.10)\n", + "Requirement already satisfied: certifi in /usr/local/lib/python3.11/site-packages (from httpx<1,>=0.23.0->openai>=1.3->autogen==0.3.2) 
(2024.8.30)\n", + "Requirement already satisfied: httpcore==1.* in /usr/local/lib/python3.11/site-packages (from httpx<1,>=0.23.0->openai>=1.3->autogen==0.3.2) (1.0.5)\n", + "Requirement already satisfied: h11<0.15,>=0.13 in /usr/local/lib/python3.11/site-packages (from httpcore==1.*->httpx<1,>=0.23.0->openai>=1.3->autogen==0.3.2) (0.14.0)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.11/site-packages (from requests>=2.26.0->docker->autogen==0.3.2) (3.3.2)\n", + "Building wheels for collected packages: autogen\n", + " Building editable for autogen (pyproject.toml) ... \u001b[?25ldone\n", + "\u001b[?25h Created wheel for autogen: filename=autogen-0.3.2-0.editable-py3-none-any.whl size=16216 sha256=6a1d2928c2581b5d66f17c8cafed5444dab21fcb5a52b4ba34fe43abbc4055ec\n", + " Stored in directory: /tmp/pip-ephem-wheel-cache-9wh6yuuq/wheels/d4/86/1f/a538740a449c67aa3ff8343698c29d70fc94236f70bde92144\n", + "Successfully built autogen\n", + "Installing collected packages: autogen\n", + " Attempting uninstall: autogen\n", + " Found existing installation: autogen 0.3.0\n", + " Uninstalling autogen-0.3.0:\n", + " Successfully uninstalled autogen-0.3.0\n", + "Successfully installed autogen-0.3.2\n", + "\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m24.2\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m24.3.1\u001b[0m\n", + "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "%pip install graphrag_sdk==0.1.3b0\n", + "\n", + "# For debug only,\n", + "# %pip install -e /workspaces/autogen" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + 
"## Set OpenAI API"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import os\n",
+ "\n",
+ "import autogen\n",
+ "\n",
+ "config_list = autogen.config_list_from_json(env_or_file=\"OAI_CONFIG_LIST\")\n",
+ "os.environ[\"OPENAI_API_KEY\"] = config_list[0][\"api_key\"]"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Create Knowledge Graph with Your Own Data\n",
+ "\n",
+ "Note that you would need to have a FalkorDB instance running and ready. \n",
+ "In this example, the FalkorDB endpoint is already set at host=\"172.18.0.3\" and port=6379.\n",
+ "For how to set up FalkorDB, please refer to https://docs.falkordb.com/"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from graphrag_sdk.schema import Schema\n",
+ "\n",
+ "from autogen.agentchat.contrib.graph_rag.document import Document, DocumentType\n",
+ "from autogen.agentchat.contrib.graph_rag.falkor_graph_query_engine import FalkorGraphQueryEngine\n",
+ "\n",
+ "# Auto generate graph schema from unstructured data\n",
+ "input_path = \"../test/agentchat/contrib/graph_rag/the_matrix.txt\"\n",
+ "\n",
+ "movie_schema = Schema()\n",
+ "actor = movie_schema.add_entity(\"Actor\").add_attribute(\"name\", str, unique=True)\n",
+ "movie = movie_schema.add_entity(\"Movie\").add_attribute(\"title\", str, unique=True)\n",
+ "movie_schema.add_relation(\"ACTED\", actor, movie)\n",
+ "\n",
+ "query_engine = FalkorGraphQueryEngine(\n",
+ " name=\"IMDB\",\n",
+ " host=\"172.18.0.3\",\n",
+ " port=6379,\n",
+ " schema=movie_schema,\n",
+ ")\n",
+ "\n",
+ "input_documents = [Document(doctype=DocumentType.TEXT, path_or_url=input_path)]\n",
+ "\n",
+ "query_engine.init_db(input_doc=input_documents)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Create a UserProxyAgent for FalkorDB and Answer Questions"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[33muser_proxy\u001b[0m (to user_proxy):\n", + "\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Name a few actors who've played in 'The Matrix'\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[31m\n", + ">>>>>>>> NO HUMAN INPUT RECEIVED.\u001b[0m\n", + "\u001b[31m\n", + ">>>>>>>> USING AUTO REPLY...\u001b[0m\n", + "\u001b[33muser_proxy\u001b[0m (to user_proxy):\n", + "\n", + "\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33muser_proxy\u001b[0m (to user_proxy):\n", + "\n", + "List additional actors\n", + "\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "data": { + "text/plain": [ + "ChatResult(chat_id=None, chat_history=[{'content': \"Name a few actors who've played in 'The Matrix'\", 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"A few actors who have played in 'The Matrix' are:\\n\\n- Keanu Reeves\\n- Laurence Fishburne\\n- Carrie-Anne Moss\\n- Hugo Weaving\", 'role': 'user', 'name': 'user_proxy'}, {'content': '', 'role': 'assistant', 'name': 'user_proxy'}, {'content': None, 'role': 'user', 'name': 'user_proxy'}, {'content': 'List additional actors', 'role': 'assistant', 'name': 'user_proxy'}, {'content': 'The additional actors in the knowledge graph, ordered alphabetically by name, are:\\n\\n- Carrie-Anne Moss\\n- Hugo Weaving\\n- Keanu Reeves\\n- Lana Wachowski\\n- Laurence Fishburne\\n- Lilly Wachowski', 'role': 'user', 'name': 'user_proxy'}], summary='The additional actors in the knowledge graph, ordered alphabetically by name, are:\\n\\n- Carrie-Anne Moss\\n- Hugo Weaving\\n- Keanu Reeves\\n- Lana Wachowski\\n- Laurence Fishburne\\n- Lilly Wachowski', cost={'usage_including_cached_inference': {'total_cost': 0}, 
'usage_excluding_cached_inference': {'total_cost': 0}}, human_input=['', 'List additional actors', 'exit'])" + ] + }, + "execution_count": 18, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from autogen import UserProxyAgent\n", + "from autogen.agentchat.contrib.graph_rag.falkor_graph_rag_capability import FalkorGraphRagCapability\n", + "\n", + "graph_rag_agent = UserProxyAgent(\n", + " name=\"user_proxy\",\n", + " code_execution_config=False,\n", + " is_termination_msg=lambda msg: \"TERMINATE\" in msg[\"content\"],\n", + " human_input_mode=\"ALWAYS\",\n", + ")\n", + "graph_rag_capability = FalkorGraphRagCapability(query_engine)\n", + "graph_rag_capability.add_to_agent(graph_rag_agent)\n", + "\n", + "graph_rag_agent.initiate_chat(graph_rag_agent, message=\"Name a few actors who've played in 'The Matrix'\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.10" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/setup.py b/setup.py index 16abda3298..c2b73cc2a0 100644 --- a/setup.py +++ b/setup.py @@ -61,7 +61,7 @@ retrieve_chat_pgvector = [*retrieve_chat, "pgvector>=0.2.5"] graph_rag_falkor_db = [ - "graphrag_sdk", + "graphrag_sdk==0.1.3b0", ] if current_os in ["Windows", "Darwin"]: diff --git a/test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py b/test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py index 8c22edb846..2f56507be9 100644 --- a/test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py +++ b/test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py @@ -8,9 +8,7 @@ import pytest from conftest import reason, skip_openai # noqa: E402 -from graphrag_sdk import KnowledgeGraph, Source 
-from graphrag_sdk.ontology import Ontology - +from graphrag_sdk.schema import Schema try: from autogen.agentchat.contrib.graph_rag.document import ( Document, From e9ea6f7390d4e475f93ec28e4e877c172fc255ff Mon Sep 17 00:00:00 2001 From: AgentGenie Date: Mon, 25 Nov 2024 23:22:51 -0800 Subject: [PATCH 10/28] fix test --- .../agentchat/contrib/graph_rag/falkor_graph_query_engine.py | 4 ++++ .../contrib/graph_rag/falkor_graph_rag_capability.py | 4 ++++ test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py | 3 ++- 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py b/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py index d0670de102..b2940c99a6 100644 --- a/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py +++ b/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py @@ -1,3 +1,7 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + import os from dataclasses import dataclass, field from typing import List diff --git a/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py b/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py index 1860047500..2e99595e93 100644 --- a/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py +++ b/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py @@ -1,3 +1,7 @@ +# Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai +# +# SPDX-License-Identifier: Apache-2.0 + from typing import Dict, Union from autogen import UserProxyAgent diff --git a/test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py b/test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py index 2f56507be9..6f54126e3e 100644 --- a/test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py +++ b/test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py @@ -9,6 +9,7 @@ import pytest from conftest import reason, skip_openai # noqa: E402 from 
graphrag_sdk.schema import Schema + try: from autogen.agentchat.contrib.graph_rag.document import ( Document, @@ -38,7 +39,7 @@ def test_falkor_db_query_engine(): 3. Query it with a question and verify the result contains the critical information. """ # Arrange - test_schema = Ontology() + test_schema = Schema() actor = test_schema.add_entity("Actor").add_attribute("name", str, unique=True) movie = test_schema.add_entity("Movie").add_attribute("title", str, unique=True) test_schema.add_relation("ACTED", actor, movie) From d2587843b25a7b2b92de4921a2035a9170949cd6 Mon Sep 17 00:00:00 2001 From: Mark Sze Date: Tue, 26 Nov 2024 22:28:22 +0000 Subject: [PATCH 11/28] setuptools for CaptainAgent tools without warnings, pysqlite3 binary so no compile, plus blog code fix and model update Signed-off-by: Mark Sze --- .../agentchat/contrib/captainagent/tools/__init__.py | 0 setup.py | 12 +++++++++--- website/blog/2024-11-15-CaptainAgent/index.mdx | 8 ++++---- 3 files changed, 13 insertions(+), 7 deletions(-) create mode 100644 autogen/agentchat/contrib/captainagent/tools/__init__.py diff --git a/autogen/agentchat/contrib/captainagent/tools/__init__.py b/autogen/agentchat/contrib/captainagent/tools/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/setup.py b/setup.py index 8b140d8d53..e551daa664 100644 --- a/setup.py +++ b/setup.py @@ -69,7 +69,8 @@ elif current_os == "Linux": retrieve_chat_pgvector.extend(["psycopg>=3.1.18"]) -autobuild = ["chromadb", "sentence-transformers", "huggingface-hub", "pysqlite3"] +# pysqlite3-binary used so it doesn't need to compile pysqlite3 +autobuild = ["chromadb", "sentence-transformers", "huggingface-hub", "pysqlite3-binary"] extra_require = { "test": [ @@ -130,10 +131,15 @@ long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/ag2ai/ag2", - packages=setuptools.find_packages(include=["autogen*"], exclude=["test"]), + packages=setuptools.find_namespace_packages( + 
include=[ + "autogen*", + "autogen.agentchat.contrib.captainagent.tools*", + ], + exclude=["test"], + ), package_data={ "autogen.agentchat.contrib.captainagent": [ - "tools/**/*.py", # CaptainAgent tools "tools/tool_description.tsv", "tools/requirements.txt", ] diff --git a/website/blog/2024-11-15-CaptainAgent/index.mdx b/website/blog/2024-11-15-CaptainAgent/index.mdx index c38486436b..f3d67101c4 100644 --- a/website/blog/2024-11-15-CaptainAgent/index.mdx +++ b/website/blog/2024-11-15-CaptainAgent/index.mdx @@ -36,11 +36,11 @@ Without them, CaptainAgent will automatically generate new agents. ```python from autogen.agentchat.contrib.captainagent import CaptainAgent -from autogen import UserProxyAgent +from autogen import UserProxyAgent, config_list_from_json llm_config = { "temperature": 0, - "config_list": autogen.config_list_from_json("OAI_CONFIG_LIST", filter_dict={"model": ["gpt-4o-mini"]}), + "config_list": config_list_from_json("OAI_CONFIG_LIST", filter_dict={"model": ["gpt-4o"]}), } ## build agents @@ -65,11 +65,11 @@ To use agents from an agent library, you just need to specify a `library_path` s ```python from autogen.agentchat.contrib.captainagent import CaptainAgent -from autogen import UserProxyAgent +from autogen import UserProxyAgent, config_list_from_json llm_config = { "temperature": 0, - "config_list": autogen.config_list_from_json("OAI_CONFIG_LIST", filter_dict={"model": ["gpt-4-1106-preview"]}), + "config_list": config_list_from_json("OAI_CONFIG_LIST", filter_dict={"model": ["gpt-4o"]}), } ## build agents From 96ea4554d2afa7f5e0e18ba149384245f950a967 Mon Sep 17 00:00:00 2001 From: AgentGenie Date: Tue, 26 Nov 2024 23:21:11 -0800 Subject: [PATCH 12/28] Use register_reply to generate reply instead of updating messages. 
--- .../graph_rag/falkor_graph_rag_capability.py | 38 ++-- notebook/agentchat_graph_rag_falkordb.ipynb | 164 +++++++----------- 2 files changed, 83 insertions(+), 119 deletions(-) diff --git a/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py b/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py index 2e99595e93..bf6ed5c9a0 100644 --- a/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py +++ b/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py @@ -2,9 +2,9 @@ # # SPDX-License-Identifier: Apache-2.0 -from typing import Dict, Union +from typing import Any, Dict, List, Optional, Tuple, Union -from autogen import UserProxyAgent +from autogen import Agent, ConversableAgent, UserProxyAgent from .falkor_graph_query_engine import FalkorGraphQueryEngine, FalkorGraphQueryResult from .graph_rag_capability import GraphRagCapability @@ -40,23 +40,35 @@ def add_to_agent(self, agent: UserProxyAgent): "Graph rag capability limits the query to graph DB, llm_config must be a dict or False or None." ) - # Register a hook for processing the last message. - agent.register_hook(hookable_method="process_last_received_message", hook=self.process_last_received_message) - - # Append extra info to the system message. - agent.update_system_message( - agent.system_message + "\nYou've been given the special ability to use graph rag to retrieve information." + # Register method to generate reply + agent.register_reply( + [ConversableAgent, None], self._reply_using_falkordb_query, position=0, remove_other_reply_funcs=True ) - def process_last_received_message(self, message: Union[Dict, str]): + def _reply_using_falkordb_query( + self, + recipient: ConversableAgent, + messages: Optional[List[Dict]] = None, + sender: Optional[Agent] = None, + config: Optional[Any] = None, + ) -> Tuple[bool, Union[str, Dict, None]]: """ - Query FalkorDB before return the message. + Query FalkorDB before return the message. 
Internally, it invokes the OpenAI assistant to generate a reply based on the given messages. The history with FalkorDB is also logged and updated. + + Args: + recipient: The agent instance that will receive the message. + messages: A list of messages in the conversation history with the sender. + sender: The agent instance that sent the message. + config: Optional configuration for message processing. + + Returns: + A tuple containing a boolean indicating success and the assistant's reply. """ - question = self._get_last_question(message) - result: FalkorGraphQueryResult = self.query_engine.query(question, self._history) + question = self._get_last_question(messages[-1]) + result: FalkorGraphQueryResult = self.query_engine.query(question, messages=self._history) self._history = result.messages - return result.answer + return True, result.answer if result.answer else "I'm sorry, I don't have an answer for that." def _get_last_question(self, message: Union[Dict, str]): if isinstance(message, str): diff --git a/notebook/agentchat_graph_rag_falkordb.ipynb b/notebook/agentchat_graph_rag_falkordb.ipynb index ffbab4cd03..be72873b98 100644 --- a/notebook/agentchat_graph_rag_falkordb.ipynb +++ b/notebook/agentchat_graph_rag_falkordb.ipynb @@ -18,14 +18,14 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 9, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ - "1222214.22s - pydevd: Sending message related to process being replaced timed-out after 5 seconds\n" + "2254.85s - pydevd: Sending message related to process being replaced timed-out after 5 seconds\n" ] }, { @@ -33,98 +33,33 @@ "output_type": "stream", "text": [ "Defaulting to user installation because normal site-packages is not writeable\n", - "Requirement already satisfied: graphrag_sdk==0.1.3b0 in /home/autogen-ai/.local/lib/python3.11/site-packages (0.1.3b0)\n", - "Requirement already satisfied: bs4<0.0.3,>=0.0.2 in 
/home/autogen-ai/.local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (0.0.2)\n", - "Requirement already satisfied: falkordb<2.0.0,>=1.0.4 in /home/autogen-ai/.local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (1.0.8)\n", - "Requirement already satisfied: openai<2.0.0,>=1.30.3 in /usr/local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (1.49.0)\n", - "Requirement already satisfied: sqlalchemy<3.0.0,>=2.0.30 in /usr/local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (2.0.35)\n", + "Requirement already satisfied: graphrag_sdk==0.1.3b0 in /home/autogen/.local/lib/python3.11/site-packages (0.1.3b0)\n", + "Requirement already satisfied: bs4<0.0.3,>=0.0.2 in /home/autogen/.local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (0.0.2)\n", + "Requirement already satisfied: falkordb<2.0.0,>=1.0.4 in /home/autogen/.local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (1.0.10)\n", + "Requirement already satisfied: openai<2.0.0,>=1.30.3 in /usr/local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (1.55.0)\n", + "Requirement already satisfied: sqlalchemy<3.0.0,>=2.0.30 in /usr/local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (2.0.36)\n", "Requirement already satisfied: typing-extensions<5.0.0,>=4.12.1 in /usr/local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (4.12.2)\n", "Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.11/site-packages (from bs4<0.0.3,>=0.0.2->graphrag_sdk==0.1.3b0) (4.12.3)\n", - "Requirement already satisfied: redis<6.0.0,>=5.0.1 in /home/autogen-ai/.local/lib/python3.11/site-packages (from falkordb<2.0.0,>=1.0.4->graphrag_sdk==0.1.3b0) (5.1.0)\n", - "Requirement already satisfied: anyio<5,>=3.5.0 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (4.6.0)\n", + "Requirement already satisfied: redis<6.0.0,>=5.0.1 in /home/autogen/.local/lib/python3.11/site-packages (from 
falkordb<2.0.0,>=1.0.4->graphrag_sdk==0.1.3b0) (5.2.0)\n", + "Requirement already satisfied: anyio<5,>=3.5.0 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (4.6.2.post1)\n", "Requirement already satisfied: distro<2,>=1.7.0 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (1.9.0)\n", "Requirement already satisfied: httpx<1,>=0.23.0 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (0.27.2)\n", - "Requirement already satisfied: jiter<1,>=0.4.0 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (0.5.0)\n", + "Requirement already satisfied: jiter<1,>=0.4.0 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (0.7.1)\n", "Requirement already satisfied: pydantic<3,>=1.9.0 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (1.10.9)\n", "Requirement already satisfied: sniffio in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (1.3.1)\n", - "Requirement already satisfied: tqdm>4 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (4.66.5)\n", + "Requirement already satisfied: tqdm>4 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (4.67.1)\n", "Requirement already satisfied: greenlet!=0.4.17 in /usr/local/lib/python3.11/site-packages (from sqlalchemy<3.0.0,>=2.0.30->graphrag_sdk==0.1.3b0) (3.1.1)\n", "Requirement already satisfied: idna>=2.8 in /usr/local/lib/python3.11/site-packages (from anyio<5,>=3.5.0->openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (3.10)\n", "Requirement already satisfied: certifi in /usr/local/lib/python3.11/site-packages (from httpx<1,>=0.23.0->openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (2024.8.30)\n", - "Requirement already satisfied: httpcore==1.* in 
/usr/local/lib/python3.11/site-packages (from httpx<1,>=0.23.0->openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (1.0.5)\n", + "Requirement already satisfied: httpcore==1.* in /usr/local/lib/python3.11/site-packages (from httpx<1,>=0.23.0->openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (1.0.7)\n", "Requirement already satisfied: h11<0.15,>=0.13 in /usr/local/lib/python3.11/site-packages (from httpcore==1.*->httpx<1,>=0.23.0->openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (0.14.0)\n", "Requirement already satisfied: soupsieve>1.2 in /usr/local/lib/python3.11/site-packages (from beautifulsoup4->bs4<0.0.3,>=0.0.2->graphrag_sdk==0.1.3b0) (2.6)\n", - "\n", - "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m24.2\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m24.3.1\u001b[0m\n", - "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n", - "Note: you may need to restart the kernel to use updated packages.\n" - ] - }, - { - "name": "stderr", - "output_type": "stream", - "text": [ - "1222219.78s - pydevd: Sending message related to process being replaced timed-out after 5 seconds\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Defaulting to user installation because normal site-packages is not writeable\n", - "Obtaining file:///workspaces/autogen\n", - " Installing build dependencies ... \u001b[?25ldone\n", - "\u001b[?25h Checking if build backend supports build_editable ... \u001b[?25ldone\n", - "\u001b[?25h Getting requirements to build editable ... \u001b[?25ldone\n", - "\u001b[?25h Preparing editable metadata (pyproject.toml) ... 
\u001b[?25ldone\n", - "\u001b[?25hRequirement already satisfied: openai>=1.3 in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (1.49.0)\n", - "Requirement already satisfied: diskcache in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (5.6.3)\n", - "Requirement already satisfied: termcolor in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (2.4.0)\n", - "Requirement already satisfied: flaml in /home/autogen-ai/.local/lib/python3.11/site-packages (from autogen==0.3.2) (2.2.0)\n", - "Requirement already satisfied: numpy<2,>=1.17.0 in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (1.26.4)\n", - "Requirement already satisfied: python-dotenv in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (1.0.1)\n", - "Requirement already satisfied: tiktoken in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (0.7.0)\n", - "Requirement already satisfied: pydantic!=2.6.0,<3,>=1.10 in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (1.10.9)\n", - "Requirement already satisfied: docker in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (7.1.0)\n", - "Requirement already satisfied: packaging in /usr/local/lib/python3.11/site-packages (from autogen==0.3.2) (24.1)\n", - "Requirement already satisfied: anyio<5,>=3.5.0 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->autogen==0.3.2) (4.6.0)\n", - "Requirement already satisfied: distro<2,>=1.7.0 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->autogen==0.3.2) (1.9.0)\n", - "Requirement already satisfied: httpx<1,>=0.23.0 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->autogen==0.3.2) (0.27.2)\n", - "Requirement already satisfied: jiter<1,>=0.4.0 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->autogen==0.3.2) (0.5.0)\n", - "Requirement already satisfied: sniffio in /usr/local/lib/python3.11/site-packages (from openai>=1.3->autogen==0.3.2) (1.3.1)\n", - "Requirement already 
satisfied: tqdm>4 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->autogen==0.3.2) (4.66.5)\n", - "Requirement already satisfied: typing-extensions<5,>=4.11 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->autogen==0.3.2) (4.12.2)\n", - "Requirement already satisfied: requests>=2.26.0 in /usr/local/lib/python3.11/site-packages (from docker->autogen==0.3.2) (2.32.3)\n", - "Requirement already satisfied: urllib3>=1.26.0 in /usr/local/lib/python3.11/site-packages (from docker->autogen==0.3.2) (2.2.3)\n", - "Requirement already satisfied: regex>=2022.1.18 in /usr/local/lib/python3.11/site-packages (from tiktoken->autogen==0.3.2) (2024.9.11)\n", - "Requirement already satisfied: idna>=2.8 in /usr/local/lib/python3.11/site-packages (from anyio<5,>=3.5.0->openai>=1.3->autogen==0.3.2) (3.10)\n", - "Requirement already satisfied: certifi in /usr/local/lib/python3.11/site-packages (from httpx<1,>=0.23.0->openai>=1.3->autogen==0.3.2) (2024.8.30)\n", - "Requirement already satisfied: httpcore==1.* in /usr/local/lib/python3.11/site-packages (from httpx<1,>=0.23.0->openai>=1.3->autogen==0.3.2) (1.0.5)\n", - "Requirement already satisfied: h11<0.15,>=0.13 in /usr/local/lib/python3.11/site-packages (from httpcore==1.*->httpx<1,>=0.23.0->openai>=1.3->autogen==0.3.2) (0.14.0)\n", - "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.11/site-packages (from requests>=2.26.0->docker->autogen==0.3.2) (3.3.2)\n", - "Building wheels for collected packages: autogen\n", - " Building editable for autogen (pyproject.toml) ... 
\u001b[?25ldone\n", - "\u001b[?25h Created wheel for autogen: filename=autogen-0.3.2-0.editable-py3-none-any.whl size=16216 sha256=6a1d2928c2581b5d66f17c8cafed5444dab21fcb5a52b4ba34fe43abbc4055ec\n", - " Stored in directory: /tmp/pip-ephem-wheel-cache-9wh6yuuq/wheels/d4/86/1f/a538740a449c67aa3ff8343698c29d70fc94236f70bde92144\n", - "Successfully built autogen\n", - "Installing collected packages: autogen\n", - " Attempting uninstall: autogen\n", - " Found existing installation: autogen 0.3.0\n", - " Uninstalling autogen-0.3.0:\n", - " Successfully uninstalled autogen-0.3.0\n", - "Successfully installed autogen-0.3.2\n", - "\n", - "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m A new release of pip is available: \u001b[0m\u001b[31;49m24.2\u001b[0m\u001b[39;49m -> \u001b[0m\u001b[32;49m24.3.1\u001b[0m\n", - "\u001b[1m[\u001b[0m\u001b[34;49mnotice\u001b[0m\u001b[1;39;49m]\u001b[0m\u001b[39;49m To update, run: \u001b[0m\u001b[32;49mpip install --upgrade pip\u001b[0m\n", "Note: you may need to restart the kernel to use updated packages.\n" ] } ], "source": [ - "%pip install graphrag_sdk==0.1.3b0\n", - "\n", - "# For debug only,\n", - "# %pip install -e /workspaces/autogen" + "%pip install graphrag_sdk==0.1.3b0" ] }, { @@ -141,7 +76,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -159,14 +94,15 @@ "source": [ "## Create Knowledge Graph with Your Own Data\n", "\n", - "Note that, you would need to have a Falkor DB running ready. \n", - "In this example, Falker DB endpint is already set at host=\"172.18.0.3\" and port=6379.\n", + "Note that, you would need to have a Falkor DB running ready. 
If you use docker, please set up docker network properly.\n", + "\n", + "In this example, Falker DB endpint is already set at host=\"172.18.0.2\" and port=6379.\n", "For how to set up Falkor DB, please refer to https://docs.falkordb.com/" ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -185,7 +121,7 @@ "\n", "query_engine = FalkorGraphQueryEngine(\n", " name=\"IMDB\",\n", - " host=\"172.18.0.3\",\n", + " host=\"172.18.0.2\",\n", " port=6379,\n", " schema=movie_schema,\n", ")\n", @@ -204,36 +140,46 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 8, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "\u001b[33muser_proxy\u001b[0m (to user_proxy):\n", - "\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ + "\u001b[33muser_proxy\u001b[0m (to matrix_agent):\n", + "\n", "Name a few actors who've played in 'The Matrix'\n", "\n", "--------------------------------------------------------------------------------\n", - "\u001b[31m\n", - ">>>>>>>> NO HUMAN INPUT RECEIVED.\u001b[0m\n", - "\u001b[31m\n", - ">>>>>>>> USING AUTO REPLY...\u001b[0m\n", - "\u001b[33muser_proxy\u001b[0m (to user_proxy):\n", + "\u001b[33mmatrix_agent\u001b[0m (to user_proxy):\n", + "\n", + "The actors who've played in 'The Matrix' include:\n", "\n", + "- Keanu Reeves\n", + "- Laurence Fishburne\n", + "- Carrie-Anne Moss\n", + "- Hugo Weaving\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33muser_proxy\u001b[0m (to matrix_agent):\n", + "\n", + "List additional actors.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mmatrix_agent\u001b[0m (to user_proxy):\n", "\n", + "It appears that the actors I initially provided exhaust the list available in our current knowledge graph. 
There are no additional actors listed for 'The Matrix' beyond Keanu Reeves, Laurence Fishburne, Carrie-Anne Moss, and Hugo Weaving. If the film had more actors, they are not present in our graph at this time.\n", "\n", "--------------------------------------------------------------------------------\n", - "\u001b[33muser_proxy\u001b[0m (to user_proxy):\n", + "\u001b[33muser_proxy\u001b[0m (to matrix_agent):\n", "\n", - "List additional actors\n", + "Who is Lana Wachowski?\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mmatrix_agent\u001b[0m (to user_proxy):\n", + "\n", + "I'm sorry, I don't have an answer for that.\n", "\n", "--------------------------------------------------------------------------------\n" ] @@ -241,28 +187,34 @@ { "data": { "text/plain": [ - "ChatResult(chat_id=None, chat_history=[{'content': \"Name a few actors who've played in 'The Matrix'\", 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"A few actors who have played in 'The Matrix' are:\\n\\n- Keanu Reeves\\n- Laurence Fishburne\\n- Carrie-Anne Moss\\n- Hugo Weaving\", 'role': 'user', 'name': 'user_proxy'}, {'content': '', 'role': 'assistant', 'name': 'user_proxy'}, {'content': None, 'role': 'user', 'name': 'user_proxy'}, {'content': 'List additional actors', 'role': 'assistant', 'name': 'user_proxy'}, {'content': 'The additional actors in the knowledge graph, ordered alphabetically by name, are:\\n\\n- Carrie-Anne Moss\\n- Hugo Weaving\\n- Keanu Reeves\\n- Lana Wachowski\\n- Laurence Fishburne\\n- Lilly Wachowski', 'role': 'user', 'name': 'user_proxy'}], summary='The additional actors in the knowledge graph, ordered alphabetically by name, are:\\n\\n- Carrie-Anne Moss\\n- Hugo Weaving\\n- Keanu Reeves\\n- Lana Wachowski\\n- Laurence Fishburne\\n- Lilly Wachowski', cost={'usage_including_cached_inference': {'total_cost': 0}, 'usage_excluding_cached_inference': {'total_cost': 0}}, human_input=['', 'List additional 
actors', 'exit'])" + "ChatResult(chat_id=None, chat_history=[{'content': \"Name a few actors who've played in 'The Matrix'\", 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"The actors who've played in 'The Matrix' include:\\n\\n- Keanu Reeves\\n- Laurence Fishburne\\n- Carrie-Anne Moss\\n- Hugo Weaving\", 'role': 'user', 'name': 'matrix_agent'}, {'content': 'List additional actors.', 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"It appears that the actors I initially provided exhaust the list available in our current knowledge graph. There are no additional actors listed for 'The Matrix' beyond Keanu Reeves, Laurence Fishburne, Carrie-Anne Moss, and Hugo Weaving. If the film had more actors, they are not present in our graph at this time.\", 'role': 'user', 'name': 'matrix_agent'}, {'content': 'Who is Lana Wachowski?', 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"I'm sorry, I don't have an answer for that.\", 'role': 'user', 'name': 'matrix_agent'}], summary=\"I'm sorry, I don't have an answer for that.\", cost={'usage_including_cached_inference': {'total_cost': 0}, 'usage_excluding_cached_inference': {'total_cost': 0}}, human_input=['List additional actors.', 'Who is Lana Wachowski?', 'exit'])" ] }, - "execution_count": 18, + "execution_count": 8, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "from autogen import UserProxyAgent\n", + "from autogen import ConversableAgent, UserProxyAgent\n", "from autogen.agentchat.contrib.graph_rag.falkor_graph_rag_capability import FalkorGraphRagCapability\n", "\n", - "graph_rag_agent = UserProxyAgent(\n", + "graph_rag_agent = ConversableAgent(\n", + " name=\"matrix_agent\",\n", + " human_input_mode=\"NEVER\",\n", + ")\n", + "\n", + "graph_rag_capability = FalkorGraphRagCapability(query_engine)\n", + "graph_rag_capability.add_to_agent(graph_rag_agent)\n", + "\n", + "user_proxy = UserProxyAgent(\n", " name=\"user_proxy\",\n", " code_execution_config=False,\n", " 
is_termination_msg=lambda msg: \"TERMINATE\" in msg[\"content\"],\n", " human_input_mode=\"ALWAYS\",\n", ")\n", - "graph_rag_capability = FalkorGraphRagCapability(query_engine)\n", - "graph_rag_capability.add_to_agent(graph_rag_agent)\n", "\n", - "graph_rag_agent.initiate_chat(graph_rag_agent, message=\"Name a few actors who've played in 'The Matrix'\")" + "user_proxy.initiate_chat(graph_rag_agent, message=\"Name a few actors who've played in 'The Matrix'\")" ] } ], From eb6f9ea6915d8be15043b2bde1f1897efb525a9a Mon Sep 17 00:00:00 2001 From: Eric Date: Wed, 27 Nov 2024 12:12:13 -0600 Subject: [PATCH 13/28] Update MAINTAINERS.md --- MAINTAINERS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/MAINTAINERS.md b/MAINTAINERS.md index 4b9e21c3c9..3228abc702 100644 --- a/MAINTAINERS.md +++ b/MAINTAINERS.md @@ -20,6 +20,7 @@ | Aaron Ward | [AaronWard](https://github.com/AaronWard) | yappstore.ai | all | | Rudy Wu | [rudyalways](https://github.com/rudyalways) | Google | all, group chats, sequential chats | | Haiyang Li | [ohdearquant](https://github.com/ohdearquant) | - | all, sequential chats, structured output, low-level| +| Eric Moore | [emooreatx](https://github.com/emooreatx) | IBM | all| **Pending Maintainers list (Marked with \*, Waiting for explicit approval from the maintainers)** | Name | GitHub Handle | Organization | Features | From 230d6f68a37ae1cefee3dd7b9a1abf5de2fbc7f5 Mon Sep 17 00:00:00 2001 From: Mark Sze Date: Wed, 27 Nov 2024 22:23:35 +0000 Subject: [PATCH 14/28] Fix to remove added context_variables to tool call in messages Signed-off-by: Mark Sze --- autogen/agentchat/contrib/swarm_agent.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/autogen/agentchat/contrib/swarm_agent.py b/autogen/agentchat/contrib/swarm_agent.py index c1c790a906..f733adf3be 100644 --- a/autogen/agentchat/contrib/swarm_agent.py +++ b/autogen/agentchat/contrib/swarm_agent.py @@ -372,6 +372,8 @@ def generate_swarm_tool_reply( contents = [] for index in 
range(tool_calls): + func_has_context_variables = False + # 1. add context_variables to the tool call arguments tool_call = message["tool_calls"][index] @@ -385,6 +387,7 @@ def generate_swarm_tool_reply( # Check if function has context_variables parameter sig = signature(func) if __CONTEXT_VARIABLES_PARAM_NAME__ in sig.parameters: + func_has_context_variables = True current_args = json.loads(tool_call["function"]["arguments"]) current_args[__CONTEXT_VARIABLES_PARAM_NAME__] = self._context_variables # Update the tool call with new arguments @@ -400,6 +403,13 @@ def generate_swarm_tool_reply( # 2. generate tool calls reply _, tool_message = self.generate_tool_calls_reply([message_copy]) + # Remove the context variables from the tool_call message so it + # doesn't show up in the chat history + if func_has_context_variables: + post_run_args = json.loads(tool_call["function"]["arguments"]) + del post_run_args[__CONTEXT_VARIABLES_PARAM_NAME__] + tool_call["function"]["arguments"] = json.dumps(post_run_args) + # 3. 
update context_variables and next_agent, convert content to string for tool_response in tool_message["tool_responses"]: content = tool_response.get("content") From d0610841564a8867131178f75fdd7b4bdb8a2bfb Mon Sep 17 00:00:00 2001 From: Mark Sze Date: Thu, 28 Nov 2024 01:41:28 +0000 Subject: [PATCH 15/28] Tidy up code Signed-off-by: Mark Sze --- autogen/agentchat/contrib/swarm_agent.py | 30 ++++++++---------------- 1 file changed, 10 insertions(+), 20 deletions(-) diff --git a/autogen/agentchat/contrib/swarm_agent.py b/autogen/agentchat/contrib/swarm_agent.py index f733adf3be..6748ca2f7f 100644 --- a/autogen/agentchat/contrib/swarm_agent.py +++ b/autogen/agentchat/contrib/swarm_agent.py @@ -1,6 +1,7 @@ # Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai # # SPDX-License-Identifier: Apache-2.0 +import copy import json from dataclasses import dataclass from enum import Enum @@ -364,18 +365,19 @@ def generate_swarm_tool_reply( message = messages[-1] if "tool_calls" in message: - tool_calls = len(message["tool_calls"]) + tool_call_count = len(message["tool_calls"]) # Loop through tool calls individually (so context can be updated after each function call) next_agent = None tool_responses_inner = [] contents = [] - for index in range(tool_calls): + for index in range(tool_call_count): - func_has_context_variables = False + # Deep copy to ensure no changes to messages when we insert the context variables + message_copy = copy.deepcopy(message) # 1. 
add context_variables to the tool call arguments - tool_call = message["tool_calls"][index] + tool_call = message_copy["tool_calls"][index] if tool_call["type"] == "function": function_name = tool_call["function"]["name"] @@ -384,32 +386,20 @@ def generate_swarm_tool_reply( if function_name in self._function_map: func = self._function_map[function_name] # Get the original function - # Check if function has context_variables parameter + # Inject the context variables into the tool call if it has the parameter sig = signature(func) if __CONTEXT_VARIABLES_PARAM_NAME__ in sig.parameters: - func_has_context_variables = True + current_args = json.loads(tool_call["function"]["arguments"]) current_args[__CONTEXT_VARIABLES_PARAM_NAME__] = self._context_variables - # Update the tool call with new arguments tool_call["function"]["arguments"] = json.dumps(current_args) - # Copy the message - message_copy = message.copy() - tool_calls_copy = message_copy["tool_calls"] - - # remove all the tool calls except the one at the index - message_copy["tool_calls"] = [tool_calls_copy[index]] + # Ensure we are only executing the one tool at a time + message_copy["tool_calls"] = [tool_call] # 2. generate tool calls reply _, tool_message = self.generate_tool_calls_reply([message_copy]) - # Remove the context variables from the tool_call message so it - # doesn't show up in the chat history - if func_has_context_variables: - post_run_args = json.loads(tool_call["function"]["arguments"]) - del post_run_args[__CONTEXT_VARIABLES_PARAM_NAME__] - tool_call["function"]["arguments"] = json.dumps(post_run_args) - # 3. 
update context_variables and next_agent, convert content to string for tool_response in tool_message["tool_responses"]: content = tool_response.get("content") From 72850719367ade60bb44a76f828fd346ca77119e Mon Sep 17 00:00:00 2001 From: Mark Sze Date: Thu, 28 Nov 2024 03:33:04 +0000 Subject: [PATCH 16/28] Restrict license checks to staged/committed files only Signed-off-by: Mark Sze --- scripts/pre-commit-license-check.py | 66 ++++++++++++++++++++++++----- 1 file changed, 55 insertions(+), 11 deletions(-) diff --git a/scripts/pre-commit-license-check.py b/scripts/pre-commit-license-check.py index 93ba54c2be..1911115a08 100644 --- a/scripts/pre-commit-license-check.py +++ b/scripts/pre-commit-license-check.py @@ -2,13 +2,57 @@ # # SPDX-License-Identifier: Apache-2.0 #!/usr/bin/env python3 +import json +import os import re +import subprocess import sys from pathlib import Path REQUIRED_ELEMENTS = [r"Copyright.*Owners of https://github\.com/ag2ai", r"SPDX-License-Identifier: Apache-2\.0"] +def get_changed_files(): + """Get list of Python files changed in this PR/push.""" + try: + # If running in GitHub Actions PR + if os.getenv("GITHUB_EVENT_PATH"): + with open(os.getenv("GITHUB_EVENT_PATH")) as f: + event = json.load(f) + + # For pull requests + if os.getenv("GITHUB_EVENT_NAME") == "pull_request": + # Use the files listed in the PR event + changed_files = [] + for file in event["pull_request"]["changed_files"]: + filename = file.get("filename", "") + if filename.endswith(".py"): + changed_files.append(Path(filename)) + return changed_files + # For pushes + else: + result = subprocess.run( + ["git", "diff", "--name-only", "HEAD^", "HEAD"], capture_output=True, text=True, check=True + ) + # If running locally, check staged files + else: + result = subprocess.run( + ["git", "diff", "--cached", "--name-only", "--diff-filter=AMR"], + capture_output=True, + text=True, + check=True, + ) + + # Filter for Python files and convert to Path objects + return [Path(file) for file 
in result.stdout.splitlines() if file.endswith(".py")] + except subprocess.CalledProcessError as e: + print(f"Error getting changed files: {e}") + sys.exit(1) + except Exception as e: + print(f"Error processing files: {e}") + sys.exit(1) + + def should_check_file(file_path: Path) -> bool: # Skip __init__.py files return file_path.name != "__init__.py" @@ -30,10 +74,20 @@ def check_file_header(file_path): def main(): failed = False - for py_file in Path(".").rglob("*.py"): + changed_files = get_changed_files() + + if not changed_files: + print("No Python files were changed.") + return + + for py_file in changed_files: if not should_check_file(py_file): continue + if not py_file.exists(): + print(f"Warning: File {py_file} no longer exists (may have been deleted)") + continue + missing_elements = check_file_header(py_file) if missing_elements: failed = True @@ -42,16 +96,6 @@ def main(): "\nSee https://ag2ai.github.io/ag2/docs/contributor-guide/contributing/#license-headers for guidance." ) - """ - # For more detailed output: - print("Missing required elements:") - for element in missing_elements: - print(f" - {element}") - print("\nHeader should contain:") - print(" 1. Copyright notice with 'Owners of https://github.com/ag2ai'") - print(" 2. 
SPDX-License-Identifier: Apache-2.0") - """ - sys.exit(1 if failed else 0) From 90939275350010a310d36357edcc623b4ba2c68c Mon Sep 17 00:00:00 2001 From: Mark Sze Date: Thu, 28 Nov 2024 03:46:36 +0000 Subject: [PATCH 17/28] Update due to GitHub action error Signed-off-by: Mark Sze --- scripts/pre-commit-license-check.py | 129 ++++++++++++++++------------ 1 file changed, 73 insertions(+), 56 deletions(-) diff --git a/scripts/pre-commit-license-check.py b/scripts/pre-commit-license-check.py index 1911115a08..08670e7fed 100644 --- a/scripts/pre-commit-license-check.py +++ b/scripts/pre-commit-license-check.py @@ -8,95 +8,112 @@ import subprocess import sys from pathlib import Path +from typing import List REQUIRED_ELEMENTS = [r"Copyright.*Owners of https://github\.com/ag2ai", r"SPDX-License-Identifier: Apache-2\.0"] -def get_changed_files(): - """Get list of Python files changed in this PR/push.""" +def get_github_pr_files() -> List[Path]: + """Get list of Python files changed in a GitHub PR.""" try: - # If running in GitHub Actions PR if os.getenv("GITHUB_EVENT_PATH"): with open(os.getenv("GITHUB_EVENT_PATH")) as f: event = json.load(f) - # For pull requests + # For pull requests, get changed files from the event payload if os.getenv("GITHUB_EVENT_NAME") == "pull_request": - # Use the files listed in the PR event changed_files = [] - for file in event["pull_request"]["changed_files"]: + for file in event.get("pull_request", {}).get("changed_files", []): filename = file.get("filename", "") if filename.endswith(".py"): changed_files.append(Path(filename)) return changed_files - # For pushes + + # For push events, use git diff else: result = subprocess.run( ["git", "diff", "--name-only", "HEAD^", "HEAD"], capture_output=True, text=True, check=True ) - # If running locally, check staged files - else: - result = subprocess.run( - ["git", "diff", "--cached", "--name-only", "--diff-filter=AMR"], - capture_output=True, - text=True, - check=True, - ) - - # Filter for Python 
files and convert to Path objects - return [Path(file) for file in result.stdout.splitlines() if file.endswith(".py")] - except subprocess.CalledProcessError as e: - print(f"Error getting changed files: {e}") - sys.exit(1) + return [Path(file) for file in result.stdout.splitlines() if file.endswith(".py")] except Exception as e: - print(f"Error processing files: {e}") - sys.exit(1) + print(f"Error getting PR files: {e}") + return [] -def should_check_file(file_path: Path) -> bool: - # Skip __init__.py files - return file_path.name != "__init__.py" +def get_staged_files() -> List[Path]: + """Get list of staged Python files using git command.""" + try: + result = subprocess.run( + ["git", "diff", "--cached", "--name-only", "--diff-filter=AMR"], capture_output=True, text=True, check=True + ) + files = result.stdout.splitlines() + return [Path(file) for file in files if file.endswith(".py")] + except subprocess.CalledProcessError as e: + print(f"Error getting staged files: {e}") + return [] -def check_file_header(file_path): - with open(file_path, "r", encoding="utf-8") as f: - # Read first few lines of the file - content = f.read(500) +def should_check_file(file_path: Path) -> bool: + """Skip __init__.py files and check if file exists.""" + return file_path.name != "__init__.py" and file_path.exists() - # Check if all required elements are present near the start of the file - missing_elements = [] - for pattern in REQUIRED_ELEMENTS: - if not re.search(pattern, content[:500], re.IGNORECASE): - missing_elements.append(pattern) - return missing_elements +def check_file_header(file_path: Path) -> List[str]: + """Check if file has required license headers.""" + try: + with open(file_path, "r", encoding="utf-8") as f: + content = f.read(500) + missing_elements = [] + for pattern in REQUIRED_ELEMENTS: + if not re.search(pattern, content[:500], re.IGNORECASE): + missing_elements.append(pattern) + return missing_elements + except Exception as e: + print(f"Error processing file 
{file_path}: {e}") + return [] -def main(): - failed = False - changed_files = get_changed_files() +def get_files_to_check() -> List[Path]: + """Determine which files to check based on environment.""" + try: + if "--all-files" in sys.argv: + return list(Path(".").rglob("*.py")) - if not changed_files: - print("No Python files were changed.") - return + if os.getenv("GITHUB_ACTIONS") == "true": + return get_github_pr_files() - for py_file in changed_files: - if not should_check_file(py_file): - continue + return get_staged_files() + except Exception as e: + print(f"Error getting files to check: {e}") + return [] - if not py_file.exists(): - print(f"Warning: File {py_file} no longer exists (may have been deleted)") - continue - missing_elements = check_file_header(py_file) - if missing_elements: - failed = True - print(f"\nIncomplete or missing license header in: {py_file}") - print( - "\nSee https://ag2ai.github.io/ag2/docs/contributor-guide/contributing/#license-headers for guidance." - ) +def main() -> None: + """Main function to check license headers.""" + try: + failed = False + files_to_check = get_files_to_check() + + if not files_to_check: + print("No Python files to check") + return + + for py_file in files_to_check: + if not should_check_file(py_file): + continue + + missing_elements = check_file_header(py_file) + if missing_elements: + failed = True + print(f"\nIncomplete or missing license header in: {py_file}") + print( + "\nSee https://ag2ai.github.io/ag2/docs/contributor-guide/contributing/#license-headers for guidance." 
+ ) - sys.exit(1 if failed else 0) + sys.exit(1 if failed else 0) + except Exception as e: + print(f"Error in main: {e}") + sys.exit(1) if __name__ == "__main__": From 84f2e2598d0e930d1d9b53d8c23da830c5f12ee3 Mon Sep 17 00:00:00 2001 From: Qingyun Wu Date: Thu, 28 Nov 2024 13:55:24 -0800 Subject: [PATCH 18/28] add downloads badge --- README.md | 23 +++++++++-------------- 1 file changed, 9 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 77464d16c5..e0f56e8302 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,6 @@ +![Pypi Downloads](https://img.shields.io/pypi/dm/pyautogen?label=PyPI%20downloads) [![PyPI version](https://badge.fury.io/py/autogen.svg)](https://badge.fury.io/py/autogen) [![Build](https://github.com/ag2ai/ag2/actions/workflows/python-package.yml/badge.svg)](https://github.com/ag2ai/ag2/actions/workflows/python-package.yml) ![Python Version](https://img.shields.io/badge/3.8%20%7C%203.9%20%7C%203.10%20%7C%203.11%20%7C%203.12-blue) @@ -8,14 +9,7 @@ - - - -
-AutoGen Logo - - -
+# [AG2](https://github.com/ag2ai/ag2) [📚 Cite paper](#related-papers). -> [!IMPORTANT] +> **:warning: IMPORTANT** +> +> :fire: :tada: **Nov 11, 2024:** We are evolving AutoGen into **AG2**! +> A new organization [AG2ai](https://github.com/ag2ai) is created to host the development of AG2 and related projects with open governance. Check [AG2's new look](https://ag2.ai/). > -> :fire: :tada: Nov 11, 2024: We are evolving AutoGen into AG2! A new organization [ag2ai](https://github.com/ag2ai) is created to host the development of AG2 and related projects with open governance. We invite collaborators from all organizations and individuals to join the development. +> We invite collaborators from all organizations and individuals to join the development. :fire: :tada: AG2 is available via `ag2` (or its alias `autogen` or `pyautogen`) on PyPI! Starting with version 0.3.2, you can now install AG2 using: @@ -98,7 +95,7 @@ AG2 (formerly AutoGen) is an open-source programming framework for building AI a The project is currently maintained by a [dynamic group of volunteers](MAINTAINERS.md) from several organizations. Contact project administrators Chi Wang and Qingyun Wu via [support@ag2.ai](mailto:support@ag2.ai) if you are interested in becoming a maintainer. -![AutoGen Overview](./website/static/img/autogen_agentchat.png) +![AutoGen Overview](https://github.com/ag2ai/ag2/tree/main/website/static/img/autogen_agentchat.png) @@ -199,7 +196,7 @@ python test/twoagent.py After the repo is cloned. The figure below shows an example conversation flow with AG2. -![Agent Chat Example](./website/static/img/chat_example.png) +![Agent Chat Example](https://github.com/ag2ai/ag2/tree/main/website/static/img/chat_example.png) Alternatively, the [sample code](https://github.com/ag2ai/build-with-ag2/blob/main/samples/simple_chat.py) here allows a user to chat with an AG2 agent in ChatGPT style. 
@@ -252,8 +249,6 @@ In addition, you can find: - [Contributing guide](https://ag2ai.github.io/ag2/docs/Contribute) -- [Roadmap](https://ag2.ai/#roadmap) and [Roadmap Issues](https://github.com/ag2ai/ag2/issues?q=is%3Aopen+is%3Aissue+label%3Aroadmap) -

↑ Back to Top ↑ From 27d457d1612b9701817a98feeb3c0cc32a55f9b9 Mon Sep 17 00:00:00 2001 From: Qingyun Wu Date: Thu, 28 Nov 2024 13:57:01 -0800 Subject: [PATCH 19/28] fix --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e0f56e8302..dc9723bbc5 100644 --- a/README.md +++ b/README.md @@ -17,7 +17,7 @@

--> -> **:warning: IMPORTANT** +> **:tada: IMPORTANT** > > :fire: :tada: **Nov 11, 2024:** We are evolving AutoGen into **AG2**! > A new organization [AG2ai](https://github.com/ag2ai) is created to host the development of AG2 and related projects with open governance. Check [AG2's new look](https://ag2.ai/). From ca42536389c88fdb2f95e8f72c55036c2388eca4 Mon Sep 17 00:00:00 2001 From: Qingyun Wu Date: Thu, 28 Nov 2024 16:41:40 -0800 Subject: [PATCH 20/28] email --- website/docs/contributor-guide/contributing.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/contributor-guide/contributing.md b/website/docs/contributor-guide/contributing.md index d9a1fb5c73..3ba84a6755 100644 --- a/website/docs/contributor-guide/contributing.md +++ b/website/docs/contributor-guide/contributing.md @@ -23,7 +23,7 @@ There is currently no formal reviewer solicitation process. Current reviewers id ## Contact Maintainers -The project is currently maintained by a [dynamic group of volunteers](https://github.com/ag2ai/ag2/blob/main/MAINTAINERS.md) from several organizations. Contact project administrators Chi Wang and Qingyun Wu via auto-gen@outlook.com if you are interested in becoming a maintainer. +The project is currently maintained by a [dynamic group of volunteers](https://github.com/ag2ai/ag2/blob/main/MAINTAINERS.md) from several organizations. Contact project administrators Chi Wang and Qingyun Wu via support@ag2.ai if you are interested in becoming a maintainer. 
## License Headers From 731358768614f464f41adc37f3e1fedc40c26f56 Mon Sep 17 00:00:00 2001 From: Qingyun Wu Date: Thu, 28 Nov 2024 16:50:35 -0800 Subject: [PATCH 21/28] update fig --- README.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index dc9723bbc5..22f425fd35 100644 --- a/README.md +++ b/README.md @@ -95,8 +95,7 @@ AG2 (formerly AutoGen) is an open-source programming framework for building AI a The project is currently maintained by a [dynamic group of volunteers](MAINTAINERS.md) from several organizations. Contact project administrators Chi Wang and Qingyun Wu via [support@ag2.ai](mailto:support@ag2.ai) if you are interested in becoming a maintainer. -![AutoGen Overview](https://github.com/ag2ai/ag2/tree/main/website/static/img/autogen_agentchat.png) - +![AutoGen Overview](https://raw.githubusercontent.com/ag2ai/ag2/main/website/static/img/autogen_agentchat.png)

@@ -196,7 +195,7 @@ python test/twoagent.py After the repo is cloned. The figure below shows an example conversation flow with AG2. -![Agent Chat Example](https://github.com/ag2ai/ag2/tree/main/website/static/img/chat_example.png) +![Agent Chat Example](https://raw.githubusercontent.com/ag2ai/ag2/tree/main/website/static/img/chat_example.png) Alternatively, the [sample code](https://github.com/ag2ai/build-with-ag2/blob/main/samples/simple_chat.py) here allows a user to chat with an AG2 agent in ChatGPT style. From d09b14884d333cd5fddc2443274dc4215b544780 Mon Sep 17 00:00:00 2001 From: Qingyun Wu Date: Thu, 28 Nov 2024 16:55:29 -0800 Subject: [PATCH 22/28] img link --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 22f425fd35..3dbb061510 100644 --- a/README.md +++ b/README.md @@ -95,7 +95,7 @@ AG2 (formerly AutoGen) is an open-source programming framework for building AI a The project is currently maintained by a [dynamic group of volunteers](MAINTAINERS.md) from several organizations. Contact project administrators Chi Wang and Qingyun Wu via [support@ag2.ai](mailto:support@ag2.ai) if you are interested in becoming a maintainer. -![AutoGen Overview](https://raw.githubusercontent.com/ag2ai/ag2/main/website/static/img/autogen_agentchat.png) +![AutoGen Overview](https://media.githubusercontent.com/media/ag2ai/ag2/refs/heads/main/website/static/img/autogen_agentchat.png)

@@ -195,7 +195,7 @@ python test/twoagent.py After the repo is cloned. The figure below shows an example conversation flow with AG2. -![Agent Chat Example](https://raw.githubusercontent.com/ag2ai/ag2/tree/main/website/static/img/chat_example.png) +![Agent Chat Example](https://media.githubusercontent.com/media/ag2ai/ag2/refs/heads/main/website/static/img/chat_example.png) Alternatively, the [sample code](https://github.com/ag2ai/build-with-ag2/blob/main/samples/simple_chat.py) here allows a user to chat with an AG2 agent in ChatGPT style. From f826c177fdaf4225747805ddef86beec05cc0e19 Mon Sep 17 00:00:00 2001 From: Mark Sze Date: Fri, 29 Nov 2024 03:20:55 +0000 Subject: [PATCH 23/28] Update underlying model to GPT 4o, updated notebook text Signed-off-by: Mark Sze --- .../graph_rag/falkor_graph_query_engine.py | 16 +-- .../graph_rag/falkor_graph_rag_capability.py | 18 ++- .../contrib/graph_rag/graph_query_engine.py | 2 +- .../contrib/graph_rag/graph_rag_capability.py | 10 +- notebook/agentchat_graph_rag_falkordb.ipynb | 132 ++++++++---------- 5 files changed, 86 insertions(+), 92 deletions(-) diff --git a/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py b/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py index b2940c99a6..bf67bd5571 100644 --- a/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py +++ b/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py @@ -20,7 +20,7 @@ class FalkorGraphQueryResult(GraphStoreQueryResult): class FalkorGraphQueryEngine: """ - This is a wrapper for Falkor DB KnowledgeGraph. + This is a wrapper for FalkorDB KnowledgeGraph. """ def __init__( @@ -30,11 +30,11 @@ def __init__( port: int = 6379, username: str | None = None, password: str | None = None, - model: str = "gpt-4-1106-preview", + model: str = "gpt-4o", schema: Schema | None = None, ): """ - Initialize a Falkor DB knowledge graph. + Initialize a FalkorDB knowledge graph. 
Please also refer to https://github.com/FalkorDB/GraphRAG-SDK/blob/2-move-away-from-sql-to-json-ontology-detection/graphrag_sdk/kg.py Args: @@ -43,9 +43,9 @@ def __init__( port (int): FalkorDB port number. username (str|None): FalkorDB username. password (str|None): FalkorDB password. - model (str): OpenAI model to use for Falkor DB to build and retrieve from the graph. - schema: Falkor DB knowledge graph schema (ontology), https://github.com/FalkorDB/GraphRAG-SDK/blob/2-move-away-from-sql-to-json-ontology-detection/graphrag_sdk/schema/schema.py - If None, Falkor DB will auto generate a schema from the input docs. + model (str): OpenAI model to use for FalkorDB to build and retrieve from the graph. + schema: FalkorDB knowledge graph schema (ontology), https://github.com/FalkorDB/GraphRAG-SDK/blob/2-move-away-from-sql-to-json-ontology-detection/graphrag_sdk/schema/schema.py + If None, FalkorDB will auto generate a schema from the input docs. """ self.knowledge_graph = KnowledgeGraph(name, host, port, username, password, model, schema) @@ -62,11 +62,11 @@ def init_db(self, input_doc: List[Document] | None): self.knowledge_graph.process_sources(sources) def add_records(self, new_records: List) -> bool: - raise NotImplementedError("This method is not supported by Falkor DB SDK yet.") + raise NotImplementedError("This method is not supported by FalkorDB SDK yet.") def query(self, question: str, n_results: int = 1, **kwargs) -> FalkorGraphQueryResult: """ - Query the knowledage graph with a question and optional message history. + Query the knowledge graph with a question and optional message history. Args: question: a human input question. 
diff --git a/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py b/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py index bf6ed5c9a0..842edc975d 100644 --- a/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py +++ b/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py @@ -12,7 +12,7 @@ class FalkorGraphRagCapability(GraphRagCapability): """ - The Falkor graph rag capability integrate FalkorDB graphrag_sdk version: 0.1.3b0. + The FalkorDB GraphRAG capability integrate FalkorDB with graphrag_sdk version: 0.1.3b0. Ref: https://github.com/FalkorDB/GraphRAG-SDK/tree/2-move-away-from-sql-to-json-ontology-detection For usage, please refer to example notebook/agentchat_graph_rag_falkordb.ipynb @@ -20,7 +20,7 @@ class FalkorGraphRagCapability(GraphRagCapability): def __init__(self, query_engine: FalkorGraphQueryEngine): """ - initialize graph rag capability with a graph query engine + initialize GraphRAG capability with a graph query engine """ self.query_engine = query_engine @@ -29,7 +29,7 @@ def __init__(self, query_engine: FalkorGraphQueryEngine): def add_to_agent(self, agent: UserProxyAgent): """ - Add FalkorDB graph RAG capability to a UserProxyAgent. + Add FalkorDB GraphRAG capability to a UserProxyAgent. The restriction to a UserProxyAgent to make sure the returned message does not contain information retrieved from the graph DB instead of any LLMs. """ self.graph_rag_agent = agent @@ -37,10 +37,11 @@ def add_to_agent(self, agent: UserProxyAgent): # Validate the agent config if agent.llm_config not in (None, False): raise Exception( - "Graph rag capability limits the query to graph DB, llm_config must be a dict or False or None." + "Agents with GraphRAG capabilities do not use an LLM configuration. Please set your llm_config to None or False." 
) - # Register method to generate reply + # Register method to generate the reply using a FalkorDB query + # All other reply methods will be removed agent.register_reply( [ConversableAgent, None], self._reply_using_falkordb_query, position=0, remove_other_reply_funcs=True ) @@ -53,9 +54,11 @@ def _reply_using_falkordb_query( config: Optional[Any] = None, ) -> Tuple[bool, Union[str, Dict, None]]: """ - Query FalkorDB before return the message. Internally, it invokes the OpenAI assistant to generate a reply based on the given messages. + Query FalkorDB and return the message. Internally, it utilises OpenAI to generate a reply based on the given messages. The history with FalkorDB is also logged and updated. + If no results are found, a default message is returned: "I'm sorry, I don't have an answer for that." + Args: recipient: The agent instance that will receive the message. messages: A list of messages in the conversation history with the sender. @@ -67,10 +70,13 @@ def _reply_using_falkordb_query( """ question = self._get_last_question(messages[-1]) result: FalkorGraphQueryResult = self.query_engine.query(question, messages=self._history) + self._history = result.messages + return True, result.answer if result.answer else "I'm sorry, I don't have an answer for that." def _get_last_question(self, message: Union[Dict, str]): + """Retrieves the last message from the conversation history.""" if isinstance(message, str): return message if isinstance(message, Dict): diff --git a/autogen/agentchat/contrib/graph_rag/graph_query_engine.py b/autogen/agentchat/contrib/graph_rag/graph_query_engine.py index 7f533fe98b..b15866f2db 100644 --- a/autogen/agentchat/contrib/graph_rag/graph_query_engine.py +++ b/autogen/agentchat/contrib/graph_rag/graph_query_engine.py @@ -26,7 +26,7 @@ class GraphStoreQueryResult: class GraphQueryEngine(Protocol): """An abstract base class that represents a graph query engine on top of a underlying graph database. 
- This interface defines the basic methods for graph rag. + This interface defines the basic methods for graph-based RAG. """ def init_db(self, input_doc: List[Document] | None = None): diff --git a/autogen/agentchat/contrib/graph_rag/graph_rag_capability.py b/autogen/agentchat/contrib/graph_rag/graph_rag_capability.py index 2819bcee8a..7e2ade403d 100644 --- a/autogen/agentchat/contrib/graph_rag/graph_rag_capability.py +++ b/autogen/agentchat/contrib/graph_rag/graph_rag_capability.py @@ -12,9 +12,9 @@ class GraphRagCapability(AgentCapability): """ - A graph rag capability uses a graph query engine to give a conversable agent the graph rag ability. + A graph-based RAG capability uses a graph query engine to give a conversable agent the graph-based RAG ability. - An agent class with graph rag capability could + An agent class with graph-based RAG capability could 1. create a graph in the underlying database with input documents. 2. retrieved relevant information based on messages received by the agent. 3. generate answers from retrieved information and send messages back. @@ -55,8 +55,10 @@ class GraphRagCapability(AgentCapability): def __init__(self, query_engine: GraphQueryEngine): """ - initialize graph rag capability with a graph query engine + Initialize graph-based RAG capability with a graph query engine """ ... - def add_to_agent(self, agent: ConversableAgent): ... + def add_to_agent(self, agent: ConversableAgent): + """Add the capability to an agent""" + ... 
diff --git a/notebook/agentchat_graph_rag_falkordb.ipynb b/notebook/agentchat_graph_rag_falkordb.ipynb index be72873b98..023a893d50 100644 --- a/notebook/agentchat_graph_rag_falkordb.ipynb +++ b/notebook/agentchat_graph_rag_falkordb.ipynb @@ -4,60 +4,23 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Using FalkorGraphRagCapability with UserProxyAgent for Graph RAG Question Answering\n", + "# Using FalkorGraphRagCapability with agents for GraphRAG Question & Answering\n", "\n", - "AutoGen provides graph rag integration with Agent Capability. This is an example to integrate FalkorDB (a Knowledge Graph Database)." + "AG2 provides GraphRAG integration using agent capabilities. This is an example to integrate FalkorDB (a Knowledge Graph database)." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Install Falkor DB SDK" + "## Install FalkorDB SDK" ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "2254.85s - pydevd: Sending message related to process being replaced timed-out after 5 seconds\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Defaulting to user installation because normal site-packages is not writeable\n", - "Requirement already satisfied: graphrag_sdk==0.1.3b0 in /home/autogen/.local/lib/python3.11/site-packages (0.1.3b0)\n", - "Requirement already satisfied: bs4<0.0.3,>=0.0.2 in /home/autogen/.local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (0.0.2)\n", - "Requirement already satisfied: falkordb<2.0.0,>=1.0.4 in /home/autogen/.local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (1.0.10)\n", - "Requirement already satisfied: openai<2.0.0,>=1.30.3 in /usr/local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (1.55.0)\n", - "Requirement already satisfied: sqlalchemy<3.0.0,>=2.0.30 in /usr/local/lib/python3.11/site-packages (from 
graphrag_sdk==0.1.3b0) (2.0.36)\n", - "Requirement already satisfied: typing-extensions<5.0.0,>=4.12.1 in /usr/local/lib/python3.11/site-packages (from graphrag_sdk==0.1.3b0) (4.12.2)\n", - "Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.11/site-packages (from bs4<0.0.3,>=0.0.2->graphrag_sdk==0.1.3b0) (4.12.3)\n", - "Requirement already satisfied: redis<6.0.0,>=5.0.1 in /home/autogen/.local/lib/python3.11/site-packages (from falkordb<2.0.0,>=1.0.4->graphrag_sdk==0.1.3b0) (5.2.0)\n", - "Requirement already satisfied: anyio<5,>=3.5.0 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (4.6.2.post1)\n", - "Requirement already satisfied: distro<2,>=1.7.0 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (1.9.0)\n", - "Requirement already satisfied: httpx<1,>=0.23.0 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (0.27.2)\n", - "Requirement already satisfied: jiter<1,>=0.4.0 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (0.7.1)\n", - "Requirement already satisfied: pydantic<3,>=1.9.0 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (1.10.9)\n", - "Requirement already satisfied: sniffio in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (1.3.1)\n", - "Requirement already satisfied: tqdm>4 in /usr/local/lib/python3.11/site-packages (from openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (4.67.1)\n", - "Requirement already satisfied: greenlet!=0.4.17 in /usr/local/lib/python3.11/site-packages (from sqlalchemy<3.0.0,>=2.0.30->graphrag_sdk==0.1.3b0) (3.1.1)\n", - "Requirement already satisfied: idna>=2.8 in /usr/local/lib/python3.11/site-packages (from anyio<5,>=3.5.0->openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (3.10)\n", - "Requirement already satisfied: certifi in 
/usr/local/lib/python3.11/site-packages (from httpx<1,>=0.23.0->openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (2024.8.30)\n", - "Requirement already satisfied: httpcore==1.* in /usr/local/lib/python3.11/site-packages (from httpx<1,>=0.23.0->openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (1.0.7)\n", - "Requirement already satisfied: h11<0.15,>=0.13 in /usr/local/lib/python3.11/site-packages (from httpcore==1.*->httpx<1,>=0.23.0->openai<2.0.0,>=1.30.3->graphrag_sdk==0.1.3b0) (0.14.0)\n", - "Requirement already satisfied: soupsieve>1.2 in /usr/local/lib/python3.11/site-packages (from beautifulsoup4->bs4<0.0.3,>=0.0.2->graphrag_sdk==0.1.3b0) (2.6)\n", - "Note: you may need to restart the kernel to use updated packages.\n" - ] - } - ], + "outputs": [], "source": [ "%pip install graphrag_sdk==0.1.3b0" ] @@ -71,12 +34,24 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Set OpenAI API" + "## Set Configuration and OpenAI API Key\n", + "\n", + "In order to use FalkorDB you need to have an OpenAI key in your environment variable `OPENAI_API_KEY`.\n", + "\n", + "You can utilise an OAI_CONFIG_LIST file and extract the OpenAI API key and put it in the environment, as will be shown in the following cell.\n", + "\n", + "Alternatively, you can load the environment variable yourself.\n", + "\n", + "````{=mdx}\n", + ":::tip\n", + "Learn more about configuring LLMs for agents [here](/docs/topics/llm_configuration).\n", + ":::\n", + "````" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ @@ -85,6 +60,8 @@ "import autogen\n", "\n", "config_list = autogen.config_list_from_json(env_or_file=\"OAI_CONFIG_LIST\")\n", + "\n", + "# Put the OpenAI API key into the environment\n", "os.environ[\"OPENAI_API_KEY\"] = config_list[0][\"api_key\"]" ] }, @@ -92,17 +69,34 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Create Knowledge Graph with Your Own Data\n", + "````{=mdx}\n", + ":::important\n", + 
"The default model for loading graph data and answering questions using FalkorDB's SDK is OpenAI's GPT 4o and this can be changed by setting the `model` parameter on the FalkorGraphQueryEngine.\n", + ":::\n", + "````" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Create a Knowledge Graph with Your Own Data\n", + "\n", + "**Note:** You need to have a FalkorDB graph database running. If you are running one in a Docker container, please ensure your Docker network is setup to allow access to it.\n", "\n", - "Note that, you would need to have a Falkor DB running ready. If you use docker, please set up docker network properly.\n", + "In this example, the FalkorDB endpoint is set to host=\"192.168.0.1\" and port=6379, please adjust accordingly. For how to set up FalkorDB, please refer to https://docs.falkordb.com/\n", "\n", - "In this example, Falker DB endpint is already set at host=\"172.18.0.2\" and port=6379.\n", - "For how to set up Falkor DB, please refer to https://docs.falkordb.com/" + "\n", + "Below, we have some sample data from IMDB on the movie 'The Matrix'.\n", + "\n", + "We define the entities and attributes that we want in the graph database (`Actor` with `name` and `Movie` with `title`) and the relationships (Actors `ACTED` in Movies). This allows your RAG agent to answer questions about actors in the movie.\n", + "\n", + "We then initialise the database with that text document, creating the graph in FalkorDB." 
] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -121,8 +115,8 @@ "\n", "query_engine = FalkorGraphQueryEngine(\n", " name=\"IMDB\",\n", - " host=\"172.18.0.2\",\n", - " port=6379,\n", + " host=\"192.168.0.1\", # Change\n", + " port=6379, # if needed\n", " schema=movie_schema,\n", ")\n", "\n", @@ -135,12 +129,14 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Create a UserProxyAgent for FalkorDB and Answer Questions" + "## Add capability to a ConversableAgent and query them\n", + "\n", + "With FalkorDB setup, we can now associate the GraphRAG capability with a ConversableAgent and have a chat with it." ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -154,27 +150,12 @@ "--------------------------------------------------------------------------------\n", "\u001b[33mmatrix_agent\u001b[0m (to user_proxy):\n", "\n", - "The actors who've played in 'The Matrix' include:\n", - "\n", - "- Keanu Reeves\n", - "- Laurence Fishburne\n", - "- Carrie-Anne Moss\n", - "- Hugo Weaving\n", - "\n", - "--------------------------------------------------------------------------------\n", - "\u001b[33muser_proxy\u001b[0m (to matrix_agent):\n", - "\n", - "List additional actors.\n", - "\n", - "--------------------------------------------------------------------------------\n", - "\u001b[33mmatrix_agent\u001b[0m (to user_proxy):\n", - "\n", - "It appears that the actors I initially provided exhaust the list available in our current knowledge graph. There are no additional actors listed for 'The Matrix' beyond Keanu Reeves, Laurence Fishburne, Carrie-Anne Moss, and Hugo Weaving. 
If the film had more actors, they are not present in our graph at this time.\n", + "A few actors who have played in 'The Matrix' are Keanu Reeves, Laurence Fishburne, Carrie-Anne Moss, and Hugo Weaving.\n", "\n", "--------------------------------------------------------------------------------\n", "\u001b[33muser_proxy\u001b[0m (to matrix_agent):\n", "\n", - "Who is Lana Wachowski?\n", + "Who else stared in The Matrix?\n", "\n", "--------------------------------------------------------------------------------\n", "\u001b[33mmatrix_agent\u001b[0m (to user_proxy):\n", @@ -187,10 +168,10 @@ { "data": { "text/plain": [ - "ChatResult(chat_id=None, chat_history=[{'content': \"Name a few actors who've played in 'The Matrix'\", 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"The actors who've played in 'The Matrix' include:\\n\\n- Keanu Reeves\\n- Laurence Fishburne\\n- Carrie-Anne Moss\\n- Hugo Weaving\", 'role': 'user', 'name': 'matrix_agent'}, {'content': 'List additional actors.', 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"It appears that the actors I initially provided exhaust the list available in our current knowledge graph. There are no additional actors listed for 'The Matrix' beyond Keanu Reeves, Laurence Fishburne, Carrie-Anne Moss, and Hugo Weaving. 
If the film had more actors, they are not present in our graph at this time.\", 'role': 'user', 'name': 'matrix_agent'}, {'content': 'Who is Lana Wachowski?', 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"I'm sorry, I don't have an answer for that.\", 'role': 'user', 'name': 'matrix_agent'}], summary=\"I'm sorry, I don't have an answer for that.\", cost={'usage_including_cached_inference': {'total_cost': 0}, 'usage_excluding_cached_inference': {'total_cost': 0}}, human_input=['List additional actors.', 'Who is Lana Wachowski?', 'exit'])" + "ChatResult(chat_id=None, chat_history=[{'content': \"Name a few actors who've played in 'The Matrix'\", 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"A few actors who have played in 'The Matrix' are Keanu Reeves, Laurence Fishburne, Carrie-Anne Moss, and Hugo Weaving.\", 'role': 'user', 'name': 'matrix_agent'}, {'content': 'Who else stared in The Matrix?', 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"I'm sorry, I don't have an answer for that.\", 'role': 'user', 'name': 'matrix_agent'}], summary=\"I'm sorry, I don't have an answer for that.\", cost={'usage_including_cached_inference': {'total_cost': 0}, 'usage_excluding_cached_inference': {'total_cost': 0}}, human_input=['Who else stared in The Matrix?', 'exit'])" ] }, - "execution_count": 8, + "execution_count": 5, "metadata": {}, "output_type": "execute_result" } @@ -199,14 +180,17 @@ "from autogen import ConversableAgent, UserProxyAgent\n", "from autogen.agentchat.contrib.graph_rag.falkor_graph_rag_capability import FalkorGraphRagCapability\n", "\n", + "# Create a ConversableAgent (no LLM configuration)\n", "graph_rag_agent = ConversableAgent(\n", " name=\"matrix_agent\",\n", " human_input_mode=\"NEVER\",\n", ")\n", "\n", + "# Associate the capability with the agent\n", "graph_rag_capability = FalkorGraphRagCapability(query_engine)\n", "graph_rag_capability.add_to_agent(graph_rag_agent)\n", "\n", + "# Create a user proxy agent to converse 
with our RAG agent\n", "user_proxy = UserProxyAgent(\n", " name=\"user_proxy\",\n", " code_execution_config=False,\n", @@ -214,7 +198,9 @@ " human_input_mode=\"ALWAYS\",\n", ")\n", "\n", - "user_proxy.initiate_chat(graph_rag_agent, message=\"Name a few actors who've played in 'The Matrix'\")" + "user_proxy.initiate_chat(graph_rag_agent, message=\"Name a few actors who've played in 'The Matrix'\")\n", + "\n", + "# You will be prompted, as a human in the loop, after the response - feel free to ask more questions." ] } ], From 210cd24e720e9918a0a80d7a6d9f0654369b57c7 Mon Sep 17 00:00:00 2001 From: Mark Sze Date: Fri, 29 Nov 2024 03:27:13 +0000 Subject: [PATCH 24/28] Updated tests Signed-off-by: Mark Sze --- .github/workflows/contrib-graph-rag-tests.yml | 2 +- .../contrib/graph_rag/test_falkor_graph_rag.py | 11 ++++++++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/.github/workflows/contrib-graph-rag-tests.yml b/.github/workflows/contrib-graph-rag-tests.yml index 2df2d3c93a..080eea3b9f 100644 --- a/.github/workflows/contrib-graph-rag-tests.yml +++ b/.github/workflows/contrib-graph-rag-tests.yml @@ -43,7 +43,7 @@ jobs: run: | python -m pip install --upgrade pip wheel pip install pytest - - name: Install Falkor DB SDK when on linux + - name: Install FalkorDB SDK when on linux run: | pip install -e .[graph_rag_falkor_db] - name: Set AUTOGEN_USE_DOCKER based on OS diff --git a/test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py b/test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py index 6f54126e3e..b680e50e08 100644 --- a/test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py +++ b/test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py @@ -33,8 +33,8 @@ ) def test_falkor_db_query_engine(): """ - Test Falkor DB Query Engine. - 1. create a test Falkor DB Query Engine with a schema. + Test FalkorDB Query Engine. + 1. create a test FalkorDB Query Engine with a schema. 2. Initialize it with an input txt file. 3. 
Query it with a question and verify the result contains the critical information. """ @@ -44,7 +44,12 @@ def test_falkor_db_query_engine(): movie = test_schema.add_entity("Movie").add_attribute("title", str, unique=True) test_schema.add_relation("ACTED", actor, movie) - query_engine = FalkorGraphQueryEngine(schema=test_schema) + query_engine = FalkorGraphQueryEngine( + name="IMDB", + # host="192.168.0.115", # Change + # port=6379, # if needed + schema=test_schema, + ) source_file = "test/agentchat/contrib/graph_rag/the_matrix.txt" input_docs = [Document(doctype=DocumentType.TEXT, path_or_url=source_file)] From 71ad707b0e5156c524a69148c4bba359e85303cd Mon Sep 17 00:00:00 2001 From: Mark Sze Date: Fri, 29 Nov 2024 04:10:55 +0000 Subject: [PATCH 25/28] Support for latest GraphRAG 0.3.3 Signed-off-by: Mark Sze --- .../graph_rag/falkor_graph_query_engine.py | 40 +++++++++----- .../graph_rag/falkor_graph_rag_capability.py | 10 ++-- notebook/agentchat_graph_rag_falkordb.ipynb | 53 +++++++++++++------ setup.py | 2 +- 4 files changed, 68 insertions(+), 37 deletions(-) diff --git a/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py b/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py index bf67bd5571..9673cafea0 100644 --- a/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py +++ b/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py @@ -7,17 +7,14 @@ from typing import List from graphrag_sdk import KnowledgeGraph, Source -from graphrag_sdk.schema import Schema +from graphrag_sdk.model_config import KnowledgeGraphModelConfig +from graphrag_sdk.models.openai import OpenAiGenerativeModel +from graphrag_sdk.ontology import Ontology from .document import Document from .graph_query_engine import GraphStoreQueryResult -@dataclass -class FalkorGraphQueryResult(GraphStoreQueryResult): - messages: list = field(default_factory=list) - - class FalkorGraphQueryEngine: """ This is a wrapper for FalkorDB KnowledgeGraph. 
@@ -31,7 +28,7 @@ def __init__( username: str | None = None, password: str | None = None, model: str = "gpt-4o", - schema: Schema | None = None, + ontology: Ontology | None = None, ): """ Initialize a FalkorDB knowledge graph. @@ -44,10 +41,22 @@ def __init__( username (str|None): FalkorDB username. password (str|None): FalkorDB password. model (str): OpenAI model to use for FalkorDB to build and retrieve from the graph. - schema: FalkorDB knowledge graph schema (ontology), https://github.com/FalkorDB/GraphRAG-SDK/blob/2-move-away-from-sql-to-json-ontology-detection/graphrag_sdk/schema/schema.py - If None, FalkorDB will auto generate a schema from the input docs. + ontology: FalkorDB knowledge graph schema/ontology, https://github.com/FalkorDB/GraphRAG-SDK/blob/2-move-away-from-sql-to-json-ontology-detection/graphrag_sdk/schema/schema.py + If None, FalkorDB will auto generate an ontology from the input docs. """ - self.knowledge_graph = KnowledgeGraph(name, host, port, username, password, model, schema) + openai_model = OpenAiGenerativeModel(model) + self.knowledge_graph = KnowledgeGraph( + name=name, + host=host, + port=port, + username=username, + password=password, + model_config=KnowledgeGraphModelConfig.with_model(openai_model), + ontology=ontology, + ) + + # Establish a chat session, this will maintain the history + self._chat_session = self.knowledge_graph.chat_session() def init_db(self, input_doc: List[Document] | None): """ @@ -64,7 +73,7 @@ def init_db(self, input_doc: List[Document] | None): def add_records(self, new_records: List) -> bool: raise NotImplementedError("This method is not supported by FalkorDB SDK yet.") - def query(self, question: str, n_results: int = 1, **kwargs) -> FalkorGraphQueryResult: + def query(self, question: str, n_results: int = 1, **kwargs) -> GraphStoreQueryResult: """ Query the knowledge graph with a question and optional message history. 
@@ -76,6 +85,9 @@ def query(self, question: str, n_results: int = 1, **kwargs) -> FalkorGraphQuery Returns: FalkorGraphQueryResult """ - messages = kwargs.pop("messages", []) - answer, messages = self.knowledge_graph.ask(question, messages) - return FalkorGraphQueryResult(answer=answer, results=[], messages=messages) + response = self._chat_session.send_message(question) + + # History will be considered when querying by setting the last_answer + self._chat_session.last_answer = response["response"] + + return GraphStoreQueryResult(answer=response["response"], results=[]) diff --git a/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py b/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py index 842edc975d..7e8bf9cc90 100644 --- a/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py +++ b/autogen/agentchat/contrib/graph_rag/falkor_graph_rag_capability.py @@ -6,7 +6,8 @@ from autogen import Agent, ConversableAgent, UserProxyAgent -from .falkor_graph_query_engine import FalkorGraphQueryEngine, FalkorGraphQueryResult +from .falkor_graph_query_engine import FalkorGraphQueryEngine +from .graph_query_engine import GraphStoreQueryResult from .graph_rag_capability import GraphRagCapability @@ -24,9 +25,6 @@ def __init__(self, query_engine: FalkorGraphQueryEngine): """ self.query_engine = query_engine - # Graph DB query history. - self._history = [] - def add_to_agent(self, agent: UserProxyAgent): """ Add FalkorDB GraphRAG capability to a UserProxyAgent. @@ -69,9 +67,7 @@ def _reply_using_falkordb_query( A tuple containing a boolean indicating success and the assistant's reply. """ question = self._get_last_question(messages[-1]) - result: FalkorGraphQueryResult = self.query_engine.query(question, messages=self._history) - - self._history = result.messages + result: GraphStoreQueryResult = self.query_engine.query(question) return True, result.answer if result.answer else "I'm sorry, I don't have an answer for that." 
diff --git a/notebook/agentchat_graph_rag_falkordb.ipynb b/notebook/agentchat_graph_rag_falkordb.ipynb index 023a893d50..8af8cc52ca 100644 --- a/notebook/agentchat_graph_rag_falkordb.ipynb +++ b/notebook/agentchat_graph_rag_falkordb.ipynb @@ -22,7 +22,7 @@ "metadata": {}, "outputs": [], "source": [ - "%pip install graphrag_sdk==0.1.3b0" + "%pip install graphrag_sdk==0.3.3" ] }, { @@ -51,9 +51,18 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 1, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/usr/local/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n" + ] + } + ], "source": [ "import os\n", "\n", @@ -100,7 +109,7 @@ "metadata": {}, "outputs": [], "source": [ - "from graphrag_sdk.schema import Schema\n", + "from graphrag_sdk import Attribute, AttributeType, Entity, Ontology, Relation\n", "\n", "from autogen.agentchat.contrib.graph_rag.document import Document, DocumentType\n", "from autogen.agentchat.contrib.graph_rag.falkor_graph_query_engine import FalkorGraphQueryEngine\n", @@ -108,16 +117,20 @@ "# Auto generate graph schema from unstructured data\n", "input_path = \"../test/agentchat/contrib/graph_rag/the_matrix.txt\"\n", "\n", - "movie_schema = Schema()\n", - "actor = movie_schema.add_entity(\"Actor\").add_attribute(\"name\", str, unique=True)\n", - "movie = movie_schema.add_entity(\"Movie\").add_attribute(\"title\", str, unique=True)\n", - "movie_schema.add_relation(\"ACTED\", actor, movie)\n", + "movie_ontology = Ontology()\n", + "movie_ontology.add_entity(\n", + " Entity(label=\"Actor\", attributes=[Attribute(name=\"name\", attr_type=AttributeType.STRING, unique=True)])\n", + ")\n", + "movie_ontology.add_entity(\n", + " Entity(label=\"Movie\", 
attributes=[Attribute(name=\"title\", attr_type=AttributeType.STRING, unique=True)])\n", + ")\n", + "movie_ontology.add_relation(Relation(label=\"ACTED\", source=\"Actor\", target=\"Movie\"))\n", "\n", "query_engine = FalkorGraphQueryEngine(\n", " name=\"IMDB\",\n", " host=\"192.168.0.1\", # Change\n", " port=6379, # if needed\n", - " schema=movie_schema,\n", + " ontology=movie_ontology,\n", ")\n", "\n", "input_documents = [Document(doctype=DocumentType.TEXT, path_or_url=input_path)]\n", @@ -136,7 +149,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 3, "metadata": {}, "outputs": [ { @@ -150,17 +163,27 @@ "--------------------------------------------------------------------------------\n", "\u001b[33mmatrix_agent\u001b[0m (to user_proxy):\n", "\n", - "A few actors who have played in 'The Matrix' are Keanu Reeves, Laurence Fishburne, Carrie-Anne Moss, and Hugo Weaving.\n", + "Keanu Reeves, Laurence Fishburne, and Carrie-Anne Moss are a few actors who've played in 'The Matrix'.\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33muser_proxy\u001b[0m (to matrix_agent):\n", + "\n", + "Anyone else?\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mmatrix_agent\u001b[0m (to user_proxy):\n", + "\n", + "Hugo Weaving, Lilly Wachowski, and Lana Wachowski are other individuals associated with 'The Matrix'.\n", "\n", "--------------------------------------------------------------------------------\n", "\u001b[33muser_proxy\u001b[0m (to matrix_agent):\n", "\n", - "Who else stared in The Matrix?\n", + "Anyone else?\n", "\n", "--------------------------------------------------------------------------------\n", "\u001b[33mmatrix_agent\u001b[0m (to user_proxy):\n", "\n", - "I'm sorry, I don't have an answer for that.\n", + "No, there are no other actors associated with 'The Matrix' besides those already mentioned.\n", "\n", 
"--------------------------------------------------------------------------------\n" ] @@ -168,10 +191,10 @@ { "data": { "text/plain": [ - "ChatResult(chat_id=None, chat_history=[{'content': \"Name a few actors who've played in 'The Matrix'\", 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"A few actors who have played in 'The Matrix' are Keanu Reeves, Laurence Fishburne, Carrie-Anne Moss, and Hugo Weaving.\", 'role': 'user', 'name': 'matrix_agent'}, {'content': 'Who else stared in The Matrix?', 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"I'm sorry, I don't have an answer for that.\", 'role': 'user', 'name': 'matrix_agent'}], summary=\"I'm sorry, I don't have an answer for that.\", cost={'usage_including_cached_inference': {'total_cost': 0}, 'usage_excluding_cached_inference': {'total_cost': 0}}, human_input=['Who else stared in The Matrix?', 'exit'])" + "ChatResult(chat_id=None, chat_history=[{'content': \"Name a few actors who've played in 'The Matrix'\", 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"Keanu Reeves, Laurence Fishburne, and Carrie-Anne Moss are a few actors who've played in 'The Matrix'.\", 'role': 'user', 'name': 'matrix_agent'}, {'content': 'Anyone else?', 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"Hugo Weaving, Lilly Wachowski, and Lana Wachowski are other individuals associated with 'The Matrix'.\", 'role': 'user', 'name': 'matrix_agent'}, {'content': 'Anyone else?', 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"No, there are no other actors associated with 'The Matrix' besides those already mentioned.\", 'role': 'user', 'name': 'matrix_agent'}], summary=\"No, there are no other actors associated with 'The Matrix' besides those already mentioned.\", cost={'usage_including_cached_inference': {'total_cost': 0}, 'usage_excluding_cached_inference': {'total_cost': 0}}, human_input=['Anyone else?', 'Anyone else?', 'exit'])" ] }, - "execution_count": 5, + "execution_count": 3, "metadata": 
{}, "output_type": "execute_result" } diff --git a/setup.py b/setup.py index a77fbce05c..79336ab8a5 100644 --- a/setup.py +++ b/setup.py @@ -61,7 +61,7 @@ retrieve_chat_pgvector = [*retrieve_chat, "pgvector>=0.2.5"] graph_rag_falkor_db = [ - "graphrag_sdk==0.1.3b0", + "graphrag_sdk==0.3.3", ] if current_os in ["Windows", "Darwin"]: From 94cbfcaa113cffbe7d88cc12aa20661e89043617 Mon Sep 17 00:00:00 2001 From: Mark Sze Date: Fri, 29 Nov 2024 04:26:18 +0000 Subject: [PATCH 26/28] Updated test for GraphRAG SDK 0.3.3 Signed-off-by: Mark Sze --- .../graph_rag/test_falkor_graph_rag.py | 27 ++++++++----------- 1 file changed, 11 insertions(+), 16 deletions(-) diff --git a/test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py b/test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py index b680e50e08..0e23414b43 100644 --- a/test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py +++ b/test/agentchat/contrib/graph_rag/test_falkor_graph_rag.py @@ -8,13 +8,10 @@ import pytest from conftest import reason, skip_openai # noqa: E402 -from graphrag_sdk.schema import Schema +from graphrag_sdk import Attribute, AttributeType, Entity, Ontology, Relation try: - from autogen.agentchat.contrib.graph_rag.document import ( - Document, - DocumentType, - ) + from autogen.agentchat.contrib.graph_rag.document import Document, DocumentType from autogen.agentchat.contrib.graph_rag.falkor_graph_query_engine import ( FalkorGraphQueryEngine, GraphStoreQueryResult, @@ -39,16 +36,20 @@ def test_falkor_db_query_engine(): 3. Query it with a question and verify the result contains the critical information. 
""" # Arrange - test_schema = Schema() - actor = test_schema.add_entity("Actor").add_attribute("name", str, unique=True) - movie = test_schema.add_entity("Movie").add_attribute("title", str, unique=True) - test_schema.add_relation("ACTED", actor, movie) + movie_ontology = Ontology() + movie_ontology.add_entity( + Entity(label="Actor", attributes=[Attribute(name="name", attr_type=AttributeType.STRING, unique=True)]) + ) + movie_ontology.add_entity( + Entity(label="Movie", attributes=[Attribute(name="title", attr_type=AttributeType.STRING, unique=True)]) + ) + movie_ontology.add_relation(Relation(label="ACTED", source="Actor", target="Movie")) query_engine = FalkorGraphQueryEngine( name="IMDB", # host="192.168.0.115", # Change # port=6379, # if needed - schema=test_schema, + ontology=movie_ontology, ) source_file = "test/agentchat/contrib/graph_rag/the_matrix.txt" @@ -63,9 +64,3 @@ def test_falkor_db_query_engine(): # Assert assert query_result.answer.find("Keanu Reeves") >= 0 - for message in query_result.messages: - if isinstance(message, dict) and "role" in message and message["role"] == "user": - assert "content" in message - assert message["content"] is question - return - pytest.fail("Question not found in message history.") From 137abdac43a407c19b795ef7b0e4547f69f22215 Mon Sep 17 00:00:00 2001 From: AgentGenie Date: Thu, 28 Nov 2024 23:37:35 -0800 Subject: [PATCH 27/28] Update the default model arg for FalkorDB Query Engine --- .../contrib/graph_rag/falkor_graph_query_engine.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py b/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py index 9673cafea0..76ed7f5929 100644 --- a/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py +++ b/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py @@ -8,6 +8,7 @@ from graphrag_sdk import KnowledgeGraph, Source from graphrag_sdk.model_config import 
KnowledgeGraphModelConfig +from graphrag_sdk.models import GenerativeModel from graphrag_sdk.models.openai import OpenAiGenerativeModel from graphrag_sdk.ontology import Ontology @@ -27,12 +28,12 @@ def __init__( port: int = 6379, username: str | None = None, password: str | None = None, - model: str = "gpt-4o", + model: GenerativeModel = OpenAiGenerativeModel("gpt-4o"), ontology: Ontology | None = None, ): """ Initialize a FalkorDB knowledge graph. - Please also refer to https://github.com/FalkorDB/GraphRAG-SDK/blob/2-move-away-from-sql-to-json-ontology-detection/graphrag_sdk/kg.py + Please also refer to https://github.com/FalkorDB/GraphRAG-SDK/blob/main/graphrag_sdk/kg.py Args: name (str): Knowledge graph name. @@ -40,18 +41,18 @@ def __init__( port (int): FalkorDB port number. username (str|None): FalkorDB username. password (str|None): FalkorDB password. - model (str): OpenAI model to use for FalkorDB to build and retrieve from the graph. - ontology: FalkorDB knowledge graph schema/ontology, https://github.com/FalkorDB/GraphRAG-SDK/blob/2-move-away-from-sql-to-json-ontology-detection/graphrag_sdk/schema/schema.py + model (GenerativeModel): LLM model to use for FalkorDB to build and retrieve from the graph, default to use OAI gpt-4o. + ontology: FalkorDB knowledge graph schema/ontology, https://github.com/FalkorDB/GraphRAG-SDK/blob/main/graphrag_sdk/ontology.py If None, FalkorDB will auto generate an ontology from the input docs. 
""" - openai_model = OpenAiGenerativeModel(model) + self.knowledge_graph = KnowledgeGraph( name=name, host=host, port=port, username=username, password=password, - model_config=KnowledgeGraphModelConfig.with_model(openai_model), + model_config=KnowledgeGraphModelConfig.with_model(model), ontology=ontology, ) From 4cf0e1aa7155d66000f106f2f7cd9f329bf018ae Mon Sep 17 00:00:00 2001 From: AgentGenie Date: Fri, 29 Nov 2024 00:27:58 -0800 Subject: [PATCH 28/28] Enable auto generate graph ontology and add an example --- .../graph_rag/falkor_graph_query_engine.py | 43 +++- notebook/agentchat_graph_rag_falkordb.ipynb | 239 +++++++++++++++--- 2 files changed, 237 insertions(+), 45 deletions(-) diff --git a/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py b/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py index 76ed7f5929..114e6d8ed3 100644 --- a/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py +++ b/autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py @@ -45,19 +45,14 @@ def __init__( ontology: FalkorDB knowledge graph schema/ontology, https://github.com/FalkorDB/GraphRAG-SDK/blob/main/graphrag_sdk/ontology.py If None, FalkorDB will auto generate an ontology from the input docs. 
""" - - self.knowledge_graph = KnowledgeGraph( - name=name, - host=host, - port=port, - username=username, - password=password, - model_config=KnowledgeGraphModelConfig.with_model(model), - ontology=ontology, - ) - - # Establish a chat session, this will maintain the history - self._chat_session = self.knowledge_graph.chat_session() + self.name = name + self.host = host + self.port = port + self.username = username + self.password = password + self.model = model + self.model_config = KnowledgeGraphModelConfig.with_model(model) + self.ontology = ontology def init_db(self, input_doc: List[Document] | None): """ @@ -69,6 +64,25 @@ def init_db(self, input_doc: List[Document] | None): sources.append(Source(doc.path_or_url)) if sources: + # Auto generate graph ontology if not created by user. + if self.ontology is None: + self.ontology = Ontology.from_sources( + sources=sources, + model=self.model, + ) + + self.knowledge_graph = KnowledgeGraph( + name=self.name, + host=self.host, + port=self.port, + username=self.username, + password=self.password, + model_config=KnowledgeGraphModelConfig.with_model(self.model), + ontology=self.ontology, + ) + + # Establish a chat session, this will maintain the history + self._chat_session = self.knowledge_graph.chat_session() self.knowledge_graph.process_sources(sources) def add_records(self, new_records: List) -> bool: @@ -86,6 +100,9 @@ def query(self, question: str, n_results: int = 1, **kwargs) -> GraphStoreQueryR Returns: FalkorGraphQueryResult """ + if self.knowledge_graph is None: + raise ValueError("Knowledge graph is not created.") + response = self._chat_session.send_message(question) # History will be considered when querying by setting the last_answer diff --git a/notebook/agentchat_graph_rag_falkordb.ipynb b/notebook/agentchat_graph_rag_falkordb.ipynb index 8af8cc52ca..9d85924569 100644 --- a/notebook/agentchat_graph_rag_falkordb.ipynb +++ b/notebook/agentchat_graph_rag_falkordb.ipynb @@ -18,18 +18,112 @@ }, { "cell_type": 
"code", - "execution_count": null, + "execution_count": 6, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Defaulting to user installation because normal site-packages is not writeable\n", + "Requirement already satisfied: graphrag_sdk==0.3.3 in /home/autogen/.local/lib/python3.11/site-packages (0.3.3)\n", + "Requirement already satisfied: backoff<3.0.0,>=2.2.1 in /usr/local/lib/python3.11/site-packages (from graphrag_sdk==0.3.3) (2.2.1)\n", + "Requirement already satisfied: bs4<0.0.3,>=0.0.2 in /home/autogen/.local/lib/python3.11/site-packages (from graphrag_sdk==0.3.3) (0.0.2)\n", + "Requirement already satisfied: falkordb<2.0.0,>=1.0.9 in /home/autogen/.local/lib/python3.11/site-packages (from graphrag_sdk==0.3.3) (1.0.10)\n", + "Requirement already satisfied: fix-busted-json<0.0.19,>=0.0.18 in /home/autogen/.local/lib/python3.11/site-packages (from graphrag_sdk==0.3.3) (0.0.18)\n", + "Requirement already satisfied: ipykernel<7.0.0,>=6.29.5 in /usr/local/lib/python3.11/site-packages (from graphrag_sdk==0.3.3) (6.29.5)\n", + "Requirement already satisfied: pypdf<5.0.0,>=4.2.0 in /home/autogen/.local/lib/python3.11/site-packages (from graphrag_sdk==0.3.3) (4.3.1)\n", + "Requirement already satisfied: python-abc<0.3.0,>=0.2.0 in /home/autogen/.local/lib/python3.11/site-packages (from graphrag_sdk==0.3.3) (0.2.0)\n", + "Requirement already satisfied: python-dotenv<2.0.0,>=1.0.1 in /usr/local/lib/python3.11/site-packages (from graphrag_sdk==0.3.3) (1.0.1)\n", + "Requirement already satisfied: ratelimit<3.0.0,>=2.2.1 in /home/autogen/.local/lib/python3.11/site-packages (from graphrag_sdk==0.3.3) (2.2.1)\n", + "Requirement already satisfied: requests<3.0.0,>=2.32.3 in /usr/local/lib/python3.11/site-packages (from graphrag_sdk==0.3.3) (2.32.3)\n", + "Requirement already satisfied: typing-extensions<5.0.0,>=4.12.1 in /usr/local/lib/python3.11/site-packages (from graphrag_sdk==0.3.3) (4.12.2)\n", + 
"Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.11/site-packages (from bs4<0.0.3,>=0.0.2->graphrag_sdk==0.3.3) (4.12.3)\n", + "Requirement already satisfied: redis<6.0.0,>=5.0.1 in /home/autogen/.local/lib/python3.11/site-packages (from falkordb<2.0.0,>=1.0.9->graphrag_sdk==0.3.3) (5.2.0)\n", + "Requirement already satisfied: comm>=0.1.1 in /usr/local/lib/python3.11/site-packages (from ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (0.2.2)\n", + "Requirement already satisfied: debugpy>=1.6.5 in /usr/local/lib/python3.11/site-packages (from ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (1.8.9)\n", + "Requirement already satisfied: ipython>=7.23.1 in /usr/local/lib/python3.11/site-packages (from ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (8.29.0)\n", + "Requirement already satisfied: jupyter-client>=6.1.12 in /usr/local/lib/python3.11/site-packages (from ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (8.6.3)\n", + "Requirement already satisfied: jupyter-core!=5.0.*,>=4.12 in /usr/local/lib/python3.11/site-packages (from ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (5.7.2)\n", + "Requirement already satisfied: matplotlib-inline>=0.1 in /usr/local/lib/python3.11/site-packages (from ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (0.1.7)\n", + "Requirement already satisfied: nest-asyncio in /usr/local/lib/python3.11/site-packages (from ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (1.6.0)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.11/site-packages (from ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (24.2)\n", + "Requirement already satisfied: psutil in /usr/local/lib/python3.11/site-packages (from ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (6.1.0)\n", + "Requirement already satisfied: pyzmq>=24 in /usr/local/lib/python3.11/site-packages (from ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (26.2.0)\n", + "Requirement already satisfied: tornado>=6.1 in /usr/local/lib/python3.11/site-packages (from 
ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (6.4.2)\n", + "Requirement already satisfied: traitlets>=5.4.0 in /usr/local/lib/python3.11/site-packages (from ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (5.14.3)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.11/site-packages (from requests<3.0.0,>=2.32.3->graphrag_sdk==0.3.3) (3.4.0)\n", + "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.11/site-packages (from requests<3.0.0,>=2.32.3->graphrag_sdk==0.3.3) (3.10)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.11/site-packages (from requests<3.0.0,>=2.32.3->graphrag_sdk==0.3.3) (2.2.3)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.11/site-packages (from requests<3.0.0,>=2.32.3->graphrag_sdk==0.3.3) (2024.8.30)\n", + "Requirement already satisfied: decorator in /usr/local/lib/python3.11/site-packages (from ipython>=7.23.1->ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (5.1.1)\n", + "Requirement already satisfied: jedi>=0.16 in /usr/local/lib/python3.11/site-packages (from ipython>=7.23.1->ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (0.19.2)\n", + "Requirement already satisfied: prompt-toolkit<3.1.0,>=3.0.41 in /usr/local/lib/python3.11/site-packages (from ipython>=7.23.1->ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (3.0.48)\n", + "Requirement already satisfied: pygments>=2.4.0 in /usr/local/lib/python3.11/site-packages (from ipython>=7.23.1->ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (2.18.0)\n", + "Requirement already satisfied: stack-data in /usr/local/lib/python3.11/site-packages (from ipython>=7.23.1->ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (0.6.3)\n", + "Requirement already satisfied: pexpect>4.3 in /usr/local/lib/python3.11/site-packages (from ipython>=7.23.1->ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (4.9.0)\n", + "Requirement already satisfied: python-dateutil>=2.8.2 in 
/usr/local/lib/python3.11/site-packages (from jupyter-client>=6.1.12->ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (2.9.0.post0)\n", + "Requirement already satisfied: platformdirs>=2.5 in /usr/local/lib/python3.11/site-packages (from jupyter-core!=5.0.*,>=4.12->ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (4.3.6)\n", + "Requirement already satisfied: soupsieve>1.2 in /usr/local/lib/python3.11/site-packages (from beautifulsoup4->bs4<0.0.3,>=0.0.2->graphrag_sdk==0.3.3) (2.6)\n", + "Requirement already satisfied: parso<0.9.0,>=0.8.4 in /usr/local/lib/python3.11/site-packages (from jedi>=0.16->ipython>=7.23.1->ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (0.8.4)\n", + "Requirement already satisfied: ptyprocess>=0.5 in /usr/local/lib/python3.11/site-packages (from pexpect>4.3->ipython>=7.23.1->ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (0.7.0)\n", + "Requirement already satisfied: wcwidth in /usr/local/lib/python3.11/site-packages (from prompt-toolkit<3.1.0,>=3.0.41->ipython>=7.23.1->ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (0.2.13)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.11/site-packages (from python-dateutil>=2.8.2->jupyter-client>=6.1.12->ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (1.16.0)\n", + "Requirement already satisfied: executing>=1.2.0 in /usr/local/lib/python3.11/site-packages (from stack-data->ipython>=7.23.1->ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (2.1.0)\n", + "Requirement already satisfied: asttokens>=2.1.0 in /usr/local/lib/python3.11/site-packages (from stack-data->ipython>=7.23.1->ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (2.4.1)\n", + "Requirement already satisfied: pure-eval in /usr/local/lib/python3.11/site-packages (from stack-data->ipython>=7.23.1->ipykernel<7.0.0,>=6.29.5->graphrag_sdk==0.3.3) (0.2.3)\n", + "Note: you may need to restart the kernel to use updated packages.\n", + "Defaulting to user installation because normal site-packages is not writeable\n", + "Obtaining 
file:///workspaces/ag2\n", + " Installing build dependencies ... \u001b[?25ldone\n", + "\u001b[?25h Checking if build backend supports build_editable ... \u001b[?25ldone\n", + "\u001b[?25h Getting requirements to build editable ... \u001b[?25ldone\n", + "\u001b[?25h Preparing editable metadata (pyproject.toml) ... \u001b[?25ldone\n", + "\u001b[?25hRequirement already satisfied: openai>=1.3 in /usr/local/lib/python3.11/site-packages (from ag2==0.4) (1.55.0)\n", + "Requirement already satisfied: diskcache in /usr/local/lib/python3.11/site-packages (from ag2==0.4) (5.6.3)\n", + "Requirement already satisfied: termcolor in /usr/local/lib/python3.11/site-packages (from ag2==0.4) (2.5.0)\n", + "Requirement already satisfied: flaml in /usr/local/lib/python3.11/site-packages (from ag2==0.4) (2.3.2)\n", + "Requirement already satisfied: numpy<2.0.0,>=1.24.0 in /usr/local/lib/python3.11/site-packages (from ag2==0.4) (1.26.4)\n", + "Requirement already satisfied: python-dotenv in /usr/local/lib/python3.11/site-packages (from ag2==0.4) (1.0.1)\n", + "Requirement already satisfied: tiktoken in /usr/local/lib/python3.11/site-packages (from ag2==0.4) (0.8.0)\n", + "Requirement already satisfied: pydantic!=2.6.0,<3,>=1.10 in /usr/local/lib/python3.11/site-packages (from ag2==0.4) (1.10.9)\n", + "Requirement already satisfied: docker in /usr/local/lib/python3.11/site-packages (from ag2==0.4) (7.1.0)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.11/site-packages (from ag2==0.4) (24.2)\n", + "Requirement already satisfied: anyio<5,>=3.5.0 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->ag2==0.4) (4.6.2.post1)\n", + "Requirement already satisfied: distro<2,>=1.7.0 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->ag2==0.4) (1.9.0)\n", + "Requirement already satisfied: httpx<1,>=0.23.0 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->ag2==0.4) (0.27.2)\n", + "Requirement already satisfied: jiter<1,>=0.4.0 in 
/usr/local/lib/python3.11/site-packages (from openai>=1.3->ag2==0.4) (0.7.1)\n", + "Requirement already satisfied: sniffio in /usr/local/lib/python3.11/site-packages (from openai>=1.3->ag2==0.4) (1.3.1)\n", + "Requirement already satisfied: tqdm>4 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->ag2==0.4) (4.67.1)\n", + "Requirement already satisfied: typing-extensions<5,>=4.11 in /usr/local/lib/python3.11/site-packages (from openai>=1.3->ag2==0.4) (4.12.2)\n", + "Requirement already satisfied: requests>=2.26.0 in /usr/local/lib/python3.11/site-packages (from docker->ag2==0.4) (2.32.3)\n", + "Requirement already satisfied: urllib3>=1.26.0 in /usr/local/lib/python3.11/site-packages (from docker->ag2==0.4) (2.2.3)\n", + "Requirement already satisfied: regex>=2022.1.18 in /usr/local/lib/python3.11/site-packages (from tiktoken->ag2==0.4) (2024.11.6)\n", + "Requirement already satisfied: idna>=2.8 in /usr/local/lib/python3.11/site-packages (from anyio<5,>=3.5.0->openai>=1.3->ag2==0.4) (3.10)\n", + "Requirement already satisfied: certifi in /usr/local/lib/python3.11/site-packages (from httpx<1,>=0.23.0->openai>=1.3->ag2==0.4) (2024.8.30)\n", + "Requirement already satisfied: httpcore==1.* in /usr/local/lib/python3.11/site-packages (from httpx<1,>=0.23.0->openai>=1.3->ag2==0.4) (1.0.7)\n", + "Requirement already satisfied: h11<0.15,>=0.13 in /usr/local/lib/python3.11/site-packages (from httpcore==1.*->httpx<1,>=0.23.0->openai>=1.3->ag2==0.4) (0.14.0)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.11/site-packages (from requests>=2.26.0->docker->ag2==0.4) (3.4.0)\n", + "Building wheels for collected packages: ag2\n", + " Building editable for ag2 (pyproject.toml) ... 
\u001b[?25ldone\n", + "\u001b[?25h Created wheel for ag2: filename=ag2-0.4-0.editable-py3-none-any.whl size=15779 sha256=6352d50605c2b333aabeb86bee921ade8e34d3429ed67f175db6d0d23dcacd05\n", + " Stored in directory: /tmp/pip-ephem-wheel-cache-62aq_s_d/wheels/81/96/a6/c21a93c37cecb7b27b25b264f04fa27be94acd8d0f5e637527\n", + "Successfully built ag2\n", + "Installing collected packages: ag2\n", + " Attempting uninstall: ag2\n", + " Found existing installation: ag2 0.4\n", + " Uninstalling ag2-0.4:\n", + " Successfully uninstalled ag2-0.4\n", + "Successfully installed ag2-0.4\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], "source": [ - "%pip install graphrag_sdk==0.3.3" + "%pip install graphrag_sdk==0.3.3\n", + "%pip install -e /workspaces/ag2" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [] - }, { "cell_type": "markdown", "metadata": {}, @@ -51,24 +145,15 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 3, "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/usr/local/lib/python3.11/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", - " from .autonotebook import tqdm as notebook_tqdm\n" - ] - } - ], + "outputs": [], "source": [ "import os\n", "\n", "import autogen\n", "\n", - "config_list = autogen.config_list_from_json(env_or_file=\"OAI_CONFIG_LIST\")\n", + "config_list = autogen.config_list_from_json(env_or_file=\"../OAI_CONFIG_LIST\")\n", "\n", "# Put the OpenAI API key into the environment\n", "os.environ[\"OPENAI_API_KEY\"] = config_list[0][\"api_key\"]" @@ -93,20 +178,105 @@ "\n", "**Note:** You need to have a FalkorDB graph database running. 
If you are running one in a Docker container, please ensure your Docker network is setup to allow access to it.\n", "\n", - "In this example, the FalkorDB endpoint is set to host=\"192.168.0.1\" and port=6379, please adjust accordingly. For how to set up FalkorDB, please refer to https://docs.falkordb.com/\n", + "In this example, the FalkorDB endpoint is set to host=\"172.18.0.3\" and port=6379, please adjust accordingly. For how to set up FalkorDB, please refer to https://docs.falkordb.com/\n", "\n", "\n", "Below, we have some sample data from IMDB on the movie 'The Matrix'.\n", "\n", - "We define the entities and attributes that we want in the graph database (`Actor` with `name` and `Movie` with `title`) and the relationships (Actors `ACTED` in Movies). This allows your RAG agent to answer questions about actors in the movie.\n", - "\n", "We then initialise the database with that text document, creating the graph in FalkorDB." ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### A Simple Example\n", + "In this example, the graph ontology is auto-generated." 
+ ] + }, { "cell_type": "code", "execution_count": null, "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[33muser_proxy\u001b[0m (to matrix_agent):\n", + "\n", + "Name a few actors who've played in 'The Matrix'\n", + "\n", + "--------------------------------------------------------------------------------\n", + "\u001b[33mmatrix_agent\u001b[0m (to user_proxy):\n", + "\n", + "Keanu Reeves, Laurence Fishburne, Carrie-Anne Moss, and Hugo Weaving all played in 'The Matrix'.\n", + "\n", + "--------------------------------------------------------------------------------\n" + ] + }, + { + "data": { + "text/plain": [ + "ChatResult(chat_id=None, chat_history=[{'content': \"Name a few actors who've played in 'The Matrix'\", 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"Keanu Reeves, Laurence Fishburne, Carrie-Anne Moss, and Hugo Weaving all played in 'The Matrix'.\", 'role': 'user', 'name': 'matrix_agent'}], summary=\"Keanu Reeves, Laurence Fishburne, Carrie-Anne Moss, and Hugo Weaving all played in 'The Matrix'.\", cost={'usage_including_cached_inference': {'total_cost': 0}, 'usage_excluding_cached_inference': {'total_cost': 0}}, human_input=['exit'])" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from autogen import ConversableAgent, UserProxyAgent\n", + "from autogen.agentchat.contrib.graph_rag.document import Document, DocumentType\n", + "from autogen.agentchat.contrib.graph_rag.falkor_graph_query_engine import FalkorGraphQueryEngine\n", + "from autogen.agentchat.contrib.graph_rag.falkor_graph_rag_capability import FalkorGraphRagCapability\n", + "\n", + "# Auto generate graph schema from unstructured data\n", + "input_path = \"../test/agentchat/contrib/graph_rag/the_matrix.txt\"\n", + "input_documents = [Document(doctype=DocumentType.TEXT, path_or_url=input_path)]\n", + "\n", + "# Create FalkorGraphQueryEngine\n", + "query_engine = 
FalkorGraphQueryEngine(\n",
+    "    name=\"The_Matrix_Auto\",\n",
+    "    host=\"172.18.0.3\",  # Change\n",
+    "    port=6379,  # if needed\n",
+    ")\n",
+    "\n",
+    "# Ingest data and initialize the database\n",
+    "query_engine.init_db(input_doc=input_documents)\n",
+    "\n",
+    "# Create a ConversableAgent (no LLM configuration)\n",
+    "graph_rag_agent = ConversableAgent(\n",
+    "    name=\"matrix_agent\",\n",
+    "    human_input_mode=\"NEVER\",\n",
+    ")\n",
+    "\n",
+    "# Associate the capability with the agent\n",
+    "graph_rag_capability = FalkorGraphRagCapability(query_engine)\n",
+    "graph_rag_capability.add_to_agent(graph_rag_agent)\n",
+    "\n",
+    "# Create a user proxy agent to converse with our RAG agent\n",
+    "user_proxy = UserProxyAgent(\n",
+    "    name=\"user_proxy\",\n",
+    "    human_input_mode=\"ALWAYS\",\n",
+    ")\n",
+    "\n",
+    "user_proxy.initiate_chat(graph_rag_agent, message=\"Name a few actors who've played in 'The Matrix'\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Revisit the example with a self-defined graph ontology.\n",
+    "We define the entities and attributes that we want in the graph database (`Actor` with `name` and `Movie` with `title`) and the relationships (Actors `ACTED` in Movies). This allows your RAG agent to answer questions about actors in the movie."
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, "outputs": [], "source": [ "from graphrag_sdk import Attribute, AttributeType, Entity, Ontology, Relation\n", @@ -114,7 +284,6 @@ "from autogen.agentchat.contrib.graph_rag.document import Document, DocumentType\n", "from autogen.agentchat.contrib.graph_rag.falkor_graph_query_engine import FalkorGraphQueryEngine\n", "\n", - "# Auto generate graph schema from unstructured data\n", "input_path = \"../test/agentchat/contrib/graph_rag/the_matrix.txt\"\n", "\n", "movie_ontology = Ontology()\n", @@ -128,7 +297,7 @@ "\n", "query_engine = FalkorGraphQueryEngine(\n", " name=\"IMDB\",\n", - " host=\"192.168.0.1\", # Change\n", + " host=\"172.18.0.3\", # Change\n", " port=6379, # if needed\n", " ontology=movie_ontology,\n", ")\n", @@ -157,33 +326,39 @@ "output_type": "stream", "text": [ "\u001b[33muser_proxy\u001b[0m (to matrix_agent):\n", - "\n", + "\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ "Name a few actors who've played in 'The Matrix'\n", "\n", "--------------------------------------------------------------------------------\n", "\u001b[33mmatrix_agent\u001b[0m (to user_proxy):\n", "\n", - "Keanu Reeves, Laurence Fishburne, and Carrie-Anne Moss are a few actors who've played in 'The Matrix'.\n", + "Keanu Reeves, Laurence Fishburne, Carrie-Anne Moss, and Hugo Weaving have played in 'The Matrix'.\n", "\n", "--------------------------------------------------------------------------------\n", "\u001b[33muser_proxy\u001b[0m (to matrix_agent):\n", "\n", - "Anyone else?\n", + "List additional actors.\n", "\n", "--------------------------------------------------------------------------------\n", "\u001b[33mmatrix_agent\u001b[0m (to user_proxy):\n", "\n", - "Hugo Weaving, Lilly Wachowski, and Lana Wachowski are other individuals associated with 'The Matrix'.\n", + "The actors Keanu Reeves, Laurence Fishburne, Carrie-Anne Moss, and Hugo Weaving are listed. 
There are no additional actors provided in the context related to 'The Matrix'.\n", "\n", "--------------------------------------------------------------------------------\n", "\u001b[33muser_proxy\u001b[0m (to matrix_agent):\n", "\n", - "Anyone else?\n", + "List additional people associated with the movie.\n", "\n", "--------------------------------------------------------------------------------\n", "\u001b[33mmatrix_agent\u001b[0m (to user_proxy):\n", "\n", - "No, there are no other actors associated with 'The Matrix' besides those already mentioned.\n", + "I am sorry, I could not find the answer to your question\n", "\n", "--------------------------------------------------------------------------------\n" ] @@ -191,7 +366,7 @@ { "data": { "text/plain": [ - "ChatResult(chat_id=None, chat_history=[{'content': \"Name a few actors who've played in 'The Matrix'\", 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"Keanu Reeves, Laurence Fishburne, and Carrie-Anne Moss are a few actors who've played in 'The Matrix'.\", 'role': 'user', 'name': 'matrix_agent'}, {'content': 'Anyone else?', 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"Hugo Weaving, Lilly Wachowski, and Lana Wachowski are other individuals associated with 'The Matrix'.\", 'role': 'user', 'name': 'matrix_agent'}, {'content': 'Anyone else?', 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"No, there are no other actors associated with 'The Matrix' besides those already mentioned.\", 'role': 'user', 'name': 'matrix_agent'}], summary=\"No, there are no other actors associated with 'The Matrix' besides those already mentioned.\", cost={'usage_including_cached_inference': {'total_cost': 0}, 'usage_excluding_cached_inference': {'total_cost': 0}}, human_input=['Anyone else?', 'Anyone else?', 'exit'])" + "ChatResult(chat_id=None, chat_history=[{'content': \"Name a few actors who've played in 'The Matrix'\", 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"Keanu Reeves, 
Laurence Fishburne, Carrie-Anne Moss, and Hugo Weaving have played in 'The Matrix'.\", 'role': 'user', 'name': 'matrix_agent'}, {'content': 'List additional actors.', 'role': 'assistant', 'name': 'user_proxy'}, {'content': \"The actors Keanu Reeves, Laurence Fishburne, Carrie-Anne Moss, and Hugo Weaving are listed. There are no additional actors provided in the context related to 'The Matrix'.\", 'role': 'user', 'name': 'matrix_agent'}, {'content': 'List additional people associated with the movie.', 'role': 'assistant', 'name': 'user_proxy'}, {'content': 'I am sorry, I could not find the answer to your question', 'role': 'user', 'name': 'matrix_agent'}], summary='I am sorry, I could not find the answer to your question', cost={'usage_including_cached_inference': {'total_cost': 0}, 'usage_excluding_cached_inference': {'total_cost': 0}}, human_input=['List additional actors.', 'List additional people associated with the movie.', 'exit'])" ] }, "execution_count": 3,