
Commit c076b2c

Revert "feat: add local langfuse tracing option (#106)"
This reverts commit 56d88a8.
salman1993 committed Oct 10, 2024
1 parent: 56d88a8 · commit: c076b2c
Showing 21 changed files with 10 additions and 387 deletions.
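
This revert removes the langfuse-wrapper workspace package and the observe_wrapper decorator it exported; the hunks below drop the "from langfuse_wrapper.langfuse_wrapper import observe_wrapper" imports, the @observe_wrapper() decoration on Exchange.call_function, and the @observe_wrapper(as_type="generation") decoration on each provider's complete method. The wrapper's own source is not shown in this diff, so the following is only a hypothetical sketch of such a decorator, assuming the Langfuse v2 Python SDK's observe decorator and a no-op fallback when tracing is not configured; every name other than observe_wrapper and as_type is illustrative, not taken from the deleted code.

```python
# Hypothetical reconstruction of the kind of decorator this commit removes.
# It is NOT the deleted source: it assumes the Langfuse v2 Python SDK and a
# no-op fallback when tracing is not configured; only the names
# observe_wrapper and as_type come from the diff itself.
import os
from functools import wraps
from typing import Any, Callable

try:
    from langfuse.decorators import observe  # tracing decorator in langfuse v2
except ImportError:  # langfuse not installed: fall back to a pass-through
    observe = None


def observe_wrapper(**observe_kwargs: Any) -> Callable:
    """Return langfuse's observe(...) when tracing is configured, else a no-op."""

    def decorator(fn: Callable) -> Callable:
        if observe is not None and os.environ.get("LANGFUSE_PUBLIC_KEY"):
            # e.g. observe_wrapper(as_type="generation") on a provider's complete()
            return observe(**observe_kwargs)(fn)

        @wraps(fn)
        def passthrough(*args: Any, **kwargs: Any) -> Any:
            return fn(*args, **kwargs)

        return passthrough

    return decorator
```

Under those assumptions, dropping the decorations (as this commit does) and leaving them in place without Langfuse credentials are behaviorally equivalent, which is why, in exchange.py and the provider modules, the revert only touches import and decorator lines.
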
23 changes: 1 addition & 22 deletions .github/workflows/ci.yaml
@@ -28,7 +28,7 @@ jobs:
uv run pytest tests -m 'not integration'
goose:
runs-on: ubuntu-latest
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4
@@ -48,27 +48,6 @@ jobs:
run: |
uv run pytest tests -m 'not integration'
langfuse-wrapper:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4

- name: Install UV
run: curl -LsSf https://astral.sh/uv/install.sh | sh

- name: Source Cargo Environment
run: source $HOME/.cargo/env

- name: Ruff
run: |
uvx ruff check packages/langfuse-wrapper
uvx ruff format packages/langfuse-wrapper --check
- name: Run tests
working-directory: ./packages/langfuse-wrapper
run: |
uv run pytest tests -m 'not integration'
# This runs integration tests of the OpenAI API, using Ollama to host models.
# This lets us test PRs from forks which can't access secrets like API keys.
3 changes: 0 additions & 3 deletions .gitignore
@@ -125,6 +125,3 @@ docs/docs/reference

# uv lock file
uv.lock

# langfuse docker file
**/packages/langfuse-wrapper/scripts/docker-compose.yaml
9 changes: 0 additions & 9 deletions packages/exchange/pyproject.toml
@@ -13,8 +13,6 @@ dependencies = [
"tiktoken>=0.7.0",
"httpx>=0.27.0",
"tenacity>=9.0.0",
"python-dotenv>=1.0.1",
"langfuse-wrapper"
]

[tool.hatch.build.targets.wheel]
@@ -49,10 +47,3 @@ ai-exchange = "exchange:module_name"
markers = [
"integration: marks tests that need to authenticate (deselect with '-m \"not integration\"')",
]

[tool.uv.sources]
langfuse-wrapper = { workspace = true}

[tool.uv.workspace]
members = ["../langfuse-wrapper"]

2 changes: 0 additions & 2 deletions packages/exchange/src/exchange/exchange.py
@@ -14,7 +14,6 @@
from exchange.providers import Provider, Usage
from exchange.tool import Tool
from exchange.token_usage_collector import _token_usage_collector
from langfuse_wrapper.langfuse_wrapper import observe_wrapper


def validate_tool_output(output: str) -> None:
@@ -128,7 +127,6 @@ def reply(self, max_tool_use: int = 128) -> Message:

return response

@observe_wrapper()
def call_function(self, tool_use: ToolUse) -> ToolResult:
"""Call the function indicated by the tool use"""
tool = self._toolmap.get(tool_use.name)
2 changes: 0 additions & 2 deletions packages/exchange/src/exchange/providers/anthropic.py
@@ -8,7 +8,6 @@
from exchange.providers.base import Provider, Usage
from tenacity import retry, wait_fixed, stop_after_attempt
from exchange.providers.utils import retry_if_status, raise_for_status
from langfuse_wrapper.langfuse_wrapper import observe_wrapper

ANTHROPIC_HOST = "https://api.anthropic.com/v1/messages"

@@ -124,7 +123,6 @@ def messages_to_anthropic_spec(messages: List[Message]) -> List[Dict[str, Any]]:
messages_spec.append(converted)
return messages_spec

@observe_wrapper(as_type="generation")
def complete(
self,
model: str,
2 changes: 0 additions & 2 deletions packages/exchange/src/exchange/providers/bedrock.py
@@ -15,7 +15,6 @@
from tenacity import retry, wait_fixed, stop_after_attempt
from exchange.providers.utils import raise_for_status, retry_if_status
from exchange.tool import Tool
from langfuse_wrapper.langfuse_wrapper import observe_wrapper

SERVICE = "bedrock-runtime"
UTC = timezone.utc
@@ -176,7 +175,6 @@ def from_env(cls: Type["BedrockProvider"]) -> "BedrockProvider":
)
return cls(client=client)

@observe_wrapper(as_type="generation")
def complete(
self,
model: str,
3 changes: 1 addition & 2 deletions packages/exchange/src/exchange/providers/databricks.py
@@ -13,7 +13,7 @@
tools_to_openai_spec,
)
from exchange.tool import Tool
from langfuse_wrapper.langfuse_wrapper import observe_wrapper


retry_procedure = retry(
wait=wait_fixed(2),
@@ -69,7 +69,6 @@ def get_usage(data: dict) -> Usage:
total_tokens=total_tokens,
)

@observe_wrapper(as_type="generation")
def complete(
self,
model: str,
2 changes: 0 additions & 2 deletions packages/exchange/src/exchange/providers/google.py
@@ -8,7 +8,6 @@
from exchange.providers.base import Provider, Usage
from tenacity import retry, wait_fixed, stop_after_attempt
from exchange.providers.utils import raise_for_status, retry_if_status
from langfuse_wrapper.langfuse_wrapper import observe_wrapper

GOOGLE_HOST = "https://generativelanguage.googleapis.com/v1beta"

@@ -122,7 +121,6 @@ def messages_to_google_spec(messages: List[Message]) -> List[Dict[str, Any]]:

return messages_spec

@observe_wrapper(as_type="generation")
def complete(
self,
model: str,
2 changes: 0 additions & 2 deletions packages/exchange/src/exchange/providers/openai.py
@@ -15,7 +15,6 @@
from exchange.tool import Tool
from tenacity import retry, wait_fixed, stop_after_attempt
from exchange.providers.utils import retry_if_status
from langfuse_wrapper.langfuse_wrapper import observe_wrapper

OPENAI_HOST = "https://api.openai.com/"

@@ -66,7 +65,6 @@ def get_usage(data: dict) -> Usage:
total_tokens=total_tokens,
)

@observe_wrapper(as_type="generation")
def complete(
self,
model: str,
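
Across the provider modules above the change is mechanical: each file loses the langfuse_wrapper import and the single decorator line above complete, while the method bodies are untouched. A deliberately simplified, hypothetical provider illustrating the before/after shape (the real providers also take system prompts and tools):

```python
# Illustration only: not one of the real providers in this diff.
from typing import Any, Dict, List


class ToyProvider:
    # Before this revert, the completion call carried the tracing decorator:
    #
    #     @observe_wrapper(as_type="generation")
    #     def complete(self, model: str, messages: List[Dict[str, Any]], **kwargs: Any) -> Dict[str, Any]:
    #         ...
    #
    # After the revert, only the decorator line and its import are gone; the
    # signature and body stay the same.
    def complete(self, model: str, messages: List[Dict[str, Any]], **kwargs: Any) -> Dict[str, Any]:
        return {"model": model, "choices": [{"message": {"role": "assistant", "content": "ok"}}]}
```
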
2 changes: 0 additions & 2 deletions packages/langfuse-wrapper/.ruff.toml

This file was deleted.

28 changes: 0 additions & 28 deletions packages/langfuse-wrapper/README.md

This file was deleted.

16 changes: 0 additions & 16 deletions packages/langfuse-wrapper/env/.env.langfuse.local

This file was deleted.

28 changes: 0 additions & 28 deletions packages/langfuse-wrapper/pyproject.toml

This file was deleted.

99 changes: 0 additions & 99 deletions packages/langfuse-wrapper/scripts/setup_langfuse.sh

This file was deleted.

3 changes: 0 additions & 3 deletions packages/langfuse-wrapper/src/langfuse_wrapper/__init__.py

This file was deleted.

