feat(js): Add support for formatting LangChain prompts as OpenAI and Anthropic payloads #1038

Closed · wants to merge 4 commits
Changes from all commits
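This PR adds two helpers, `convertPromptToOpenAI` and `convertPromptToAnthropic`, exposed under new `langsmith/utils/prompts/openai` and `langsmith/utils/prompts/anthropic` entrypoints. Each takes a formatted LangChain prompt value (for example, one pulled from the LangSmith prompt hub) and returns a payload fragment for the corresponding provider SDK. A minimal usage sketch, assembled from the docstring examples and integration tests in this diff (the hub prompt `jacob/joke-generator` is the one used there):

```ts
import { pull } from "langchain/hub";
import { convertPromptToOpenAI } from "langsmith/utils/prompts/openai";
import { convertPromptToAnthropic } from "langsmith/utils/prompts/anthropic";

// Pull a prompt from the hub and format it with LangChain as usual.
const prompt = await pull("jacob/joke-generator");
const formattedPrompt = await prompt.invoke({ topic: "cats" });

// OpenAI: yields { messages } ready for chat.completions.create().
const { messages } = convertPromptToOpenAI(formattedPrompt);

// Anthropic: yields a partial messages.create() payload ({ system, messages }).
const { system, messages: anthropicMessages } =
  convertPromptToAnthropic(formattedPrompt);
```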
4 changes: 4 additions & 0 deletions .github/actions/js-integration-tests/action.yml
@@ -13,6 +13,9 @@ inputs:
  openai-api-key:
    description: "OpenAI API key"
    required: false
  anthropic-api-key:
    description: "Anthropic API key"
    required: false
runs:
  using: "composite"
  steps:
@@ -36,6 +39,7 @@ runs:
      shell: bash
      working-directory: js
      env:
        ANTHROPIC_API_KEY: ${{ inputs.anthropic-api-key }}
        LANGCHAIN_TRACING_V2: "true"
        LANGCHAIN_ENDPOINT: ${{ inputs.langchain-endpoint }}
        LANGCHAIN_API_KEY: ${{ inputs.langchain-api-key }}
8 changes: 8 additions & 0 deletions js/.gitignore
@@ -79,6 +79,14 @@ Chinook_Sqlite.sql
/singletons/traceable.js
/singletons/traceable.d.ts
/singletons/traceable.d.cts
/utils/prompts/anthropic.cjs
/utils/prompts/anthropic.js
/utils/prompts/anthropic.d.ts
/utils/prompts/anthropic.d.cts
/utils/prompts/openai.cjs
/utils/prompts/openai.js
/utils/prompts/openai.d.ts
/utils/prompts/openai.d.cts
/index.cjs
/index.js
/index.d.ts
32 changes: 30 additions & 2 deletions js/package.json
@@ -1,6 +1,6 @@
{
"name": "langsmith",
"version": "0.1.60",
"version": "0.1.61",
"description": "Client library to connect to the LangSmith LLM Tracing and Evaluation Platform.",
"packageManager": "yarn@1.22.19",
"files": [
@@ -53,6 +53,14 @@
"singletons/traceable.js",
"singletons/traceable.d.ts",
"singletons/traceable.d.cts",
"utils/prompts/anthropic.cjs",
"utils/prompts/anthropic.js",
"utils/prompts/anthropic.d.ts",
"utils/prompts/anthropic.d.cts",
"utils/prompts/openai.cjs",
"utils/prompts/openai.js",
"utils/prompts/openai.d.ts",
"utils/prompts/openai.d.cts",
"index.cjs",
"index.js",
"index.d.ts",
@@ -106,12 +114,14 @@
},
"devDependencies": {
"@ai-sdk/openai": "^0.0.40",
"@anthropic-ai/sdk": "^0.27.3",
"@babel/preset-env": "^7.22.4",
"@faker-js/faker": "^8.4.1",
"@jest/globals": "^29.5.0",
"@langchain/anthropic": "^0.3.2",
"@langchain/core": "^0.3.1",
"@langchain/langgraph": "^0.2.3",
"@langchain/openai": "^0.3.0",
"@langchain/openai": "^0.3.1",
"@tsconfig/recommended": "^1.0.2",
"@types/jest": "^29.5.1",
"@typescript-eslint/eslint-plugin": "^5.59.8",
@@ -266,6 +276,24 @@
"import": "./singletons/traceable.js",
"require": "./singletons/traceable.cjs"
},
"./utils/prompts/anthropic": {
"types": {
"import": "./utils/prompts/anthropic.d.ts",
"require": "./utils/prompts/anthropic.d.cts",
"default": "./utils/prompts/anthropic.d.ts"
},
"import": "./utils/prompts/anthropic.js",
"require": "./utils/prompts/anthropic.cjs"
},
"./utils/prompts/openai": {
"types": {
"import": "./utils/prompts/openai.d.ts",
"require": "./utils/prompts/openai.d.cts",
"default": "./utils/prompts/openai.d.ts"
},
"import": "./utils/prompts/openai.js",
"require": "./utils/prompts/openai.cjs"
},
"./package.json": "./package.json"
}
}
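These exports map entries wire the new modules up for both module systems. A minimal consumption sketch, assuming `langsmith@0.1.61` is installed (ESM resolves through the `import` condition; CommonJS callers get the `.cjs` builds through `require`):

```ts
// ESM: resolved via the "import" condition of "./utils/prompts/*".
import { convertPromptToOpenAI } from "langsmith/utils/prompts/openai";
import { convertPromptToAnthropic } from "langsmith/utils/prompts/anthropic";

// CommonJS equivalent (resolved via the "require" condition):
// const { convertPromptToOpenAI } = require("langsmith/utils/prompts/openai");
```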
2 changes: 2 additions & 0 deletions js/scripts/create-entrypoints.js
@@ -19,6 +19,8 @@ const entrypoints = {
"wrappers/openai": "wrappers/openai",
"wrappers/vercel": "wrappers/vercel",
"singletons/traceable": "singletons/traceable",
"utils/prompts/anthropic": "utils/prompts/anthropic",
"utils/prompts/openai": "utils/prompts/openai"
};

const updateJsonFile = (relativePath, updateFunction) => {
2 changes: 1 addition & 1 deletion js/src/index.ts
@@ -14,4 +14,4 @@ export { RunTree, type RunTreeConfig } from "./run_trees.js";
export { overrideFetchImplementation } from "./singletons/fetch.js";

// Update using yarn bump-version
export const __version__ = "0.1.60";
export const __version__ = "0.1.61";
57 changes: 57 additions & 0 deletions js/src/utils/prompts/anthropic.ts
@@ -0,0 +1,57 @@
/* eslint-disable import/no-extraneous-dependencies */
import type { BasePromptValue } from "@langchain/core/prompt_values";
import * as langChainAnthropicImports from "@langchain/anthropic";
import Anthropic from "@anthropic-ai/sdk";

/**
 * Convert a formatted LangChain prompt (e.g. pulled from the hub) into
 * a format expected by Anthropic's JS SDK.
 *
 * Requires the "@langchain/anthropic" package to be installed in addition
 * to the Anthropic SDK.
 *
 * @example
 * ```ts
 * import { convertPromptToAnthropic } from "langsmith/utils/prompts/anthropic";
 * import { pull } from "langchain/hub";
 *
 * import Anthropic from '@anthropic-ai/sdk';
 *
 * const prompt = await pull("jacob/joke-generator");
 * const formattedPrompt = await prompt.invoke({
 *   topic: "cats",
 * });
 *
 * const { system, messages } = convertPromptToAnthropic(formattedPrompt);
 *
 * const anthropicClient = new Anthropic({
 *   apiKey: 'your_api_key',
 * });
 *
 * const anthropicResponse = await anthropicClient.messages.create({
 *   model: "claude-3-5-sonnet-20240620",
 *   max_tokens: 1024,
 *   stream: false,
 *   system,
 *   messages,
 * });
 * ```
 * @param formattedPrompt
 * @returns A partial Anthropic payload.
 */
export function convertPromptToAnthropic(
  formattedPrompt: BasePromptValue
): Anthropic.Messages.MessageCreateParams {
  const messages = formattedPrompt.toChatMessages();
  const { _convertMessagesToAnthropicPayload } = langChainAnthropicImports;
  if (typeof _convertMessagesToAnthropicPayload !== "function") {
    throw new Error(
      `Please update your version of "@langchain/anthropic" to 0.3.2 or higher.`
    );
  }
  const anthropicBody = _convertMessagesToAnthropicPayload(messages);
  if (anthropicBody.messages === undefined) {
    anthropicBody.messages = [];
  }
  return anthropicBody;
}
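Since the return type is `Anthropic.Messages.MessageCreateParams`, a caller can also spread the converted body and add only the fields the converter does not populate (such as `model` and `max_tokens`), instead of destructuring `system` and `messages` as in the docstring. A minimal sketch of that variant, reusing the hub prompt from the example above:

```ts
import Anthropic from "@anthropic-ai/sdk";
import { pull } from "langchain/hub";
import { convertPromptToAnthropic } from "langsmith/utils/prompts/anthropic";

const prompt = await pull("jacob/joke-generator");
const formattedPrompt = await prompt.invoke({ topic: "cats" });

// Spread the converted { system, messages } body and supply the remaining
// required request fields.
const anthropicClient = new Anthropic();
const anthropicResponse = await anthropicClient.messages.create({
  ...convertPromptToAnthropic(formattedPrompt),
  model: "claude-3-5-sonnet-20240620",
  max_tokens: 1024,
});
```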
52 changes: 52 additions & 0 deletions js/src/utils/prompts/openai.ts
@@ -0,0 +1,52 @@
/* eslint-disable import/no-extraneous-dependencies */
import type { BasePromptValue } from "@langchain/core/prompt_values";
import * as langChainOpenAIImports from "@langchain/openai";
import type { OpenAI } from "openai";

/**
 * Convert a formatted LangChain prompt (e.g. pulled from the hub) into
 * a format expected by OpenAI's JS SDK.
 *
 * Requires the "@langchain/openai" package to be installed in addition
 * to the OpenAI SDK.
 *
 * @example
 * ```ts
 * import { convertPromptToOpenAI } from "langsmith/utils/prompts/openai";
 * import { pull } from "langchain/hub";
 *
 * import OpenAI from 'openai';
 *
 * const prompt = await pull("jacob/joke-generator");
 * const formattedPrompt = await prompt.invoke({
 *   topic: "cats",
 * });
 *
 * const { messages } = convertPromptToOpenAI(formattedPrompt);
 *
 * const openAIClient = new OpenAI();
 *
 * const openaiResponse = await openAIClient.chat.completions.create({
 *   model: "gpt-4o",
 *   messages,
 * });
 * ```
 * @param formattedPrompt
 * @returns A partial OpenAI payload.
 */
export function convertPromptToOpenAI(formattedPrompt: BasePromptValue): {
  messages: OpenAI.Chat.ChatCompletionMessageParam[];
} {
  const messages = formattedPrompt.toChatMessages();
  const { _convertMessagesToOpenAIParams } = langChainOpenAIImports;
  if (typeof _convertMessagesToOpenAIParams !== "function") {
    throw new Error(
      `Please update your version of "@langchain/openai" to 0.3.1 or higher.`
    );
  }
  return {
    messages: _convertMessagesToOpenAIParams(
      messages
    ) as OpenAI.Chat.ChatCompletionMessageParam[],
  };
}
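Because the helper emits plain OpenAI chat messages, it also composes with the SDK's existing `wrapOpenAI` wrapper from `langsmith/wrappers/openai`, so the raw completion call can still be traced to LangSmith. A minimal sketch, assuming a configured LangSmith environment and the same hub prompt as above:

```ts
import OpenAI from "openai";
import { pull } from "langchain/hub";
import { wrapOpenAI } from "langsmith/wrappers/openai";
import { convertPromptToOpenAI } from "langsmith/utils/prompts/openai";

// Wrap the raw client so the completion call below is traced in LangSmith.
const openAIClient = wrapOpenAI(new OpenAI());

const prompt = await pull("jacob/joke-generator");
const formattedPrompt = await prompt.invoke({ topic: "cats" });
const { messages } = convertPromptToOpenAI(formattedPrompt);

const openAIResponse = await openAIClient.chat.completions.create({
  model: "gpt-4o-mini",
  messages,
});
```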
25 changes: 25 additions & 0 deletions js/src/utils/prompts/tests/anthropic.int.test.ts
@@ -0,0 +1,25 @@
import Anthropic from "@anthropic-ai/sdk";
import { pull } from "langchain/hub";

import { convertPromptToAnthropic } from "../anthropic.js";

test("basic traceable implementation", async () => {
  const prompt = await pull("jacob/joke-generator");
Review comment (Collaborator): maybe silly question, but what's the need to do this in the SDK if we are pulling from langchain/hub anyway?

Reply (Author): Optics and consistency with Python?

  const formattedPrompt = await prompt.invoke({
    topic: "cats",
  });

  const { system, messages } = convertPromptToAnthropic(formattedPrompt);

  const anthropicClient = new Anthropic();

  const anthropicResponse = await anthropicClient.messages.create({
    model: "claude-3-haiku-20240307",
    system,
    messages: messages,
    max_tokens: 1024,
    stream: false,
  });

  expect(anthropicResponse.content).toBeDefined();
});
22 changes: 22 additions & 0 deletions js/src/utils/prompts/tests/openai.int.test.ts
@@ -0,0 +1,22 @@
import OpenAI from "openai";
import { pull } from "langchain/hub";

import { convertPromptToOpenAI } from "../openai.js";

test("basic traceable implementation", async () => {
  const prompt = await pull("jacob/joke-generator");
  const formattedPrompt = await prompt.invoke({
    topic: "cats",
  });

  const { messages } = convertPromptToOpenAI(formattedPrompt);

  const openAIClient = new OpenAI();

  const openAIResponse = await openAIClient.chat.completions.create({
    model: "gpt-4o-mini",
    messages,
  });

  expect(openAIResponse.choices.length).toBeGreaterThan(0);
});
4 changes: 3 additions & 1 deletion js/tsconfig.json
@@ -43,7 +43,9 @@
"src/anonymizer/index.ts",
"src/wrappers/openai.ts",
"src/wrappers/vercel.ts",
"src/singletons/traceable.ts"
"src/singletons/traceable.ts",
"src/utils/prompts/anthropic.ts",
"src/utils/prompts/openai.ts"
]
}
}
52 changes: 51 additions & 1 deletion js/yarn.lock
@@ -78,6 +78,19 @@
    "@jridgewell/gen-mapping" "^0.3.0"
    "@jridgewell/trace-mapping" "^0.3.9"

"@anthropic-ai/sdk@^0.27.3":
  version "0.27.3"
  resolved "https://registry.yarnpkg.com/@anthropic-ai/sdk/-/sdk-0.27.3.tgz#592cdd873c85ffab9589ae6f2e250cbf150e1475"
  integrity sha512-IjLt0gd3L4jlOfilxVXTifn42FnVffMgDC04RJK1KDZpmkBWLv0XC92MVVmkxrFZNS/7l3xWgP/I3nqtX1sQHw==
  dependencies:
    "@types/node" "^18.11.18"
    "@types/node-fetch" "^2.6.4"
    abort-controller "^3.0.0"
    agentkeepalive "^4.2.1"
    form-data-encoder "1.7.2"
    formdata-node "^4.3.2"
    node-fetch "^2.6.7"

"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.12.13", "@babel/code-frame@^7.21.4", "@babel/code-frame@^7.22.13":
version "7.22.13"
resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.13.tgz#e3c1c099402598483b7a8c46a721d1038803755e"
@@ -1368,6 +1381,16 @@
"@jridgewell/resolve-uri" "3.1.0"
"@jridgewell/sourcemap-codec" "1.4.14"

"@langchain/anthropic@^0.3.2":
version "0.3.2"
resolved "https://registry.yarnpkg.com/@langchain/anthropic/-/anthropic-0.3.2.tgz#ca28576573c5b2b9d2277f959100996603a2b977"
integrity sha512-Bgb0SyxQcX+/GOGQ66RsmNmNdnXwpvQt9HLNnwPOSDmgJIegzst3KpB/iHbckgiWtHXBE2ETzWqkLR38/kfHpQ==
dependencies:
"@anthropic-ai/sdk" "^0.27.3"
fast-xml-parser "^4.4.1"
zod "^3.22.4"
zod-to-json-schema "^3.22.4"

"@langchain/core@^0.3.1":
version "0.3.1"
resolved "https://registry.yarnpkg.com/@langchain/core/-/core-0.3.1.tgz#f06206809575b2a95eaef609b3273842223c0786"
@@ -1402,7 +1425,7 @@
uuid "^10.0.0"
zod "^3.23.8"

"@langchain/openai@>=0.1.0 <0.4.0", "@langchain/openai@^0.3.0":
"@langchain/openai@>=0.1.0 <0.4.0":
version "0.3.0"
resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-0.3.0.tgz#89329ab9350187269a471dac2c2f4fca5f1fc5a3"
integrity sha512-yXrz5Qn3t9nq3NQAH2l4zZOI4ev2CFdLC5kvmi5SdW4bggRuM40SXTUAY3VRld4I5eocYfk82VbrlA+6dvN5EA==
@@ -1412,6 +1435,16 @@
zod "^3.22.4"
zod-to-json-schema "^3.22.3"

"@langchain/openai@^0.3.1":
version "0.3.1"
resolved "https://registry.yarnpkg.com/@langchain/openai/-/openai-0.3.1.tgz#3841cf992f4da571514798ee5abf8e20cb66933f"
integrity sha512-+BEIs8zw4QJE9RYce4K0oPScWYZjQJ+MKgNqS3/kEc2lRBdw2061yy0raO6/HW6+ir3qUh8oABT/5BNuzSkgkw==
dependencies:
js-tiktoken "^1.0.12"
openai "^4.57.3"
zod "^3.22.4"
zod-to-json-schema "^3.22.3"

"@langchain/textsplitters@>=0.0.0 <0.2.0":
version "0.1.0"
resolved "https://registry.yarnpkg.com/@langchain/textsplitters/-/textsplitters-0.1.0.tgz#f37620992192df09ecda3dfbd545b36a6bcbae46"
@@ -2669,6 +2702,13 @@ fast-levenshtein@^2.0.6:
resolved "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz"
integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==

fast-xml-parser@^4.4.1:
  version "4.5.0"
  resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.5.0.tgz#2882b7d01a6825dfdf909638f2de0256351def37"
  integrity sha512-/PlTQCI96+fZMAOLMZK4CWG1ItCbfZ/0jx7UIJFChPNrx7tcEgerUgWbeieCM9MfHInUDyK8DWYZ+YrywDJuTg==
  dependencies:
    strnum "^1.0.5"

fastq@^1.6.0:
version "1.15.0"
resolved "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz"
@@ -4488,6 +4528,11 @@ strip-json-comments@^3.1.0, strip-json-comments@^3.1.1:
resolved "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz"
integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==

strnum@^1.0.5:
  version "1.0.5"
  resolved "https://registry.yarnpkg.com/strnum/-/strnum-1.0.5.tgz#5c4e829fe15ad4ff0d20c3db5ac97b73c9b072db"
  integrity sha512-J8bbNyKKXl5qYcR36TIO8W3mVGVHrmmxsd5PAItGkmyzwJvybiw2IVq5nqd0i4LSNSkB/sx9VHllbfFdr9k1JA==

supports-color@^5.3.0:
version "5.5.0"
resolved "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz"
@@ -4881,6 +4926,11 @@ zod-to-json-schema@^3.22.3:
resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.22.4.tgz#f8cc691f6043e9084375e85fb1f76ebafe253d70"
integrity sha512-2Ed5dJ+n/O3cU383xSY28cuVi0BCQhF8nYqWU5paEpl7fVdqdAmiLdqLyfblbNdfOFwFfi/mqU4O1pwc60iBhQ==

zod-to-json-schema@^3.22.4:
  version "3.23.3"
  resolved "https://registry.yarnpkg.com/zod-to-json-schema/-/zod-to-json-schema-3.23.3.tgz#56cf4e0bd5c4096ab46e63159e20998ec7b19c39"
  integrity sha512-TYWChTxKQbRJp5ST22o/Irt9KC5nj7CdBKYB/AosCRdj/wxEMvv4NNaj9XVUHDOIp53ZxArGhnw5HMZziPFjog==

zod@^3.22.4:
version "3.22.4"
resolved "https://registry.yarnpkg.com/zod/-/zod-3.22.4.tgz#f31c3a9386f61b1f228af56faa9255e845cf3fff"