Skip to content

Commit

Permalink
Allow LLM model specification
Browse files Browse the repository at this point in the history
  • Loading branch information
samchon committed Nov 9, 2024
1 parent ee0597d commit a5ad9d7
Show file tree
Hide file tree
Showing 1,191 changed files with 36,170 additions and 3,793 deletions.
2 changes: 1 addition & 1 deletion benchmark/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,6 @@
"suppress-warnings": "^1.0.2",
"tstl": "^3.0.0",
"uuid": "^9.0.1",
"typia": "../typia-7.0.0-dev.20241023.tgz"
"typia": "../typia-7.0.0-dev.20241110.tgz"
}
}
2 changes: 1 addition & 1 deletion errors/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,6 @@
"typescript": "^5.3.2"
},
"dependencies": {
"typia": "../typia-7.0.0-dev.20241023.tgz"
"typia": "../typia-7.0.0-dev.20241110.tgz"
}
}
4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "typia",
"version": "7.0.0-dev.20241023",
"version": "7.0.0-dev.20241110",
"description": "Superfast runtime validators with only one line",
"main": "lib/index.js",
"typings": "lib/index.d.ts",
Expand Down Expand Up @@ -68,7 +68,7 @@
},
"homepage": "https://typia.io",
"dependencies": {
"@samchon/openapi": "^1.1.1",
"@samchon/openapi": "^2.0.0-dev.20241110-2",
"commander": "^10.0.0",
"comment-json": "^4.2.3",
"inquirer": "^8.2.5",
Expand Down
4 changes: 2 additions & 2 deletions packages/typescript-json/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "typescript-json",
"version": "7.0.0-dev.20241023",
"version": "7.0.0-dev.20241110",
"description": "Superfast runtime validators with only one line",
"main": "lib/index.js",
"typings": "lib/index.d.ts",
Expand Down Expand Up @@ -64,7 +64,7 @@
},
"homepage": "https://typia.io",
"dependencies": {
"typia": "7.0.0-dev.20241023"
"typia": "7.0.0-dev.20241110"
},
"peerDependencies": {
"typescript": ">=4.8.0 <5.7.0"
Expand Down
15 changes: 8 additions & 7 deletions src/internal/_llmApplicationFinalize.ts
Original file line number Diff line number Diff line change
@@ -1,18 +1,19 @@
import { ILlmApplication } from "@samchon/openapi";
import { HttpLlmConverter } from "@samchon/openapi/lib/converters/HttpLlmConverter";

import { LlmSchemaSeparator } from "@samchon/openapi/lib/utils/LlmSchemaSeparator";

export const _llmApplicationFinalize = (
app: ILlmApplication,
options?: ILlmApplication.IOptions,
export const _llmApplicationFinalize = <Model extends ILlmApplication.Model>(
app: ILlmApplication<Model>,
options?: ILlmApplication.IOptions<Model>,
): void => {
app.options = {
separate: options?.separate ?? null,
recursive: app.model === "chatgpt" ? undefined : (3 as any),
};
if (app.options.separate === null) return;
for (const func of app.functions)
func.separated = LlmSchemaSeparator.parameters({
func.separated = HttpLlmConverter.separateParameters({
model: app.model,
parameters: func.parameters,
predicator: app.options.separate,
predicate: app.options.separate,
});
};
24 changes: 18 additions & 6 deletions src/llm.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
import { ILlmApplication } from "@samchon/openapi";

/**
* > You must configure the generic argument `App`.
Expand Down Expand Up @@ -33,12 +33,15 @@ import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
* before the actual LLM function call execution.
*
* @template App Target class or interface type collecting the functions to call
* @template Model LLM schema model
* @param options Options for the LLM application construction
* @returns Application of LLM function calling schemas
* @reference https://platform.openai.com/docs/guides/function-calling
* @author Jeongho Nam - https://github.com/samchon
*/
export function application(options?: ILlmApplication.IOptions): never;
export function application(
options?: Partial<Omit<ILlmApplication.IOptions<any>, "recursive">>,
): never;

/**
* TypeScript functions to LLM function calling application.
Expand Down Expand Up @@ -71,14 +74,18 @@ export function application(options?: ILlmApplication.IOptions): never;
* before the actual LLM function call execution.
*
* @template App Target class or interface type collecting the functions to call
* @template Model LLM schema model
* @param options Options for the LLM application construction
* @returns Application of LLM function calling schemas
* @reference https://platform.openai.com/docs/guides/function-calling
* @author Jeongho Nam - https://github.com/samchon
*/
export function application<App extends object>(
options?: ILlmApplication.IOptions,
): ILlmApplication;
export function application<
App extends object,
Model extends ILlmApplication.Model = "3.1",
>(
options?: Partial<Omit<ILlmApplication.IOptions<Model>, "recursive">>,
): ILlmApplication<Model>;

/**
* @internal
Expand Down Expand Up @@ -119,6 +126,7 @@ export function application(): never {
* > LLM will continue the next conversation based on the return value.
*
* @template T Target type
* @template Model LLM schema model
* @returns LLM schema
* @reference https://platform.openai.com/docs/guides/function-calling
* @author Jeongho Nam - https://github.com/samchon
Expand Down Expand Up @@ -155,11 +163,15 @@ export function schema(): never;
* > LLM will continue the next conversation based on the return value.
*
* @template T Target type
* @template Model LLM schema model
* @returns LLM schema
* @reference https://platform.openai.com/docs/guides/function-calling
* @author Jeongho Nam - https://github.com/samchon
*/
export function schema<T>(): ILlmSchema;
export function schema<
T,
Model extends ILlmApplication.Model = "3.1",
>(): ILlmApplication.ModelSchema[Model];

/**
* @internal
Expand Down
47 changes: 31 additions & 16 deletions src/programmers/llm/LlmApplicationProgrammer.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
import { ILlmApplication } from "@samchon/openapi";
import { ILlmFunction } from "@samchon/openapi/lib/structures/ILlmFunction";

import { MetadataFactory } from "../../factories/MetadataFactory";
Expand Down Expand Up @@ -84,8 +84,11 @@ export namespace LlmApplicationProgrammer {
return output;
};

export const write = (metadata: Metadata): ILlmApplication => {
const errors: string[] = validate()(metadata, {
export const write = <Model extends ILlmApplication.Model>(props: {
model: Model;
metadata: Metadata;
}): ILlmApplication<Model> => {
const errors: string[] = validate()(props.metadata, {
top: true,
object: null,
property: null,
Expand All @@ -98,8 +101,9 @@ export namespace LlmApplicationProgrammer {
if (errors.length)
throw new Error("Failed to write LLM application: " + errors.join("\n"));

const object: MetadataObjectType = metadata.objects[0]!.type;
const object: MetadataObjectType = props.metadata.objects[0]!.type;
return {
model: props.model,
functions: object.properties
.filter(
(p) =>
Expand All @@ -112,6 +116,7 @@ export namespace LlmApplicationProgrammer {
)
.map((p) =>
writeFunction({
model: props.model,
name: p.key.getSoleLiteral()!,
function: p.value.functions[0]!,
description: p.description,
Expand All @@ -120,16 +125,18 @@ export namespace LlmApplicationProgrammer {
),
options: {
separate: null,
recursive: props.model === "chatgpt" ? undefined : (3 as any),
},
};
};

const writeFunction = (props: {
const writeFunction = <Model extends ILlmApplication.Model>(props: {
model: Model;
name: string;
function: MetadataFunction;
description: string | null;
jsDocTags: IJsDocTagInfo[];
}): ILlmFunction => {
}): ILlmFunction<ILlmApplication.ModelSchema[Model]> => {
const deprecated: boolean = props.jsDocTags.some(
(tag) => tag.name === "deprecated",
);
Expand All @@ -154,6 +161,7 @@ export namespace LlmApplicationProgrammer {
},
);
return writeSchema({
model: props.model,
metadata: p.type,
description: jsDocTagDescription ?? p.description,
jsDocTags: jsDocTagDescription ? [] : p.jsDocTags,
Expand All @@ -162,6 +170,7 @@ export namespace LlmApplicationProgrammer {
output:
props.function.output.size() || props.function.output.nullable
? writeSchema({
model: props.model,
metadata: props.function.output,
description:
writeDescriptionFromJsDocTag({
Expand All @@ -181,17 +190,22 @@ export namespace LlmApplicationProgrammer {
};
};

const writeSchema = (props: {
const writeSchema = <Model extends ILlmApplication.Model>(props: {
model: Model;
metadata: Metadata;
description: string | null;
jsDocTags: IJsDocTagInfo[];
}): ILlmSchema => {
const schema: ILlmSchema = LlmSchemaProgrammer.write(props.metadata);
const explicit: Pick<ILlmSchema, "title" | "description"> =
writeDescription({
description: props.description,
jsDocTags: props.jsDocTags,
});
}): ILlmApplication.ModelSchema[Model] => {
const schema: ILlmApplication.ModelSchema[Model] =
LlmSchemaProgrammer.write(props);
const explicit: Pick<
ILlmApplication.ModelSchema[Model],
"title" | "description"
> = writeDescription({
model: props.model,
description: props.description,
jsDocTags: props.jsDocTags,
});
return {
...schema,
...(!!explicit.title?.length || !!explicit.description?.length
Expand All @@ -200,10 +214,11 @@ export namespace LlmApplicationProgrammer {
};
};

const writeDescription = (props: {
const writeDescription = <Model extends ILlmApplication.Model>(props: {
model: Model;
description: string | null;
jsDocTags: IJsDocTagInfo[];
}): Pick<ILlmSchema, "title" | "description"> => {
}): Pick<ILlmApplication.ModelSchema[Model], "title" | "description"> => {
const title: string | undefined = (() => {
const [explicit] = getJsDocTexts({
jsDocTags: props.jsDocTags,
Expand Down
34 changes: 20 additions & 14 deletions src/programmers/llm/LlmSchemaProgrammer.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { ILlmSchema } from "@samchon/openapi";
import { ILlmApplication } from "@samchon/openapi";
import { HttpLlmConverter } from "@samchon/openapi/lib/converters/HttpLlmConverter";

import { IJsonSchemaCollection } from "../../schemas/json/IJsonSchemaCollection";
Expand Down Expand Up @@ -34,26 +34,32 @@ export namespace LlmSchemaProgrammer {
native.name !== "File"
)
output.push(`LLM schema does not support ${native.name} type.`);
if (
metadata.aliases.some((a) => a.type.recursive) ||
metadata.arrays.some((a) => a.type.recursive) ||
metadata.objects.some((o) => o.type.recursive) ||
metadata.tuples.some((t) => t.type.recursive)
)
output.push("LLM schema does not support recursive type.");
// if (
// metadata.aliases.some((a) => a.type.recursive) ||
// metadata.arrays.some((a) => a.type.recursive) ||
// metadata.objects.some((o) => o.type.recursive) ||
// metadata.tuples.some((t) => t.type.recursive)
// )
// output.push("LLM schema does not support recursive type.");
return output;
};

export const write = (metadata: Metadata): ILlmSchema => {
export const write = <Model extends ILlmApplication.Model>(props: {
model: Model;
metadata: Metadata;
}): ILlmApplication.ModelSchema[Model] => {
const collection: IJsonSchemaCollection<"3.1"> =
JsonSchemasProgrammer.write({
version: "3.1",
metadatas: [metadata],
metadatas: [props.metadata],
});
const schema: ILlmApplication.ModelSchema[Model] | null =
HttpLlmConverter.schema({
model: props.model,
components: collection.components,
schema: collection.schemas[0]!,
recursive: 3,
});
const schema: ILlmSchema | null = HttpLlmConverter.schema({
components: collection.components,
schema: collection.schemas[0]!,
});
if (schema === null)
throw new Error("Failed to convert JSON schema to LLM schema.");
return schema;
Expand Down
51 changes: 50 additions & 1 deletion src/transformers/features/llm/LlmApplicationTransformer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,22 @@ export namespace LlmApplicationTransformer {
});

// GENERATE LLM APPLICATION
const schema: ILlmApplication = LlmApplicationProgrammer.write(result.data);
const model: ILlmApplication.Model = get_parameter<ILlmApplication.Model>({
checker: props.context.checker,
name: "Model",
is: (value) =>
value === "3.1" ||
value === "3.0" ||
value === "chatgpt" ||
value === "gemini",
cast: (value) => value as ILlmApplication.Model,
default: () => "3.1",
})(props.expression.typeArguments[1]);
const schema: ILlmApplication<ILlmApplication.Model> =
LlmApplicationProgrammer.write({
model,
metadata: result.data,
});
const literal: ts.Expression = LiteralFactory.write(schema);
if (!props.expression.arguments?.[0]) return literal;

Expand Down Expand Up @@ -83,4 +98,38 @@ export namespace LlmApplicationTransformer {
),
);
};

const get_parameter =
<Value>(props: {
checker: ts.TypeChecker;
name: string;
is: (value: string) => boolean;
cast: (value: string) => Value;
default: () => Value;
}) =>
(node: ts.TypeNode | undefined): Value => {
if (!node) return props.default();

// CHECK LITERAL TYPE
const type: ts.Type = props.checker.getTypeFromTypeNode(node);
if (
!type.isLiteral() &&
(type.getFlags() & ts.TypeFlags.BooleanLiteral) === 0
)
throw new TransformerError({
code: "typia.llm.application",
message: `generic argument "${props.name}" must be constant.`,
});

// GET VALUE AND VALIDATE IT
const value = type.isLiteral()
? type.value
: props.checker.typeToString(type);
if (typeof value !== "string" || props.is(value) === false)
throw new TransformerError({
code: "typia.llm.application",
message: `invalid value on generic argument "${props.name}".`,
});
return props.cast(value);
};
}
Loading

0 comments on commit a5ad9d7

Please sign in to comment.