From e296152247003d00bb38dcc45165c872c1787586 Mon Sep 17 00:00:00 2001
From: Jeongho Nam
Date: Sat, 7 Sep 2024 22:26:51 +0900
Subject: [PATCH] Description comments on LLM module.

---
 benchmark/package.json |   2 +-
 errors/package.json    |   2 +-
 src/llm.ts             | 140 ++++++++++++++++++++++++++++++++++++++++-
 test-esm/package.json  |   2 +-
 4 files changed, 141 insertions(+), 5 deletions(-)

diff --git a/benchmark/package.json b/benchmark/package.json
index 7b9a3235f7..311214e499 100644
--- a/benchmark/package.json
+++ b/benchmark/package.json
@@ -72,6 +72,6 @@
     "suppress-warnings": "^1.0.2",
     "tstl": "^3.0.0",
     "uuid": "^9.0.1",
-    "typia": "../typia-6.10.0-dev.20240906.tgz"
+    "typia": "../typia-6.10.0-dev.20240907.tgz"
   }
 }
\ No newline at end of file
diff --git a/errors/package.json b/errors/package.json
index 264bf2948d..bee4c116a2 100644
--- a/errors/package.json
+++ b/errors/package.json
@@ -32,6 +32,6 @@
     "typescript": "^5.3.2"
   },
   "dependencies": {
-    "typia": "../typia-6.10.0-dev.20240906.tgz"
+    "typia": "../typia-6.10.0-dev.20240907.tgz"
   }
 }
\ No newline at end of file
diff --git a/src/llm.ts b/src/llm.ts
index e7a41aa9ac..a42347fb67 100644
--- a/src/llm.ts
+++ b/src/llm.ts
@@ -1,7 +1,72 @@
 import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
 
-export function application(): never;
-export function application<App extends object>(): ILlmApplication;
+/**
+ * > You must configure the generic argument `App`.
+ *
+ * TypeScript functions to LLM function schemas.
+ *
+ * Creates an application of LLM (Large Language Model) function calling schemas
+ * from a TypeScript class or interface type containing the target functions to be
+ * called by the LLM function calling feature.
+ *
+ * If you deliver the returned {@link ILlmApplication.functions} objects to an LLM
+ * provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
+ * select the proper function and fill its arguments from the conversation (maybe
+ * chat text) with the user (human). This is the concept of LLM function calling.
+ *
+ * However, there can be some parameters (or their nested properties) that must be
+ * composed by the human, not by the LLM. A file uploading feature, or sensitive
+ * information like a secret key (password), are such examples. In that case, you
+ * can separate the function parameters into LLM and human sides by configuring the
+ * {@link ILlmApplication.IOptions.separate} property.
+ *
+ * Additionally, the actual function call is not executed by the LLM, but by you.
+ * When the LLM selects the proper function and fills its arguments, you just call
+ * the function with the LLM-prepared arguments. Then inform the return value to
+ * the LLM through a system prompt, and the LLM will continue the next conversation
+ * based on the return value.
+ *
+ * @template App Target class or interface type collecting the functions to call
+ * @param options Options for the LLM application construction
+ * @returns Application of LLM function calling schemas
+ * @reference https://platform.openai.com/docs/guides/function-calling
+ * @author Jeongho Nam - https://github.com/samchon
+ */
+export function application(options?: ILlmApplication.IOptions): never;
+
+/**
+ * TypeScript functions to LLM function schemas.
+ *
+ * Creates an application of LLM (Large Language Model) function calling schemas
+ * from a TypeScript class or interface type containing the target functions to be
+ * called by the LLM function calling feature.
+ *
+ * If you deliver the returned {@link ILlmApplication.functions} objects to an LLM
+ * provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
+ * select the proper function and fill its arguments from the conversation (maybe
+ * chat text) with the user (human). This is the concept of LLM function calling.
+ *
+ * However, there can be some parameters (or their nested properties) that must be
+ * composed by the human, not by the LLM. A file uploading feature, or sensitive
+ * information like a secret key (password), are such examples. In that case, you
+ * can separate the function parameters into LLM and human sides by configuring the
+ * {@link ILlmApplication.IOptions.separate} property.
+ *
+ * Additionally, the actual function call is not executed by the LLM, but by you.
+ * When the LLM selects the proper function and fills its arguments, you just call
+ * the function with the LLM-prepared arguments. Then inform the return value to
+ * the LLM through a system prompt, and the LLM will continue the next conversation
+ * based on the return value.
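+ *
+ * @example
+ * A minimal usage sketch, assuming this `src/llm.ts` module is exposed as the
+ * `typia.llm` namespace. `IBbsArticleService` below is a hypothetical interface
+ * standing in for whatever class or interface type collects your target functions.
+ *
+ * ```typescript
+ * import { ILlmApplication } from "@samchon/openapi";
+ * import typia from "typia";
+ *
+ * // hypothetical interface whose functions the LLM may call
+ * interface IBbsArticleService {
+ *   create(props: { title: string; body: string }): void;
+ *   erase(id: string): void;
+ * }
+ *
+ * // compose the function calling schemas from the interface type
+ * const app: ILlmApplication = typia.llm.application<IBbsArticleService>();
+ * console.log(app.functions); // deliver these to the LLM provider
+ * ```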
+ *
+ * @template App Target class or interface type collecting the functions to call
+ * @param options Options for the LLM application construction
+ * @returns Application of LLM function calling schemas
+ * @reference https://platform.openai.com/docs/guides/function-calling
+ * @author Jeongho Nam - https://github.com/samchon
+ */
+export function application<App extends object>(
+  options?: ILlmApplication.IOptions,
+): ILlmApplication;
 
 /**
  * @internal
@@ -10,7 +75,78 @@ export function application(): never {
   halt("application");
 }
 
+/**
+ * > You must configure the generic argument `T`.
+ *
+ * TypeScript type to LLM type schema.
+ *
+ * Creates an LLM (Large Language Model) type schema, the type metadata used in
+ * [LLM function calling](https://platform.openai.com/docs/guides/function-calling),
+ * from a TypeScript type.
+ *
+ * The returned {@link ILlmSchema} type is similar to the OpenAPI v3.0 based JSON
+ * schema definition, but it is simplified for LLM function calling by removing the
+ * {@link OpenApiV3.IJson.IReference reference} type embodied by the
+ * {@link OpenApiV3.IJson.IReference.$ref `$ref`} property.
+ *
+ * If you actually want to perform LLM function calling with TypeScript functions,
+ * you can do it with the {@link application} function. Let's enjoy LLM function
+ * calling with native TypeScript functions and types.
+ *
+ * > **What is LLM function calling?**
+ * >
+ * > The LLM (Large Language Model) selects the proper function and fills its
+ * > arguments, but the actual function call is executed by you, not by the LLM.
+ * >
+ * > Nowadays, most LLM providers like OpenAI support the "function calling"
+ * > feature, which means that the LLM automatically selects a proper function
+ * > and composes its parameter values from the user's chat text.
+ * >
+ * > When the LLM selects the proper function and its arguments, you just call
+ * > the function with the arguments. Then inform the return value to the LLM
+ * > through a system prompt, and the LLM will continue the next conversation
+ * > based on the return value.
+ *
+ * @template T Target type
+ * @returns LLM schema
+ * @reference https://platform.openai.com/docs/guides/function-calling
+ * @author Jeongho Nam - https://github.com/samchon
+ */
 export function schema(): never;
+
+/**
+ * TypeScript type to LLM type schema.
+ *
+ * Creates an LLM (Large Language Model) type schema, the type metadata used in
+ * [LLM function calling](https://platform.openai.com/docs/guides/function-calling),
+ * from a TypeScript type.
+ *
+ * The returned {@link ILlmSchema} type is similar to the OpenAPI v3.0 based JSON
+ * schema definition, but it is simplified for LLM function calling by removing the
+ * {@link OpenApiV3.IJson.IReference reference} type embodied by the
+ * {@link OpenApiV3.IJson.IReference.$ref `$ref`} property.
+ *
+ * If you actually want to perform LLM function calling with TypeScript functions,
+ * you can do it with the {@link application} function. Let's enjoy LLM function
+ * calling with native TypeScript functions and types.
+ *
+ * > **What is LLM function calling?**
+ * >
+ * > The LLM (Large Language Model) selects the proper function and fills its
+ * > arguments, but the actual function call is executed by you, not by the LLM.
+ * >
+ * > Nowadays, most LLM providers like OpenAI support the "function calling"
+ * > feature, which means that the LLM automatically selects a proper function
+ * > and composes its parameter values from the user's chat text.
+ * >
+ * > When the LLM selects the proper function and its arguments, you just call
+ * > the function with the arguments. Then inform the return value to the LLM
+ * > through a system prompt, and the LLM will continue the next conversation
+ * > based on the return value.
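+ *
+ * @example
+ * A minimal usage sketch, assuming this `src/llm.ts` module is exposed as the
+ * `typia.llm` namespace. `IMember` below is a hypothetical target type; any
+ * TypeScript type can be converted the same way.
+ *
+ * ```typescript
+ * import { ILlmSchema } from "@samchon/openapi";
+ * import typia from "typia";
+ *
+ * // hypothetical type to convert into an LLM schema
+ * interface IMember {
+ *   name: string;
+ *   age: number;
+ * }
+ *
+ * const schema: ILlmSchema = typia.llm.schema<IMember>();
+ * console.log(schema); // JSON-schema-like metadata without any $ref
+ * ```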
+ *
+ * @template T Target type
+ * @returns LLM schema
+ * @reference https://platform.openai.com/docs/guides/function-calling
+ * @author Jeongho Nam - https://github.com/samchon
+ */
 export function schema<T>(): ILlmSchema;
 
 /**
diff --git a/test-esm/package.json b/test-esm/package.json
index f7d1ac4840..cdddeddff6 100644
--- a/test-esm/package.json
+++ b/test-esm/package.json
@@ -36,6 +36,6 @@
     "typescript": "^5.4.5"
   },
   "dependencies": {
-    "typia": "../typia-6.10.0-dev.20240906.tgz"
+    "typia": "../typia-6.10.0-dev.20240907.tgz"
   }
 }
\ No newline at end of file